diff --git a/.env.template b/.env.template index dbf140be..72ce5674 100644 --- a/.env.template +++ b/.env.template @@ -30,8 +30,5 @@ SENTRY_AUTH_TOKEN="" #disabled for local # https://www.alchemy.com/ ALCHEMY_API_KEY="" -# https://www.infura.io/ -INFURA_API_KEY="" - # https://drpc.org/ DRPC_API_KEY="" \ No newline at end of file diff --git a/.github/workflows/ci-test-unit.yml b/.github/workflows/ci-test-unit.yml index 067b33e3..e8286685 100644 --- a/.github/workflows/ci-test-unit.yml +++ b/.github/workflows/ci-test-unit.yml @@ -18,7 +18,6 @@ jobs: ALCHEMY_API_KEY: ${{ secrets.ALCHEMY_API_KEY }} DRPC_API_KEY: "test" - INFURA_API_KEY: "test" FILECOIN_API_KEY: "test" INDEXER_ENVIRONMENT: "test" @@ -26,6 +25,8 @@ jobs: CACHING_DATABASE_URL: "https://test.supabase.com" DATA_DATABASE_URL: "https://test.supabase.com" + ENABLE_CRON_JOBS: "false" + permissions: # Required to checkout the code contents: read diff --git a/.gitignore b/.gitignore index f2efca40..1280f5c6 100644 --- a/.gitignore +++ b/.gitignore @@ -36,9 +36,9 @@ yarn-error.log* *.tsbuildinfo next-env.d.ts -# generated graphql files -src/generated/ - dist .rollup.cache .idea + +# generated swagger files +src/__generated__/ diff --git a/README.md b/README.md index 691a2ecd..7c48f1ff 100644 --- a/README.md +++ b/README.md @@ -28,10 +28,10 @@ The API implements a fallback to the first available RPC. You can set the RPCs i ### Supabase -* Install Docker -* `git submodule init` -* `git submodule update --remote` -* `pnpm supabase:start:all` +- Install Docker +- `git submodule init` +- `git submodule update --remote` +- `pnpm supabase:start:all` This will spin up 2 Supabase instances in Docker, one for the indexer service (caching) and one for the data service (static data) which are both exposed by the API. 
@@ -43,7 +43,7 @@ From both instances, you can get their respective keys and add them to the env v This will run a live production instance by running `swc` to compile the code and `nodemon` to restart the server on changes. -You can then find the API at `localhost:4000/spec` (Swagger instance) and the GraphQL at `localhost:4000/v1/graphql` +You can then find the API at `localhost:4000/spec` (Swagger instance) and the GraphQL at `localhost:4000/v2/graphql` ## Deployments @@ -51,13 +51,13 @@ Production: `https://api.hypercerts.org/` Staging: `https://staging-api.hypercerts.org` `/spec` - Swagger instance documenting the API and exposing a playground to experiment with the endpoints -`/v1/graphql` - GraphQL API to access hypercerts data like claims, fractions, attestations, allow lists +`/v2/graphql` - GraphQL API to access hypercerts data like claims, fractions, attestations, allow lists ## Scripts - `dev`: Starts the development server using `nodemon`, which will automatically restart the server whenever you save a file that the server uses. - `build`: Denerates the OpenAPI specification and routes using `tsoa`, and then compiles the TypeScript code into JavaScript using `swc`. The compiled code is output to the `dist` directory. -- `start`: Starts the application in production mode. +- `start`: Starts the application in production mode. - `lint`: Runs `eslint` on the codebase to check for linting errors. 
- `test`: Runs tests using `vitest` @@ -86,38 +86,38 @@ The API also provides an upload and validation endpoint for hypercert and allow graph TB Client[Client Applications] API[Hypercerts API :4000] - + subgraph "API Endpoints" Swagger["/spec\nSwagger Documentation"] - GraphQL["/v1/graphql\nGraphQL Endpoint"] + GraphQL["/v2/graphql\nGraphQL Endpoint"] Upload["Upload & Validation\nEndpoints"] end - + subgraph "Data Services" Static[("Static Data Service\n(Supabase DB)\n- User Data\n- Collections\n- Signed Orders")] Indexer[("Indexer Service\n(Supabase DB)\n- On-chain Data\n- IPFS Data")] end - + subgraph "External Services" IPFS[(IPFS\nMetadata Storage)] Blockchain[(Blockchain\nSupported Chains)] EAS[(EAS\nAttestations)] end - + Client --> API API --> Swagger API --> GraphQL API --> Upload - + GraphQL --> Static GraphQL --> Indexer Upload --> IPFS - + Indexer --> Blockchain Indexer --> IPFS Indexer --> EAS - + class Swagger,GraphQL,Upload apiEndpoint; class Static,Indexer database; class IPFS,Blockchain,EAS external; -``` \ No newline at end of file +``` diff --git a/docs/DEVELOPMENT.md b/docs/DEVELOPMENT.md new file mode 100644 index 00000000..9429249e --- /dev/null +++ b/docs/DEVELOPMENT.md @@ -0,0 +1,176 @@ +# Development Guide: Implementing a New Entity + +This guide explains how to implement a new entity in the Hypercerts API, from type definition to resolver implementation. + +## Overview + +The Hypercerts API uses a modular architecture where each entity follows a consistent pattern: + +1. Type Definition +2. Query Arguments +3. Entity Service +4. Resolver + +## Step-by-Step Implementation + +### 1. 
Define Entity Types + +Create a new file in `src/graphql/schemas/typeDefs/` for your entity types: + +```typescript +// src/graphql/schemas/typeDefs/yourEntityTypeDefs.ts +import { Field, Int, ObjectType } from "type-graphql"; +import { BaseEntity } from "./baseTypes.js"; + +@ObjectType() +export class YourEntity extends BaseEntity { + @Field(() => String) + name: string; + + @Field(() => String, { nullable: true }) + description?: string; + + // Add other fields as needed +} + +@ObjectType() +export class GetYourEntitiesResponse { + @Field(() => [YourEntity]) + data: YourEntity[]; + + @Field(() => Int) + count: number; +} +``` + +### 2. Define Query Arguments + +Create a new file in `src/graphql/schemas/args/` for your query arguments: + +```typescript +// src/graphql/schemas/args/yourEntityArgs.ts +import { ArgsType } from "type-graphql"; +import { createEntityArgs } from "../../../lib/graphql/createEntityArgs.js"; +import { EntityTypeDefs } from "../typeDefs/typeDefs.js"; + +// Define your entity fields +const fields = { + name: "string", + description: "string", + // Add other fields as needed +} as const; + +// Create query arguments +export const { WhereInput, SortOptions } = createEntityArgs( + "YourEntity" as EntityTypeDefs, + fields, +); + +@ArgsType() +export class GetYourEntitiesArgs { + first?: number; + offset?: number; + where?: typeof WhereInput; + sortBy?: typeof SortOptions; +} +``` + +### 3. 
Create Entity Service + +Create a new file in `src/services/database/entities/` for your entity service: + +```typescript +// src/services/database/entities/YourEntityService.ts +import { injectable } from "tsyringe"; +import { createEntityService } from "./EntityServiceFactory.js"; +import { GetYourEntitiesArgs } from "../../../graphql/schemas/args/yourEntityArgs.js"; +import { YourEntity } from "../../../graphql/schemas/typeDefs/yourEntityTypeDefs.js"; + +@injectable() +export class YourEntityService { + private service = createEntityService( + "your_entity_table", + { + // Add any custom query modifiers if needed + }, + ); + + async getYourEntities(args: GetYourEntitiesArgs) { + return this.service.getMany(args); + } + + async getYourEntity(args: GetYourEntitiesArgs) { + return this.service.getSingle(args); + } +} +``` + +### 4. Implement Resolver + +Create a new file in `src/graphql/schemas/resolvers/` for your resolver: + +```typescript +// src/graphql/schemas/resolvers/yourEntityResolver.ts +import { inject, injectable } from "tsyringe"; +import { Args, Query, Resolver } from "type-graphql"; +import { YourEntityService } from "../../../services/database/entities/YourEntityService.js"; +import { GetYourEntitiesArgs } from "../args/yourEntityArgs.js"; +import { + GetYourEntitiesResponse, + YourEntity, +} from "../typeDefs/yourEntityTypeDefs.js"; + +@injectable() +@Resolver(() => YourEntity) +export class YourEntityResolver { + constructor( + @inject(YourEntityService) + private yourEntityService: YourEntityService, + ) {} + + @Query(() => GetYourEntitiesResponse) + async yourEntities(@Args() args: GetYourEntitiesArgs) { + return this.yourEntityService.getYourEntities(args); + } +} +``` + +### 5. Register the Resolver + +Add your resolver to the list of resolvers in `src/graphql/schemas/resolvers/index.ts`: + +```typescript +export * from "./yourEntityResolver.js"; +``` + +## Best Practices + +1. 
**Type Safety**: Always use TypeScript's type system to ensure type safety across your implementation. +2. **Consistent Naming**: Follow the existing naming conventions in the codebase. +3. **Error Handling**: Implement proper error handling in your service and resolver methods. +4. **Testing**: Write unit tests for your new entity implementation. +5. **Documentation**: Add JSDoc comments to document your types, methods, and classes. + +## Example Implementation + +For a complete example, you can look at the implementation of existing entities like `Contract`, `Metadata`, or `AttestationSchema` in the codebase. + +## Common Pitfalls + +1. **Type Registration**: Ensure all your types are properly registered in the GraphQL schema. +2. **Dependency Injection**: Use the `@injectable()` and `@inject()` decorators correctly. +3. **Query Arguments**: Make sure your query arguments match the expected structure. +4. **Database Schema**: Ensure your database table matches the entity structure. + +## Testing Your Implementation + +1. Start the development server: `pnpm dev` +2. Access the GraphQL playground at `http://localhost:4000/v2/graphql` +3. Test your queries and mutations +4. 
Run the test suite: `pnpm test` + +## Additional Resources + +- [TypeGraphQL Documentation](https://typegraphql.com/) +- [Kysely Documentation](https://kysely.dev/docs/intro) +- [Supabase Documentation](https://supabase.com/docs) diff --git a/eslint.config.js b/eslint.config.js index 42f07d57..b74ac03f 100644 --- a/eslint.config.js +++ b/eslint.config.js @@ -9,4 +9,14 @@ export default tseslint.config( "@typescript-eslint/no-extraneous-class": "off", }, }, + { + files: ["**/*.test.ts"], + rules: { + "@typescript-eslint/no-explicit-any": "off", + "@typescript-eslint/no-unused-vars": [ + "error", + { argsIgnorePattern: "^_" }, + ], + }, + }, ); diff --git a/lib/hypercerts-indexer b/lib/hypercerts-indexer index b35f9af9..9315980a 160000 --- a/lib/hypercerts-indexer +++ b/lib/hypercerts-indexer @@ -1 +1 @@ -Subproject commit b35f9af91cb4e843910134a00f7a19c38ebabde4 +Subproject commit 9315980a0d82ad1c49556f4a4c9fecf0701acff3 diff --git a/package.json b/package.json index 6c770abb..c06b6023 100644 --- a/package.json +++ b/package.json @@ -5,7 +5,7 @@ "type": "module", "scripts": { "dev": "nodemon", - "build": "rimraf dist && tsoa spec-and-routes && swc src --out-dir dist --copy-files", + "build": "rimraf dist && tsoa spec-and-routes && tsc && swc src --out-dir dist --copy-files", "start": "node -r dotenv/config dist/src/index.js", "integration": "concurrently -c \"green,blue\" --names \"CACHE,DATA\" \"pnpm --dir ./lib/hypercerts-indexer run dev\" \"pnpm run dev\"", "supabase:reset:all": "concurrently -c \"blue,green\" --names \"DATA,CACHE\" \"npm run supabase:reset:data\" \"npm run supabase:reset:cache\"", @@ -26,40 +26,33 @@ "commitlint": "commitlint --config commitlintrc.ts --edit" }, "dependencies": { - "@graphql-tools/merge": "^9.0.19", + "@faker-js/faker": "^9.6.0", "@graphql-yoga/plugin-response-cache": "^3.13.0", "@hypercerts-org/contracts": "2.0.0-alpha.12", - "@hypercerts-org/marketplace-sdk": "0.5.1", + "@hypercerts-org/marketplace-sdk": "0.8.0", 
"@hypercerts-org/sdk": "2.5.0-beta.6", - "@ipld/car": "^5.2.5", "@openzeppelin/merkle-tree": "^1.0.5", "@safe-global/api-kit": "^2.5.4", "@safe-global/protocol-kit": "^5.0.4", - "@sentry/integrations": "^7.114.0", "@sentry/node": "^8.2.1", "@sentry/profiling-node": "^8.2.1", - "@snaplet/seed": "^0.97.20", - "@supabase/postgrest-js": "^1.15.2", "@supabase/supabase-js": "^2.42.5", "@tsoa/runtime": "^6.2.1", "@types/cors": "^2.8.17", "@types/express": "^4.17.21", "@types/lodash": "^4.17.7", "@types/node": "20.10.6", - "@ucanto/core": "^9.0.1", "@ucanto/principal": "^9.0.0", "@urql/core": "^5.0.4", "@web3-storage/access": "^20.0.1", "@web3-storage/w3up-client": "^16.0.0", - "axios": "^1.6.5", "cors": "^2.8.5", + "date-fns": "^4.1.0", "ethers": "^6.12.2", "express": "^4.19.2", "file-type": "^19.6.0", "gql.tada": "^1.8.10", "graphql": "^16.10.0", - "graphql-filter": "^1.1.5", - "graphql-middleware": "^6.1.35", "graphql-scalars": "^1.24.1", "graphql-yoga": "^5.11.0", "kysely": "^0.27.4", @@ -70,11 +63,9 @@ "node-cron": "^3.0.3", "pg": "^8.12.0", "reflect-metadata": "^0.2.2", - "rollup": "^4.12.0", "swagger-ui-express": "^5.0.0", "tsoa": "^6.2.1", "tsyringe": "^4.8.0", - "type-fest": "^4.12.0", "type-graphql": "^2.0.0-rc.2", "viem": "^2.0.3", "zod": "^3.23.8" @@ -99,6 +90,8 @@ "@sentry/types": "^8.2.1", "@swc/cli": "^0.3.12", "@swc/core": "^1.4.15", + "@swc/helpers": "^0.5.15", + "@swc/jest": "^0.2.37", "@types/body-parser": "^1.19.5", "@types/mime-types": "^2.1.4", "@types/multer": "^1.4.12", @@ -119,6 +112,7 @@ "multiformats": "^13.0.0", "node-mocks-http": "^1.14.1", "nodemon": "^3.0.3", + "pg-mem": "^3.0.5", "prettier": "3.3.2", "rimraf": "^5.0.5", "sinon": "^17.0.1", @@ -129,6 +123,7 @@ "typedoc": "^0.26.5", "typescript": "5.5.3", "typescript-eslint": "^7.7.0", + "unplugin-swc": "^1.5.1", "vite-tsconfig-paths": "^5.1.4", "vitest": "^2.1.8", "vitest-mock-extended": "^2.0.2" diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index c008ab67..e5b3c341 100644 --- a/pnpm-lock.yaml 
+++ b/pnpm-lock.yaml @@ -8,24 +8,21 @@ importers: .: dependencies: - '@graphql-tools/merge': - specifier: ^9.0.19 - version: 9.0.19(graphql@16.10.0) + '@faker-js/faker': + specifier: ^9.6.0 + version: 9.6.0 '@graphql-yoga/plugin-response-cache': specifier: ^3.13.0 version: 3.13.0(graphql-yoga@5.11.0(graphql@16.10.0))(graphql@16.10.0) '@hypercerts-org/contracts': specifier: 2.0.0-alpha.12 - version: 2.0.0-alpha.12(ts-node@10.9.2(@swc/core@1.4.15)(@types/node@20.10.6)(typescript@5.5.3))(typescript@5.5.3) + version: 2.0.0-alpha.12(ts-node@10.9.2(@swc/core@1.4.15(@swc/helpers@0.5.15))(@types/node@20.10.6)(typescript@5.5.3))(typescript@5.5.3) '@hypercerts-org/marketplace-sdk': - specifier: 0.5.1 - version: 0.5.1(@safe-global/api-kit@2.5.4(encoding@0.1.13)(typescript@5.5.3)(zod@3.23.8))(@safe-global/protocol-kit@5.0.4(typescript@5.5.3)(zod@3.23.8))(ethers@6.12.2)(graphql@16.10.0)(rollup@4.12.0)(ts-node@10.9.2(@swc/core@1.4.15)(@types/node@20.10.6)(typescript@5.5.3))(typescript@5.5.3)(zod@3.23.8) + specifier: 0.8.0 + version: 0.8.0(@safe-global/api-kit@2.5.4(encoding@0.1.13)(typescript@5.5.3)(zod@3.23.8))(@safe-global/protocol-kit@5.0.4(typescript@5.5.3)(zod@3.23.8))(@safe-global/types-kit@1.0.0(typescript@5.5.3)(zod@3.23.8))(@swc/helpers@0.5.15)(ethers@6.12.2)(graphql@16.10.0)(rollup@4.12.0)(ts-node@10.9.2(@swc/core@1.4.15(@swc/helpers@0.5.15))(@types/node@20.10.6)(typescript@5.5.3))(typescript@5.5.3)(zod@3.23.8) '@hypercerts-org/sdk': specifier: 2.5.0-beta.6 - version: 2.5.0-beta.6(graphql@16.10.0)(rollup@4.12.0)(ts-node@10.9.2(@swc/core@1.4.15)(@types/node@20.10.6)(typescript@5.5.3))(typescript@5.5.3) - '@ipld/car': - specifier: ^5.2.5 - version: 5.2.5 + version: 2.5.0-beta.6(@swc/helpers@0.5.15)(graphql@16.10.0)(rollup@4.12.0)(ts-node@10.9.2(@swc/core@1.4.15(@swc/helpers@0.5.15))(@types/node@20.10.6)(typescript@5.5.3))(typescript@5.5.3) '@openzeppelin/merkle-tree': specifier: ^1.0.5 version: 1.0.5 @@ -35,21 +32,12 @@ importers: '@safe-global/protocol-kit': specifier: 
^5.0.4 version: 5.0.4(typescript@5.5.3)(zod@3.23.8) - '@sentry/integrations': - specifier: ^7.114.0 - version: 7.114.0 '@sentry/node': specifier: ^8.2.1 version: 8.2.1 '@sentry/profiling-node': specifier: ^8.2.1 version: 8.2.1 - '@snaplet/seed': - specifier: ^0.97.20 - version: 0.97.20(@snaplet/copycat@5.0.0)(@types/pg@8.11.6)(encoding@0.1.13)(pg@8.12.0) - '@supabase/postgrest-js': - specifier: ^1.15.2 - version: 1.15.2 '@supabase/supabase-js': specifier: ^2.42.5 version: 2.42.5 @@ -68,9 +56,6 @@ importers: '@types/node': specifier: 20.10.6 version: 20.10.6 - '@ucanto/core': - specifier: ^9.0.1 - version: 9.0.1 '@ucanto/principal': specifier: ^9.0.0 version: 9.0.0 @@ -83,12 +68,12 @@ importers: '@web3-storage/w3up-client': specifier: ^16.0.0 version: 16.0.0(encoding@0.1.13) - axios: - specifier: ^1.6.5 - version: 1.6.5(debug@4.3.4) cors: specifier: ^2.8.5 version: 2.8.5 + date-fns: + specifier: ^4.1.0 + version: 4.1.0 ethers: specifier: ^6.12.2 version: 6.12.2 @@ -104,12 +89,6 @@ importers: graphql: specifier: ^16.10.0 version: 16.10.0 - graphql-filter: - specifier: ^1.1.5 - version: 1.1.5(graphql@16.10.0) - graphql-middleware: - specifier: ^6.1.35 - version: 6.1.35(graphql@16.10.0) graphql-scalars: specifier: ^1.24.1 version: 1.24.1(graphql@16.10.0) @@ -118,7 +97,7 @@ importers: version: 5.11.0(graphql@16.10.0) kysely: specifier: ^0.27.4 - version: 0.27.4 + version: 0.27.6 lodash: specifier: ^4.17.21 version: 4.17.21 @@ -140,9 +119,6 @@ importers: reflect-metadata: specifier: ^0.2.2 version: 0.2.2 - rollup: - specifier: ^4.12.0 - version: 4.12.0 swagger-ui-express: specifier: ^5.0.0 version: 5.0.0(express@4.19.2) @@ -152,9 +128,6 @@ importers: tsyringe: specifier: ^4.8.0 version: 4.8.0 - type-fest: - specifier: ^4.12.0 - version: 4.12.0 type-graphql: specifier: ^2.0.0-rc.2 version: 2.0.0-rc.2(graphql-scalars@1.24.1(graphql@16.10.0))(graphql@16.10.0) @@ -209,10 +182,16 @@ importers: version: 8.2.1 '@swc/cli': specifier: ^0.3.12 - version: 
0.3.12(@swc/core@1.4.15)(chokidar@3.6.0) + version: 0.3.12(@swc/core@1.4.15(@swc/helpers@0.5.15))(chokidar@3.5.3) '@swc/core': specifier: ^1.4.15 - version: 1.4.15 + version: 1.4.15(@swc/helpers@0.5.15) + '@swc/helpers': + specifier: ^0.5.15 + version: 0.5.15 + '@swc/jest': + specifier: ^0.2.37 + version: 0.2.37(@swc/core@1.4.15(@swc/helpers@0.5.15)) '@types/body-parser': specifier: ^1.19.5 version: 1.19.5 @@ -260,7 +239,7 @@ importers: version: 9.1.5 kysely-supabase: specifier: ^0.2.0 - version: 0.2.0(@supabase/supabase-js@2.42.5)(kysely@0.27.4)(supabase@1.191.3) + version: 0.2.0(@supabase/supabase-js@2.42.5)(kysely@0.27.6)(supabase@1.191.3) lint-staged: specifier: ^15.2.9 version: 15.2.9 @@ -273,6 +252,9 @@ importers: nodemon: specifier: ^3.0.3 version: 3.0.3 + pg-mem: + specifier: ^3.0.5 + version: 3.0.5(kysely@0.27.6) prettier: specifier: 3.3.2 version: 3.3.2 @@ -287,7 +269,7 @@ importers: version: 1.191.3 ts-node: specifier: ^10.9.2 - version: 10.9.2(@swc/core@1.4.15)(@types/node@20.10.6)(typescript@5.5.3) + version: 10.9.2(@swc/core@1.4.15(@swc/helpers@0.5.15))(@types/node@20.10.6)(typescript@5.5.3) tsconfig-paths: specifier: ^4.2.0 version: 4.2.0 @@ -303,6 +285,9 @@ importers: typescript-eslint: specifier: ^7.7.0 version: 7.7.0(eslint@8.56.0)(typescript@5.5.3) + unplugin-swc: + specifier: ^1.5.1 + version: 1.5.1(@swc/core@1.4.15(@swc/helpers@0.5.15))(rollup@4.12.0) vite-tsconfig-paths: specifier: ^5.1.4 version: 5.1.4(typescript@5.5.3)(vite@5.0.11(@types/node@20.10.6)) @@ -996,9 +981,9 @@ packages: '@ethersproject/web@5.7.1': resolution: {integrity: sha512-Gueu8lSvyjBWL4cYsWsjh6MtMwM0+H4HvqFPZfB6dV8ctbP9zFAO73VG1cMWae0FLPCtz0peKPpZY8/ugJJX2w==} - '@faker-js/faker@8.4.1': - resolution: {integrity: sha512-XQ3cU+Q8Uqmrbf2e0cIC/QN43sTBSC8KF12u29Mb47tWrt2hAgBXSgpZMj4Ao8Uk0iJcU99QsOCaIL8934obCg==} - engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0, npm: '>=6.14.13'} + '@faker-js/faker@9.6.0': + resolution: {integrity: 
sha512-3vm4by+B5lvsFPSyep3ELWmZfE3kicDtmemVpuwl1yH7tqtnHdsA6hG8fbXedMVdkzgtvzWoRgjSB4Q+FHnZiw==} + engines: {node: '>=18.0.0', npm: '>=9.0.0'} '@fastify/busboy@2.1.0': resolution: {integrity: sha512-+KpH+QxZU7O4675t3mnkQKcZZg56u+K/Ct2K+N2AZYNVK8kyeo/bI18tI8aPm3tvNNRyTWfj6s5tnGNlcbQRsA==} @@ -1007,9 +992,6 @@ packages: '@fastify/deepmerge@1.3.0': resolution: {integrity: sha512-J8TOSBq3SoZbDhM9+R/u77hP93gz/rajSA+K2kGyijPpORPWUXHUpTaleoj+92As0S9uPRP7Oi8IqMf0u+ro6A==} - '@glideapps/ts-necessities@2.2.3': - resolution: {integrity: sha512-gXi0awOZLHk3TbW55GZLCPP6O+y/b5X1pBXKBVckFONSwF1z1E5ND2BGJsghQFah+pW7pkkyFb2VhUQI2qhL5w==} - '@gql.tada/cli-utils@1.6.3': resolution: {integrity: sha512-jFFSY8OxYeBxdKi58UzeMXG1tdm4FVjXa8WHIi66Gzu9JWtCE6mqom3a8xkmSw+mVaybFW5EN2WXf1WztJVNyQ==} peerDependencies: @@ -1107,11 +1089,6 @@ packages: peerDependencies: graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 - '@graphql-tools/batch-execute@8.5.1': - resolution: {integrity: sha512-hRVDduX0UDEneVyEWtc2nu5H2PxpfSfM/riUlgZvo/a/nG475uyehxR5cFGvTEPEQUKY3vGIlqvtRigzqTfCew==} - peerDependencies: - graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 - '@graphql-tools/batch-execute@9.0.11': resolution: {integrity: sha512-v9b618cj3hIrRGTDrOotYzpK+ZigvNcKdXK3LNBM4g/uA7pND0d4GOnuOSBQGKKN6kT/1nsz4ZpUxCoUvWPbzg==} engines: {node: '>=18.0.0'} @@ -1130,11 +1107,6 @@ packages: peerDependencies: graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 - '@graphql-tools/delegate@8.8.1': - resolution: {integrity: sha512-NDcg3GEQmdEHlnF7QS8b4lM1PSF+DKeFcIlLEfZFBvVq84791UtJcDj8734sIHLukmyuAxXMfA1qLd2l4lZqzA==} - peerDependencies: - graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 - '@graphql-tools/documents@1.0.0': resolution: {integrity: sha512-rHGjX1vg/nZ2DKqRGfDPNC55CWZBMldEVcH+91BThRa6JeT80NqXknffLLEZLRUxyikCfkwMsk6xR3UNMqG0Rg==} engines: {node: '>=16.0.0'} @@ -1213,11 +1185,6 @@ packages: peerDependencies: graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 - '@graphql-tools/merge@8.3.1': - resolution: {integrity: 
sha512-BMm99mqdNZbEYeTPK3it9r9S6rsZsQKtlqJsSBknAclXq2pGEfOxjcIZi+kBSkHZKPKCRrYDd5vY0+rUmIHVLg==} - peerDependencies: - graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 - '@graphql-tools/merge@9.0.19': resolution: {integrity: sha512-iJP3Xke+vgnST58A1Q/1+y3bzfbYalIMnegUNupYHNvHHSE0PXoq8YieqQF8JYzWVACMxiq/M4Y1vW75mS2UVg==} engines: {node: '>=16.0.0'} @@ -1248,11 +1215,6 @@ packages: peerDependencies: graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 - '@graphql-tools/schema@8.5.1': - resolution: {integrity: sha512-0Esilsh0P/qYcB5DKQpiKeQs/jevzIadNTaT0jeWklPMwNbT7yMX4EqZany7mbeRRlSRwMzNzL5olyFdffHBZg==} - peerDependencies: - graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 - '@graphql-tools/stitch@9.4.16': resolution: {integrity: sha512-SYsdAlpKY1o2AxIc9v2zHLeVwxq0w2Sp3CIl/wE3dcnD5QqXJqvyqoeciJ7T+XWTldyhxyJpUfbSQLWGXbqwiQ==} engines: {node: '>=18.0.0'} @@ -1277,11 +1239,6 @@ packages: peerDependencies: graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 - '@graphql-tools/utils@8.9.0': - resolution: {integrity: sha512-pjJIWH0XOVnYGXCqej8g/u/tsfV4LvLlj0eATKQu5zwnxd/TiTHq7Cg313qUPTFFHZ3PP5wJ15chYVtLDwaymg==} - peerDependencies: - graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 - '@graphql-tools/wrap@10.0.29': resolution: {integrity: sha512-kQdosPBo6EvFhQV5s0XpN6+N0YN+31mCZTV7uwZisaUwwroAT19ujs2Zxz8Zyw4H9XRCsueLT0wqmSupjIFibQ==} engines: {node: '>=18.0.0'} @@ -1425,12 +1382,13 @@ packages: '@hypercerts-org/contracts@2.0.0-alpha.12': resolution: {integrity: sha512-Nr0aTJIt6/H1mI3N0uve3yF922kCVpAeN3aUqhWZfizukTVSD5aRE64fmKYpwzCWe0JNR9mBBR+Ogxq5lcGSvA==} - '@hypercerts-org/marketplace-sdk@0.5.1': - resolution: {integrity: sha512-OGB56YH2wcSqIKkPxaABaxNQ5MH8ROyFGZw+vKDekY0yYDGlItRC3Jvb6B9aZ9COXoOeNiijSDEHBwpSJBi2mA==} + '@hypercerts-org/marketplace-sdk@0.8.0': + resolution: {integrity: sha512-fm56Kl/oopti4XdPKX19+0SNDPYuY7m46fk4rDSXUeST1tFf8lei4qKate2IBsoknRWCrE8O77QMw1BkJiK2FA==} engines: {node: '>= 16.15.1 <= 20.x'} peerDependencies: '@safe-global/api-kit': ^2.5.7 
'@safe-global/protocol-kit': ^5.2.0 + '@safe-global/types-kit': ^1.0.4 ethers: ^6.6.2 '@hypercerts-org/sdk@2.4.0': @@ -1439,20 +1397,12 @@ packages: '@hypercerts-org/sdk@2.5.0-beta.6': resolution: {integrity: sha512-v24hjmCwkL2/lkbQbYxzepLAJOc2SwfHVBoADNcdcT+/s7Fvpq5I+MddlWHYDcBLacPhyF3k+F9O/tkwvofY1g==} - '@inquirer/checkbox@2.3.5': - resolution: {integrity: sha512-3V0OSykTkE/38GG1DhxRGLBmqefgzRg2EK5A375zz+XEvIWfAHcac31e+zlBDPypRHxhmXc/Oh6v9eOPbH3nAg==} - engines: {node: '>=18'} - '@inquirer/checkbox@4.0.6': resolution: {integrity: sha512-PgP35JfmGjHU0LSXOyRew0zHuA9N6OJwOlos1fZ20b7j8ISeAdib3L+n0jIxBtX958UeEpte6xhG/gxJ5iUqMw==} engines: {node: '>=18'} peerDependencies: '@types/node': '>=18' - '@inquirer/confirm@3.1.9': - resolution: {integrity: sha512-UF09aejxCi4Xqm6N/jJAiFXArXfi9al52AFaSD+2uIHnhZGtd1d6lIGTRMPouVSJxbGEi+HkOWSYaiEY/+szUw==} - engines: {node: '>=18'} - '@inquirer/confirm@5.1.3': resolution: {integrity: sha512-fuF9laMmHoOgWapF9h9hv6opA5WvmGFHsTYGCmuFxcghIhEhb3dN0CdQR4BUMqa2H506NCj8cGX4jwMsE4t6dA==} engines: {node: '>=18'} @@ -1463,42 +1413,22 @@ packages: resolution: {integrity: sha512-5y4/PUJVnRb4bwWY67KLdebWOhOc7xj5IP2J80oWXa64mVag24rwQ1VAdnj7/eDY/odhguW0zQ1Mp1pj6fO/2w==} engines: {node: '>=18'} - '@inquirer/core@8.2.2': - resolution: {integrity: sha512-K8SuNX45jEFlX3EBJpu9B+S2TISzMPGXZIuJ9ME924SqbdW6Pt6fIkKvXg7mOEOKJ4WxpQsxj0UTfcL/A434Ww==} - engines: {node: '>=18'} - - '@inquirer/editor@2.1.9': - resolution: {integrity: sha512-5xCD7CoCh993YqXcsZPt45qkE3gl+03Yfv9vmAkptRi4nrzaUDmyhgBzndKdRG8SrKbQLBmOtztnRLGxvG/ahg==} - engines: {node: '>=18'} - '@inquirer/editor@4.2.3': resolution: {integrity: sha512-S9KnIOJuTZpb9upeRSBBhoDZv7aSV3pG9TECrBj0f+ZsFwccz886hzKBrChGrXMJwd4NKY+pOA9Vy72uqnd6Eg==} engines: {node: '>=18'} peerDependencies: '@types/node': '>=18' - '@inquirer/expand@2.1.9': - resolution: {integrity: sha512-ymnR8qu2ie/3JpOeyZ3QSGJ+ai8qqtjBwopxLjzIZm7mZVKT6SV1sURzijkOLRgGUHwPemOfYX5biqXuqhpoBg==} - engines: {node: '>=18'} - 
'@inquirer/expand@4.0.6': resolution: {integrity: sha512-TRTfi1mv1GeIZGyi9PQmvAaH65ZlG4/FACq6wSzs7Vvf1z5dnNWsAAXBjWMHt76l+1hUY8teIqJFrWBk5N6gsg==} engines: {node: '>=18'} peerDependencies: '@types/node': '>=18' - '@inquirer/figures@1.0.3': - resolution: {integrity: sha512-ErXXzENMH5pJt5/ssXV0DfWUZqly8nGzf0UcBV9xTnP+KyffE2mqyxIMBrZ8ijQck2nU0TQm40EQB53YreyWHw==} - engines: {node: '>=18'} - '@inquirer/figures@1.0.9': resolution: {integrity: sha512-BXvGj0ehzrngHTPTDqUoDT3NXL8U0RxUk2zJm2A66RhCEIWdtU1v6GuUqNAgArW4PQ9CinqIWyHdQgdwOj06zQ==} engines: {node: '>=18'} - '@inquirer/input@2.1.9': - resolution: {integrity: sha512-1xTCHmIe48x9CG1+8glAHrVVdH+QfYhzgBUbgyoVpp5NovnXgRcjSn/SNulepxf9Ol8HDq3gzw3ZCAUr+h1Eyg==} - engines: {node: '>=18'} - '@inquirer/input@4.1.3': resolution: {integrity: sha512-zeo++6f7hxaEe7OjtMzdGZPHiawsfmCZxWB9X1NpmYgbeoyerIbWemvlBxxl+sQIlHC0WuSAG19ibMq3gbhaqQ==} engines: {node: '>=18'} @@ -1511,30 +1441,18 @@ packages: peerDependencies: '@types/node': '>=18' - '@inquirer/password@2.1.9': - resolution: {integrity: sha512-QPtVcT12Fkn0TyuZJelR7QOtc5l1d/6pB5EfkHOivTzC6QTFxRCHl+Gx7Q3E2U/kgJeCCmDov6itDFggk9nkgA==} - engines: {node: '>=18'} - '@inquirer/password@4.0.6': resolution: {integrity: sha512-QLF0HmMpHZPPMp10WGXh6F+ZPvzWE7LX6rNoccdktv/Rov0B+0f+eyXkAcgqy5cH9V+WSpbLxu2lo3ysEVK91w==} engines: {node: '>=18'} peerDependencies: '@types/node': '>=18' - '@inquirer/prompts@5.0.5': - resolution: {integrity: sha512-LV2XZzc8ls4zhUzYNSpsXcnA8djOptY4G01lFzp3Bey6E1oiZMzIU25N9cb5AOwNz6pqDXpjLwRFQmLQ8h6PaQ==} - engines: {node: '>=18'} - '@inquirer/prompts@7.2.3': resolution: {integrity: sha512-hzfnm3uOoDySDXfDNOm9usOuYIaQvTgKp/13l1uJoe6UNY+Zpcn2RYt0jXz3yA+yemGHvDOxVzqWl3S5sQq53Q==} engines: {node: '>=18'} peerDependencies: '@types/node': '>=18' - '@inquirer/rawlist@2.1.9': - resolution: {integrity: sha512-GuMmfa/v1ZJqEWSkUx1hMxzs5/0DCUP0S8IicV/wu8QrbjfBOh+7mIQgtsvh8IJ3sRkRcQ+9wh9CE9jiYqyMgw==} - engines: {node: '>=18'} - '@inquirer/rawlist@4.0.6': resolution: 
{integrity: sha512-QoE4s1SsIPx27FO4L1b1mUjVcoHm1pWE/oCmm4z/Hl+V1Aw5IXl8FYYzGmfXaBT0l/sWr49XmNSiq7kg3Kd/Lg==} engines: {node: '>=18'} @@ -1547,20 +1465,12 @@ packages: peerDependencies: '@types/node': '>=18' - '@inquirer/select@2.3.5': - resolution: {integrity: sha512-IyBj8oEtmdF2Gx4FJTPtEya37MD6s0KATKsHqgmls0lK7EQbhYSq9GQlcFq6cBsYe/cgQ0Fg2cCqYYPi/d/fxQ==} - engines: {node: '>=18'} - '@inquirer/select@4.0.6': resolution: {integrity: sha512-yANzIiNZ8fhMm4NORm+a74+KFYHmf7BZphSOBovIzYPVLquseTGEkU5l2UTnBOf5k0VLmTgPighNDLE9QtbViQ==} engines: {node: '>=18'} peerDependencies: '@types/node': '>=18' - '@inquirer/type@1.3.3': - resolution: {integrity: sha512-xTUt0NulylX27/zMx04ZYar/kr1raaiFTVvQ5feljQsiAgdm0WPj4S73/ye0fbslh+15QrIuDvfCXTek7pMY5A==} - engines: {node: '>=18'} - '@inquirer/type@3.0.2': resolution: {integrity: sha512-ZhQ4TvhwHZF+lGhQ2O/rsjo80XoZR5/5qhOY3t6FJuX5XBg5Be8YzYTvaUGJnc12AUGI2nr4QSUE4PhKSigx7g==} engines: {node: '>=18'} @@ -1601,6 +1511,18 @@ packages: resolution: {integrity: sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==} engines: {node: '>=8'} + '@jest/create-cache-key-function@29.7.0': + resolution: {integrity: sha512-4QqS3LY5PBmTRHj9sAg1HLoPzqAI0uOX6wI/TRqHIcOxlFidy6YEmCQJk6FSZjNLGCeubDMfmkWL+qaLKhSGQA==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + '@jest/schemas@29.6.3': + resolution: {integrity: sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + '@jest/types@29.6.3': + resolution: {integrity: sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + '@jridgewell/gen-mapping@0.3.3': resolution: {integrity: sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ==} engines: {node: '>=6.0.0'} @@ -2039,36 +1961,9 @@ packages: resolution: {integrity: 
sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} engines: {node: '>=14'} - '@prisma/debug@5.14.0-dev.34': - resolution: {integrity: sha512-mc4Ue07QjYcb4yV0ZXap2AJBLlBAk0owO3fHKWovQA9Ig2XXlxlAUesk9RxPYKj9zIpDZXYMPUC3iKIdUi5SUA==} - - '@prisma/engines-version@5.14.0-6.264f24ce0b2f544ff968ff76bfaa999de1161361': - resolution: {integrity: sha512-XkTJYtdOIrJkJv/tzXzsaUsfyvp82IWSPx4DlR52G0cyKoqT6lC55daIdsnuEoKPM2jPcL6P7dJENYBMGHQLEg==} - - '@prisma/engines@5.14.0-dev.34': - resolution: {integrity: sha512-RWkQHOPxSfy0ANoE0hhrDTf7SuNACILx/LTM1LINlWSYG+Ev/do+5RFbrCv6liCxi1fRZuuhtTux9sH56o01cQ==} - - '@prisma/fetch-engine@5.14.0-dev.34': - resolution: {integrity: sha512-Ieqp/Zfq7KaZWndJAq2K0Z5r77DBPyvXlKXbztXnyvoQhce+9QTkjwJ8U3dOHUwSwNqIb6TY7j1dal3epSUZkg==} - - '@prisma/generator-helper@5.14.0-dev.34': - resolution: {integrity: sha512-AsY7piYVHtaGf/TjSoK2j7pZmG+xX/Mqv/VQMNJmfJDEGAnt1fXg6e6veSGLm/SqxA3JJhVCaX3XUHYDeXnsOg==} - - '@prisma/get-platform@5.14.0-dev.34': - resolution: {integrity: sha512-JlzzUMQKsj1cFMXiGMkqrdP7dl3OZtZQapEeCAoH42J6GCrEuV+qNhTOlkywyNuFDj+j1VjfE7p9HRFO1+kiiw==} - '@prisma/instrumentation@5.13.0': resolution: {integrity: sha512-MEJX1aWLsEjS+2iheBkEy1LlzQuUruPgKEzA9HPMwzitCoUUK1qn5o+yIphU7wWs47Le/cED0egYQL7y9/rSsA==} - '@prisma/internals@5.14.0-dev.34': - resolution: {integrity: sha512-FKToi0h7DFkSZ+eAo737RisLAlRrHq2VPRnm53aVe7LH1J4qwVhl7U+Gy9CsifUgi5VDX311M2W5hyaRcBs46A==} - - '@prisma/prisma-schema-wasm@5.14.0-6.264f24ce0b2f544ff968ff76bfaa999de1161361': - resolution: {integrity: sha512-lMNW0WEI+eP5gPn+blBj2yK2znvQlWQbbcOdbqR6PmOOMZRPXbfoC1LgxFn0QrZalJ1csJSFPjmQiYcrv9/39w==} - - '@prisma/schema-files-loader@5.14.0-dev.34': - resolution: {integrity: sha512-oO0dMzBJbNN3OwcNpRpKO6iq/rqWg02OKBeUI+Qy3Cwrqo5SlKO+DeolkUnx2PPWiHitDX/8UkGRBkMRG0HI9g==} - '@protobufjs/aspromise@1.1.2': resolution: {integrity: sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==} @@ 
-2207,15 +2102,6 @@ packages: '@safe-global/types-kit@1.0.0': resolution: {integrity: sha512-jZNUeHbWobeVrURbcEvfas4Q1IDasQni5UYm2umUtAR6SBDazp1kGni8IjZPRKq3+8q+fYwu9FmKpX50rUYn3w==} - '@sagold/json-pointer@5.1.2': - resolution: {integrity: sha512-+wAhJZBXa6MNxRScg6tkqEbChEHMgVZAhTHVJ60Y7sbtXtu9XA49KfUkdWlS2x78D6H9nryiKePiYozumauPfA==} - - '@sagold/json-query@6.2.0': - resolution: {integrity: sha512-7bOIdUE6eHeoWtFm8TvHQHfTVSZuCs+3RpOKmZCDBIOrxpvF/rNFTeuvIyjHva/RR0yVS3kQtr+9TW72LQEZjA==} - - '@scaleleap/pg-format@1.0.0': - resolution: {integrity: sha512-gFkcYMnpeylF2OJ30FsDBjwICB9JTiZ5i3guPwdiBDrJFwIKr+Zk6jwI8Mg22a4FwXn5ezd5cHEFMKqBqBz4RQ==} - '@scure/base@1.1.5': resolution: {integrity: sha512-Brj9FiG2W1MRQSTB212YVPRrcbjkv48FoZi/u4l/zds/ieRrqsh7aUf6CLwkAq61oKXr/ZlTzlY66gLIj3TFTQ==} @@ -2265,10 +2151,6 @@ packages: resolution: {integrity: sha512-TmfrII8w1PQZSZgPpUESqjB+jC6MvZJZdLtE/0hZ+SrnKhW3x5WlYLvTXZpcWePYBku7rl2wn1RZu6uT0qCTeg==} engines: {node: '>=6'} - '@sentry/core@7.114.0': - resolution: {integrity: sha512-YnanVlmulkjgZiVZ9BfY9k6I082n+C+LbZo52MTvx3FY6RE5iyiPMpaOh67oXEZRWcYQEGm+bKruRxLVP6RlbA==} - engines: {node: '>=8'} - '@sentry/core@8.2.1': resolution: {integrity: sha512-xHS+DGZodTwXkoqe35UnNR9zWZ7I8pptXGxHntPrNnd/PmXK3ysj4NsRBshtSzDX3gWfwUsMN+vmjrYSwcfYeQ==} engines: {node: '>=14.18'} @@ -2277,10 +2159,6 @@ packages: resolution: {integrity: sha512-2tYrGnzb1gKz2EkMDQcfLrDTvmGcQPuWxLnJKXJvYTQDGLlEvi2tWz1VIHjunmOvJrB5aIQLhm+dcMRwFZDCqQ==} engines: {node: '>=6'} - '@sentry/integrations@7.114.0': - resolution: {integrity: sha512-BJIBWXGKeIH0ifd7goxOS29fBA8BkEgVVCahs6xIOXBjX1IRS6PmX0zYx/GP23nQTfhJiubv2XPzoYOlZZmDxg==} - engines: {node: '>=8'} - '@sentry/minimal@5.30.0': resolution: {integrity: sha512-BwWb/owZKtkDX+Sc4zCSTNcvZUq7YcH3uAVlmh/gtR9rmUvbzAA3ewLuB3myi4wWRAMEtny6+J/FN/x+2wn9Xw==} engines: {node: '>=6'} @@ -2316,10 +2194,6 @@ packages: resolution: {integrity: 
sha512-R8xOqlSTZ+htqrfteCWU5Nk0CDN5ApUTvrlvBuiH1DyP6czDZ4ktbZB0hAgBlVcK0U+qpD3ag3Tqqpa5Q67rPw==} engines: {node: '>=6'} - '@sentry/types@7.114.0': - resolution: {integrity: sha512-tsqkkyL3eJtptmPtT0m9W/bPLkU7ILY7nvwpi1hahA5jrM7ppoU0IMaQWAgTD+U3rzFH40IdXNBFb8Gnqcva4w==} - engines: {node: '>=8'} - '@sentry/types@8.2.1': resolution: {integrity: sha512-22ZuANU6Dj/XSvaGhcmNTKD+6WcMc7Zn5uKd8Oj7YcuME6rOnrU8dPGEVwbGTQkE87mTDjVTDSxl8ipb0L+Eag==} engines: {node: '>=14.18'} @@ -2328,10 +2202,6 @@ packages: resolution: {integrity: sha512-zaYmoH0NWWtvnJjC9/CBseXMtKHm/tm40sz3YfJRxeQjyzRqNQPgivpd9R/oDJCYj999mzdW382p/qi2ypjLww==} engines: {node: '>=6'} - '@sentry/utils@7.114.0': - resolution: {integrity: sha512-319N90McVpupQ6vws4+tfCy/03AdtsU0MurIE4+W5cubHME08HtiEWlfacvAxX+yuKFhvdsO4K4BB/dj54ideg==} - engines: {node: '>=8'} - '@sentry/utils@8.2.1': resolution: {integrity: sha512-qFeiCdo+QUVpwNSwe63LOPEKc8GWmJ051twtV3tfZ62XgUYOOi2C0qC6mliY3+GKiGVV8fQE6S930nM//j7G1w==} engines: {node: '>=14.18'} @@ -2339,6 +2209,9 @@ packages: '@shikijs/core@1.12.1': resolution: {integrity: sha512-biCz/mnkMktImI6hMfMX3H9kOeqsInxWEyCHbSlL8C/2TR1FqfmGxTLRNwYCKsyCyxWLbB8rEqXRVZuyxuLFmA==} + '@sinclair/typebox@0.27.8': + resolution: {integrity: sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==} + '@sindresorhus/is@4.6.0': resolution: {integrity: sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw==} engines: {node: '>=10'} @@ -2361,38 +2234,6 @@ packages: '@sinonjs/text-encoding@0.7.2': resolution: {integrity: sha512-sXXKG+uL9IrKqViTtao2Ws6dy0znu9sOaP1di/jKGW1M6VssO8vlpXCQcpZ+jisQ1tTFAC5Jo/EOzFbggBagFQ==} - '@snaplet/copycat@5.0.0': - resolution: {integrity: sha512-qapZN1mwVO5v1GmUW66gXoZ8qtpsqJk+tPUb3lGtyzwYtAPifHA0uymsi/Pjv6SVvl9SQhD2Af6Bb8Eime856g==} - - '@snaplet/seed@0.97.20': - resolution: {integrity: sha512-+lnqESgwP92O1266vsTyoRgrg4hMCUTybBUxDT1ICMBFcvdjgwcOaUt8Xjj81YvxYkZlu5+TTBIjyNQT4nP4jQ==} - engines: 
{node: '>=18.5.0'} - hasBin: true - peerDependencies: - '@prisma/client': '>=5' - '@snaplet/copycat': '>=2' - '@types/better-sqlite3': '*' - '@types/pg': '*' - better-sqlite3: '>=9' - mysql2: '>=3' - pg: '>=8' - postgres: '>=3' - peerDependenciesMeta: - '@prisma/client': - optional: true - '@types/better-sqlite3': - optional: true - '@types/pg': - optional: true - better-sqlite3: - optional: true - mysql2: - optional: true - pg: - optional: true - postgres: - optional: true - '@storacha/one-webcrypto@1.0.1': resolution: {integrity: sha512-bD+vWmcgsEBqU0Dz04BR43SA03bBoLTAY29vaKasY9Oe8cb6XIP0/vkm0OS2UwKC13c8uRgFW4rjJUgDCNLejQ==} @@ -2570,6 +2411,15 @@ packages: '@swc/counter@0.1.3': resolution: {integrity: sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ==} + '@swc/helpers@0.5.15': + resolution: {integrity: sha512-JQ5TuMi45Owi4/BIMAJBoSQoOJu12oOk/gADqlcUL9JEdHB8vyjUSsxqeNXnmXHjYKMi2WcYtezGEEhqUI/E2g==} + + '@swc/jest@0.2.37': + resolution: {integrity: sha512-CR2BHhmXKGxTiFr21DYPRHQunLkX3mNIFGFkxBGji6r9uyIR5zftTOVYj1e0sFNMV2H7mf/+vpaglqaryBtqfQ==} + engines: {npm: '>= 7.0.0'} + peerDependencies: + '@swc/core': '*' + '@swc/types@0.1.17': resolution: {integrity: sha512-V5gRru+aD8YVyCOMAjMpWR1Ui577DD5KSJsHP8RAxopAH22jFz6GZd/qxqjO6MJHQhcsjvjOFXyDhyLQUnMveQ==} @@ -2583,17 +2433,6 @@ packages: '@tokenizer/token@0.3.0': resolution: {integrity: sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A==} - '@total-typescript/ts-reset@0.5.1': - resolution: {integrity: sha512-AqlrT8YA1o7Ff5wPfMOL0pvL+1X+sw60NN6CcOCqs658emD6RfiXhF7Gu9QcfKBH7ELY2nInLhKSCWVoNL70MQ==} - - '@trpc/client@10.45.2': - resolution: {integrity: sha512-ykALM5kYWTLn1zYuUOZ2cPWlVfrXhc18HzBDyRhoPYN0jey4iQHEFSEowfnhg1RvYnrAVjNBgHNeSAXjrDbGwg==} - peerDependencies: - '@trpc/server': 10.45.2 - - '@trpc/server@10.45.2': - resolution: {integrity: 
sha512-wOrSThNNE4HUnuhJG6PfDRp4L2009KDVxsd+2VYH8ro6o/7/jwYZ8Uu5j+VaW+mOmc8EHerHzGcdbGNQSAUPgg==} - '@ts-morph/common@0.20.0': resolution: {integrity: sha512-7uKjByfbPpwuzkstL3L5MQyuXPSKdoNG93Fmi2JoDcTf3pEP731JdRFAduRVkOs8oqxPsXKA+ScrWkdQ8t/I+Q==} @@ -2672,6 +2511,15 @@ packages: '@types/http-errors@2.0.4': resolution: {integrity: sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA==} + '@types/istanbul-lib-coverage@2.0.6': + resolution: {integrity: sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==} + + '@types/istanbul-lib-report@3.0.3': + resolution: {integrity: sha512-NQn7AHQnk/RSLOxrBbGyJM/aVQ+pjj5HCgasFxc0K/KhoATfQ/47AyUl15I2yBUpihjmas+a+VJBOqecrFH+uA==} + + '@types/istanbul-reports@3.0.4': + resolution: {integrity: sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ==} + '@types/js-yaml@4.0.9': resolution: {integrity: sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg==} @@ -2720,9 +2568,6 @@ packages: '@types/multer@1.4.12': resolution: {integrity: sha512-pQ2hoqvXiJt2FP9WQVLPRO+AmiIm/ZYkavPlIQnx282u4ZrVdztx0pkh3jjpQt0Kz+YI0YhSG264y08UJKoUQg==} - '@types/mute-stream@0.0.4': - resolution: {integrity: sha512-CPM9nzrCPPJHQNA9keH9CVkVI+WR5kMa+7XEs5jcGQ0VoAGnLv242w8lIVgwAEfmE4oufJRaTc9PNLQl0ioAow==} - '@types/mysql@2.15.22': resolution: {integrity: sha512-wK1pzsJVVAjYCSZWQoWHziQZbNggXFDUEIGf54g4ZM/ERuP86uGdWeKZWMYlqTPMZfHJJvLPyogXGvCOg87yLQ==} @@ -2735,9 +2580,6 @@ packages: '@types/node@20.10.6': resolution: {integrity: sha512-Vac8H+NlRNNlAmDfGUP7b5h/KA+AtWIzuXy0E6OyP8f1tCLYAtPvKRRDJjAPqhpCb0t6U2j7/xqAuLEebW2kiw==} - '@types/node@20.14.0': - resolution: {integrity: sha512-5cHBxFGJx6L4s56Bubp4fglrEpmyJypsqI6RgzMfBHWUJQGWAAi8cWcgetEbZXHYXo9C2Fa4EEds/uSyS4cxmA==} - '@types/pbkdf2@3.1.2': resolution: {integrity: 
sha512-uRwJqmiXmh9++aSu1VNEn3iIxWOhd8AHXNSdlaLfdAAdSTY9jYVeGWnzejM3dvrkbqE3/hyQkQQ29IFATEGlew==} @@ -2792,15 +2634,15 @@ packages: '@types/unist@3.0.2': resolution: {integrity: sha512-dqId9J8K/vGi5Zr7oo212BGii5m3q5Hxlkwy3WpYuKPklmBEvsbMYYyLxAQpSffdLl/gdW0XUpKWFvYmyoWCoQ==} - '@types/urijs@1.19.25': - resolution: {integrity: sha512-XOfUup9r3Y06nFAZh3WvO0rBU4OtlfPB/vgxpjg+NRdGU6CN6djdc6OEiH+PcqHCY6eFLo9Ista73uarf4gnBg==} - - '@types/wrap-ansi@3.0.0': - resolution: {integrity: sha512-ltIpx+kM7g/MLRZfkbL7EsCEjfzCcScLpkg37eXEtx5kmrAKBkTJwd1GIAjDSL8wTpM6Hzn5YO4pSb91BEwu1g==} - '@types/ws@8.5.10': resolution: {integrity: sha512-vmQSUcfalpIq0R9q7uTo2lXs6eGIpt9wtnLdMv9LVpIjCA/+ufZRozlVoVelIYixx1ugCBKDhn89vnsEGOCx9A==} + '@types/yargs-parser@21.0.3': + resolution: {integrity: sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==} + + '@types/yargs@17.0.33': + resolution: {integrity: sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA==} + '@typescript-eslint/eslint-plugin@7.7.0': resolution: {integrity: sha512-GJWR0YnfrKnsRoluVO3PRb9r5aMZriiMMM/RHj5nnTrBy1/wIgk76XCtCKcnXGjpZQJQRFtGV9/0JJ6n30uwpQ==} engines: {node: ^18.18.0 || >=20.0.0} @@ -2865,9 +2707,6 @@ packages: '@ucanto/core@10.0.1': resolution: {integrity: sha512-1BfUaJu0/c9Rl/WdZSDbScJJLsPsPe1g4ynl5kubUj3xDD/lyp/Q12PQVQ2X7hDiWwkpwmxCkRMkOxwc70iNKQ==} - '@ucanto/core@9.0.1': - resolution: {integrity: sha512-SsYvKCO3FD27roTVcg8ASxnixjn+j96sPlijpVq1uBUxq7SmuNxNPYFZqpxXKj2R4gty/Oc8XTse12ebB9Kofg==} - '@ucanto/interface@10.0.1': resolution: {integrity: sha512-+Vr/N4mLsdynV9/bqtdFiq7WsUf3265/Qx2aHJmPtXo9/QvWKthJtpe0g8U4NWkWpVfqIFvyAO2db6D9zWQfQw==} @@ -3015,9 +2854,6 @@ packages: resolution: {integrity: sha512-9bRgQTXfxWrYIyeUvuZ9FzQSxUE3uNyxh0C3NnHeYs6Vx+1+dBlNSujI9WEZolY/dvGuFq+oVePn6k67iblHIA==} engines: {node: '>=18.0.0'} - '@wry/equality@0.1.11': - resolution: {integrity: 
sha512-mwEVBDUVODlsQQ5dfuLUS5/Tf7jqUKyhKYHmVi4fPB6bDMOfWvUPJmKgS1Z7Za/sOI3vzWt4+O7yCiL/70MogA==} - JSONStream@1.3.5: resolution: {integrity: sha512-E+iruNOY8VV9s4JEbe1aNEm6MiszPRr/UfcHMz0TQh1BXSxHK+ASV1R6W4HpjBhSeS+54PIsAMCBmwD06LLsqQ==} hasBin: true @@ -3058,10 +2894,6 @@ packages: zod: optional: true - abort-controller@3.0.0: - resolution: {integrity: sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==} - engines: {node: '>=6.5'} - accepts@1.3.8: resolution: {integrity: sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==} engines: {node: '>= 0.6'} @@ -3090,6 +2922,11 @@ packages: engines: {node: '>=0.4.0'} hasBin: true + acorn@8.14.0: + resolution: {integrity: sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==} + engines: {node: '>=0.4.0'} + hasBin: true + actor@2.3.1: resolution: {integrity: sha512-ST/3wnvcP2tKDXnum7nLCLXm+/rsf8vPocXH2Fre6D8FQwNkGDd4JEitBlXj007VQJfiGYRQvXqwOBZVi+JtRg==} @@ -3141,14 +2978,6 @@ packages: resolution: {integrity: sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==} engines: {node: '>=8'} - ansi-escapes@5.0.0: - resolution: {integrity: sha512-5GFMVX8HqE/TB+FuBJGuO5XG0WrsA6ptUqoODaT/n9mmUaZFkqnBueB4leqGBCmrUHnCnC4PCZTCd0E7QQ83bA==} - engines: {node: '>=12'} - - ansi-escapes@6.2.1: - resolution: {integrity: sha512-4nJ3yixlEthEJ9Rk4vPcdBRkZvQZlYyu8j4/Mqz5sgIkddmEnH2Yj2ZrnP9S3tQOvSNRUIgVNF/1yPpRAGNRig==} - engines: {node: '>=14.16'} - ansi-escapes@7.0.0: resolution: {integrity: sha512-GdYO7a61mR0fOlAsvC9/rIHf7L96sBc6dEWzeOu+KAea5bZyQRPIpojrVoI4AXGJS/ycu/fBTdLrUkA4ODrvjw==} engines: {node: '>=18'} @@ -3180,16 +3009,6 @@ packages: resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==} engines: {node: '>= 8'} - apollo-link@1.2.14: - resolution: {integrity: 
sha512-p67CMEFP7kOG1JZ0ZkYZwRDa369w5PIjtMjvrQd/HnIV8FRsHRqLqK+oAZQnFa1DDdZtOtHTi+aMIW6EatC2jg==} - peerDependencies: - graphql: ^0.11.3 || ^0.12.3 || ^0.13.0 || ^14.0.0 || ^15.0.0 - - apollo-utilities@1.3.4: - resolution: {integrity: sha512-pk2hiWrCXMAy2fRPwEyhvka+mqwzeP60Jr1tRYi5xru+3ko94HI9o6lK0CT33/w4RDlxWchmdhDCrvdr+pHCig==} - peerDependencies: - graphql: ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 - append-field@1.0.0: resolution: {integrity: sha512-klpgFSWLW1ZEs8svjfb7g4qWY0YS5imI82dTg+QahUvJ8YqAY0P10Uk8tTyh9ZGuYEZEMaeJYCF5BFuX552hsw==} @@ -3199,16 +3018,9 @@ packages: arg@4.1.3: resolution: {integrity: sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==} - arg@5.0.2: - resolution: {integrity: sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==} - argparse@2.0.1: resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} - array-differ@4.0.0: - resolution: {integrity: sha512-Q6VPTLMsmXZ47ENG3V+wQyZS1ZxXMxFyYzA+Z/GMrJ6yIutAIEf9wTyroTzmGjNfox9/h3GdGBCVh43GVFx4Uw==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - array-flatten@1.1.1: resolution: {integrity: sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==} @@ -3219,10 +3031,6 @@ packages: resolution: {integrity: sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==} engines: {node: '>=8'} - array-union@3.0.1: - resolution: {integrity: sha512-1OvF9IbWwaeiM9VhzYXVQacMibxpXOMYVNIvMtKRyX9SImBXpKcFr8XvFDeEslCyuH/t6KRt7HEO94AlP8Iatw==} - engines: {node: '>=12'} - asap@2.0.6: resolution: {integrity: sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==} @@ -3238,9 +3046,6 @@ packages: resolution: {integrity: sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==} engines: {node: '>=8'} - 
async@2.6.4: - resolution: {integrity: sha512-mzo5dfJYwAn29PeiJ0zvwTo04zj8HDJj0Mn8TD7sno7q12prdbnasKJHhkm2c1LgrhlJ0teaea8860oxi51mGA==} - asynckit@0.4.0: resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} @@ -3251,9 +3056,6 @@ packages: resolution: {integrity: sha512-Hdw8qdNiqdJ8LqT0iK0sVzkFbzg6fhnQqqfWhBDxcHZvU75+B+ayzTy8x+k5Ix0Y92XOhOUlx74ps+bA6BeYMQ==} engines: {node: '>=8'} - axios@1.6.5: - resolution: {integrity: sha512-Ii012v05KEVuUoFWmMW/UQv9aRIc3ZwkWDcM+h5Il8izZCtRVpDUfwpoFf7eOtajT3QiGR4yDUx7lPqHJULgbg==} - axios@1.7.9: resolution: {integrity: sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw==} @@ -3324,10 +3126,6 @@ packages: resolution: {integrity: sha512-9gYgQKXx+1nP8mP7CzFyaUARhg7D3n1dF/FnErWmu9l6JvGpNUN278h0aSb+QjoiKSWG+iZ3uHrcqk0qrY9RQQ==} engines: {node: '>=10'} - boxen@7.1.1: - resolution: {integrity: sha512-2hCgjEmP8YLWQ130n2FerGv7rYpfBmnmp9Uy2Le1vge6X3gZIfSmEzP5QTDElFxcvVcXlEn8Aq6MU/PZygIOog==} - engines: {node: '>=14.16'} - brace-expansion@1.1.11: resolution: {integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==} @@ -3345,9 +3143,6 @@ packages: brorand@1.1.0: resolution: {integrity: sha512-cKV8tMCEpQs4hK/ik71d6LrPOnpkpGBR0wzxqr68g2m/LB2GxVYQroAjMJZRVM1Y4BCjCKc3vAamxSzOY2RP+w==} - browser-or-node@2.1.1: - resolution: {integrity: sha512-8CVjaLJGuSKMVTxJ2DpBl5XnlNDiT4cQFeuCJJrvJmts9YrTZDizTX7PjC2s6W4x+MBGZeEY6dGMrF04/6Hgqg==} - browser-readablestream-to-it@1.0.3: resolution: {integrity: sha512-+12sHB+Br8HIh6VAMVEG5r3UXCyESIgDW7kzk3BjIXa43DVqVwL7GC5TW3jeh+72dtcH99pPVpw0X8i0jt+/kw==} @@ -3394,9 +3189,6 @@ packages: resolution: {integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==} engines: {node: '>= 0.8'} - c12@1.10.0: - resolution: {integrity: 
sha512-0SsG7UDhoRWcuSvKWHaXmu5uNjDCDN3nkQLRL4Q42IlFy+ze58FcCoI3uPwINXinkz7ZinbhEgyzYFw9u9ZV8g==} - cac@6.7.14: resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} engines: {node: '>=8'} @@ -3428,10 +3220,6 @@ packages: resolution: {integrity: sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==} engines: {node: '>=10'} - camelcase@7.0.1: - resolution: {integrity: sha512-xlx1yCK2Oc1APsPXDL2LdlNP6+uu8OCDdhOBSVT279M/S+y75O30C2VuD8T2ogdePBBl7PfPF4504tnLgX3zfw==} - engines: {node: '>=14.16'} - caniuse-lite@1.0.30001588: resolution: {integrity: sha512-+hVY9jE44uKLkH0SrUTqxjxqNTOWHsbnQDIKjwkZ3lNTzUUVdBLBGXtj/q5Mp5u98r3droaZAewQuEDzjQdZlQ==} @@ -3476,9 +3264,6 @@ packages: change-case@4.1.2: resolution: {integrity: sha512-bSxY2ws9OtviILG1EiY5K7NNxkqg/JnRnFxLtKQ96JaviiIxi7djMrSd0ECT9AC+lttClmYwKw53BWpOMblo7A==} - change-case@5.4.4: - resolution: {integrity: sha512-HRQyTk2/YPEkt9TnUPbOpr64Uw3KOicFWPVBb+xiHvd6eBx/qPr9xqfBFDT8P2vWsvvz4jbEkfDe71W3VyNu2w==} - chardet@0.7.0: resolution: {integrity: sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==} @@ -3494,18 +3279,10 @@ packages: resolution: {integrity: sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==} engines: {node: '>= 8.10.0'} - chokidar@3.6.0: - resolution: {integrity: sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==} - engines: {node: '>= 8.10.0'} - chokidar@4.0.1: resolution: {integrity: sha512-n8enUVCED/KVRQlab1hr3MVpcVMvxtZjmEa956u+4YijlmQED223XMSYj2tLuKvr4jcCTzNNMpQDUer72MMmzA==} engines: {node: '>= 14.16.0'} - chownr@2.0.0: - resolution: {integrity: sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==} - engines: {node: '>=10'} - chownr@3.0.0: resolution: {integrity: 
sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==} engines: {node: '>=18'} @@ -3513,16 +3290,9 @@ packages: ci-info@2.0.0: resolution: {integrity: sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==} - ci-info@4.0.0: - resolution: {integrity: sha512-TdHqgGf9odd8SXNuxtUBVx8Nv+qZOejE6qyqiy5NtbYYQOeFa6zmHkxlPzmaLxWWHsU6nJmB7AETdVPi+2NBUg==} - engines: {node: '>=8'} - cipher-base@1.0.4: resolution: {integrity: sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q==} - citty@0.1.6: - resolution: {integrity: sha512-tskPPKEs8D2KPafUypv2gxwJP8h/OaJmC82QQGGDQcHvXX43xF2VDACcJVmZ0EuSxkpO9Kc4MlrA3q0+FG58AQ==} - cjs-module-lexer@1.3.1: resolution: {integrity: sha512-a3KdPAANPbNE4ZUv9h6LckSl9zLsYOP4MBmhIPkRaeyybt+r4UghLvq+xw/YwUcC1gqylCkL4rdVs3Lwupjm4Q==} @@ -3534,18 +3304,10 @@ packages: resolution: {integrity: sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw==} engines: {node: '>=6'} - cli-boxes@3.0.0: - resolution: {integrity: sha512-/lzGpEWL/8PfI0BmBOPRwp0c/wFNX1RdUML3jK/RcSBA9T8mZDdQpqYBKtCFTOfQbwPqWEOpjqW+Fnayc0969g==} - engines: {node: '>=10'} - cli-cursor@3.1.0: resolution: {integrity: sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==} engines: {node: '>=8'} - cli-cursor@4.0.0: - resolution: {integrity: sha512-VGtlMu3x/4DOtIUwEkRezxUZ2lBacNJCHash0N0WeZDBS+7Ux1dm3XWAgWYxLJFMMdOeXMHXorshEFhbMSGelg==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - cli-cursor@5.0.0: resolution: {integrity: sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw==} engines: {node: '>=18'} @@ -3594,9 +3356,6 @@ packages: code-block-writer@12.0.0: resolution: {integrity: sha512-q4dMFMlXtKR3XNBHyMHt/3pwYNA69EDk00lloMOaaUMKPUXBw6lpXtbu3MMVG6/uOihGnRDOlkyqsONEUj60+w==} - collection-utils@1.0.1: - resolution: {integrity: 
sha512-LA2YTIlR7biSpXkKYwwuzGjwL5rjWEZVOSnvdUc7gObvWe4WkjxOpfrdhoP7Hs09YWDVfg0Mal9BpAqLfVEzQg==} - color-convert@1.9.3: resolution: {integrity: sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==} @@ -3654,13 +3413,6 @@ packages: resolution: {integrity: sha512-jjyhlQ0ew/iwmtwsS2RaB6s8DBifcE2GYBEaw2SJDUY/slJJbNfY4GlDVzOs/ff8cM/Wua5CikqXgbFl5eu85A==} engines: {node: '>=14.16'} - confbox@0.1.7: - resolution: {integrity: sha512-uJcB/FKZtBMCJpK8MQji6bJHgu1tixKPxRLeGkNzBoOZzpnZUJm0jm2/sBDWcuBx1dYgxV4JU+g5hmNxCyAmdA==} - - consola@3.2.3: - resolution: {integrity: sha512-I5qxpzLv+sJhTVEoLYNcTW+bThDCPsit0vLNKShZx6rLtpilNpmmeTPaeqJb9ZE9dV3DGaeby6Vuhrw38WjeyQ==} - engines: {node: ^14.18.0 || >=16.10.0} - constant-case@3.0.4: resolution: {integrity: sha512-I2hSBi7Vvs7BEuJDr5dDHfzb/Ruj3FyvFyh7KLilAjNQw3Be+xgqUBA2W6scVEcL0hL1dwPRtIqEPVUCKkSsyQ==} @@ -3744,9 +3496,6 @@ packages: cross-fetch@3.1.8: resolution: {integrity: sha512-cvA+JwZoU0Xq+h6WkMvAUqPEYy92Obet6UdKLfW60qn99ftItKjB5T+BkyWOFWe2pUyfQ+IJHmpOTznqk1M6Kg==} - cross-fetch@4.0.0: - resolution: {integrity: sha512-e4a5N8lVvuLgAWgnCrLr2PP0YyDOTHa9H/Rj54dirp61qXnNq46m82bRhNqIA5VccJtWBvPTFRV3TtvHUKPB1g==} - cross-inspect@1.0.0: resolution: {integrity: sha512-4PFfn4b5ZN6FMNGSZlyb7wUhuN8wvj8t/VQHZdM4JsDcruGJ8L2kf9zao98QIrBPFCpdk27qst/AGTl7pL3ypQ==} engines: {node: '>=16.0.0'} @@ -3776,9 +3525,6 @@ packages: resolution: {integrity: sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==} engines: {node: '>= 12'} - dataloader@2.1.0: - resolution: {integrity: sha512-qTcEYLen3r7ojZNgVUaRggOI+KM7jrKxXeSHhogh/TWxYMeONEMqY+hmkobiYQozsGIyg9OYVzO4ZIfoB4I0pQ==} - dataloader@2.2.3: resolution: {integrity: sha512-y2krtASINtPFS1rSDjacrFgn1dcUuoREVabwlOGOe4SdxenREqwjwjElAdwvbGM7kgZz9a3KVicWR7vcz8rnzA==} @@ -3786,6 +3532,9 @@ packages: resolution: {integrity: sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw==} 
engines: {node: '>=0.11'} + date-fns@4.1.0: + resolution: {integrity: sha512-Ukq0owbQXxa/U3EGtsdVBkR1w7KOQ5gIBqdH2hkvknzZPYvBxb/aa6E8L7tmjFtkwZBu3UXBbjIgPo/Ez4xaNg==} + debounce-fn@5.1.2: resolution: {integrity: sha512-Sr4SdOZ4vw6eQDvPYNxHogvrxmCIld/VenC5JbNrFwMiwd7lY/Z18ZFfo+EWNG4DD9nFlAujWAo/wGuOPHmy5A==} engines: {node: '>=12'} @@ -3801,14 +3550,6 @@ packages: supports-color: optional: true - debug@3.2.7: - resolution: {integrity: sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==} - peerDependencies: - supports-color: '*' - peerDependenciesMeta: - supports-color: - optional: true - debug@4.3.4: resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==} engines: {node: '>=6.0'} @@ -3844,21 +3585,10 @@ packages: resolution: {integrity: sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==} engines: {node: '>=10'} - decimal.js@10.5.0: - resolution: {integrity: sha512-8vDa8Qxvr/+d94hSh5P3IJwI5t8/c0KsMp+g8bNw9cY2icONa5aPfvKeieW1WlG0WQYwwhJ7mjui2xtiePQSXw==} - decompress-response@6.0.0: resolution: {integrity: sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==} engines: {node: '>=10'} - dedent@1.5.3: - resolution: {integrity: sha512-NHQtfOOW68WD8lgypbLA5oT+Bt0xXJhiYvoR6SmmNXZfpzOGXwdKWmcwG8N7PwVVWV3eF/68nmD9BaJSsTBhyQ==} - peerDependencies: - babel-plugin-macros: ^3.1.0 - peerDependenciesMeta: - babel-plugin-macros: - optional: true - deep-eql@5.0.1: resolution: {integrity: sha512-nwQCf6ne2gez3o1MxWifqkciwt0zhl0LO1/UwVu4uMBuPmflWM4oQ70XMqHqnBJA+nhzncaqL9HVL6KkHJ28lw==} engines: {node: '>=6'} @@ -3866,10 +3596,6 @@ packages: deep-is@0.1.4: resolution: {integrity: sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==} - deepmerge@4.3.1: - resolution: {integrity: 
sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==} - engines: {node: '>=0.10.0'} - defaults@1.0.4: resolution: {integrity: sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A==} @@ -3881,9 +3607,6 @@ packages: resolution: {integrity: sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==} engines: {node: '>= 0.4'} - defu@6.1.4: - resolution: {integrity: sha512-mEQCMmwJu317oSz8CwdIOdwf3xMif1ttiM8LTufzc3g6kR+9Pe236twL8j3IYT1F7GfRgGcW6MWxzZjLIkuHIg==} - delayed-stream@1.0.0: resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} engines: {node: '>=0.4.0'} @@ -3900,12 +3623,6 @@ packages: resolution: {integrity: sha512-JeMq7fEshyepOWDfcfHK06N3MhyPhz++vtqWhMT5O9A3K42rdsEDpfdVqjaqaAhsw6a+ZqeDvQVtD0hFHQWrzg==} engines: {node: '>= 0.6.0'} - deprecated-decorator@0.1.6: - resolution: {integrity: sha512-MHidOOnCHGlZDKsI21+mbIIhf4Fff+hhCTB7gtVg4uoIqjcrTZc5v6M+GS2zVI0sV7PqK415rb8XaOSQsQkHOw==} - - destr@2.0.3: - resolution: {integrity: sha512-2N3BOUU4gYMpTP24s5rF5iP7BDr7uNTCs4ozw3kf/eKfvWSIu93GEBi5m427YoyJoeOzQ5smuu4nNAPGb8idSQ==} - destroy@1.2.0: resolution: {integrity: sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==} engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16} @@ -3956,10 +3673,6 @@ packages: resolution: {integrity: sha512-IPzF4w4/Rd94bA9imS68tZBaYyBWSCE47V1RGuMrB94iyTOIEwRmVL2x/4An+6mETpLrKJ5hQkB8W4kFAadeIQ==} engines: {node: '>=12'} - dotenv@16.4.5: - resolution: {integrity: sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==} - engines: {node: '>=12'} - dotenv@16.4.7: resolution: {integrity: sha512-47qPchRCykZC03FhkYAhrvwU4xDBFIj1QPqaarj6mdM/hgUzfPHcpkHJOn3mJAufFeeAxAzeGsr5X0M4k6fLZQ==} engines: {node: '>=12'} @@ -3975,10 +3688,6 @@ packages: eastasianwidth@0.2.0: resolution: 
{integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} - ebnf@1.9.1: - resolution: {integrity: sha512-uW2UKSsuty9ANJ3YByIQE4ANkD8nqUPO7r6Fwcc1ADKPe9FRdcPpMl3VEput4JSvKBJ4J86npIC2MLP0pYkCuw==} - hasBin: true - ee-first@1.1.1: resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==} @@ -4145,17 +3854,9 @@ packages: resolution: {integrity: sha512-CUnVOQq7gSpDHZVVrQW8ExxUETWrnrvXYvYz55wOU8Uj4VCgw56XC2B/fVqQN+f7gmrnRHSLVnFAwsCuNwji8w==} engines: {node: '>=6.5.0', npm: '>=3'} - event-target-shim@5.0.1: - resolution: {integrity: sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==} - engines: {node: '>=6'} - eventemitter3@5.0.1: resolution: {integrity: sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==} - events@3.3.0: - resolution: {integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==} - engines: {node: '>=0.8.x'} - evp_bytestokey@1.0.3: resolution: {integrity: sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA==} @@ -4175,10 +3876,6 @@ packages: resolution: {integrity: sha512-8iA79xD3uAch729dUG8xaaBBFGaEa0wdD2VkYLFHwlqosEj/jT66AzcreRDSgV7ehnNLBW2WR5jIXwGKjVdTLg==} engines: {node: '>=4'} - exit-hook@4.0.0: - resolution: {integrity: sha512-Fqs7ChZm72y40wKjOFXBKg7nJZvQJmewP5/7LtePDdnah/+FH9Hp5sgMujSCMPXlxOAW2//1jrW9pnsY7o20vQ==} - engines: {node: '>=18'} - expect-type@1.1.0: resolution: {integrity: sha512-bFi65yM+xZgk+u/KRIpekdSYkTB5W1pEf0Lt8Q8Msh7b+eQ7LXVtIB1Bkm4fvclDEL1b2CZkMhv2mOeF8tMdkA==} engines: {node: '>=12.0.0'} @@ -4203,9 +3900,6 @@ packages: resolution: {integrity: sha512-FuoE1qtbJ4bBVvv94CC7s0oTnKUGvQs+Rjf1L2SJFfS+HTVVjhPFtehPdQ0JiGPqVNfSSZvL5yzHHQq2Z4WNhQ==} engines: {node: ^12.20 || >= 14.13} - fast-copy@3.0.2: - resolution: {integrity: 
sha512-dl0O9Vhju8IrcLndv2eU4ldt1ftXMqqfgN4H1cpmGV7P6jeB9FwpN9a2c8DPGE1Ys88rNUJVYDHq73CGAGOPfQ==} - fast-decode-uri-component@1.0.1: resolution: {integrity: sha512-WKgKWg5eUxvRZGwW8FvfbaH7AXSh2cL+3j5fMGzUMCxWBJ3dV3a7Wz8y2f/uQ0e3B6WmodD3oS54jTQ9HVTIIg==} @@ -4255,9 +3949,6 @@ packages: resolution: {integrity: sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==} engines: {node: ^12.20 || >= 14.13} - fictional@2.1.1: - resolution: {integrity: sha512-lHrMISII22AXlro16kjq0tTEr+LWWQwPVT3h3H8Xxd1pDr3sTI+ZxNOpp/SQm1wrY1jr5A7taOxJAOXvnLdjVQ==} - figures@3.2.0: resolution: {integrity: sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==} engines: {node: '>=8'} @@ -4321,18 +4012,6 @@ packages: flatted@3.2.9: resolution: {integrity: sha512-36yxDn5H7OFZQla0/jFJmbIKTdZAQHngCedGxiMmpNfEZM0sdEeT+WczLQrjK6D7o2aiyLYDnkw0R3JK0Qv1RQ==} - fnv-plus@1.3.1: - resolution: {integrity: sha512-Gz1EvfOneuFfk4yG458dJ3TLJ7gV19q3OM/vVvvHf7eT02Hm1DleB4edsia6ahbKgAYxO9gvyQ1ioWZR+a00Yw==} - - follow-redirects@1.15.4: - resolution: {integrity: sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw==} - engines: {node: '>=4.0'} - peerDependencies: - debug: '*' - peerDependenciesMeta: - debug: - optional: true - follow-redirects@1.15.6: resolution: {integrity: sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==} engines: {node: '>=4.0'} @@ -4365,10 +4044,6 @@ packages: resolution: {integrity: sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==} engines: {node: '>= 0.6'} - fs-extra@11.1.1: - resolution: {integrity: sha512-MGIE4HOvQCeUCzmlHs0vXpih4ysz4wg9qiSAu6cd42lVwPbTM1TjV7RusoyQqMmk/95gdQZX72u+YW+c3eEpFQ==} - engines: {node: '>=14.14'} - fs-extra@11.2.0: resolution: {integrity: sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw==} engines: {node: 
'>=14.14'} @@ -4377,10 +4052,6 @@ packages: resolution: {integrity: sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw==} engines: {node: '>=6 <7 || >=8'} - fs-minipass@2.1.0: - resolution: {integrity: sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==} - engines: {node: '>= 8'} - fs.realpath@1.0.0: resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} @@ -4392,6 +4063,9 @@ packages: function-bind@1.1.2: resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==} + functional-red-black-tree@1.0.1: + resolution: {integrity: sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==} + gensync@1.0.0-beta.2: resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==} engines: {node: '>=6.9.0'} @@ -4440,10 +4114,6 @@ packages: get-tsconfig@4.7.5: resolution: {integrity: sha512-ZCuZCnlqNzjb4QprAzXKdpp/gh6KTxSJuw3IBsPnV/7fV4NxC9ckB+vPTt8w7fJA0TaSD7c55BR47JD6MEDyDw==} - giget@1.2.3: - resolution: {integrity: sha512-8EHPljDvs7qKykr6uw8b+lqLiUc/vUg+KVTI0uND4s63TdsZM2Xus3mflvF0DDG9SiM4RlCkFGL+7aAjRmV7KA==} - hasBin: true - git-raw-commits@4.0.0: resolution: {integrity: sha512-ICsMM1Wk8xSGMowkOmPrzo2Fgmfo4bMHLNX6ytHjajRJUqvHOw/TFapQ+QG75c3X/tTDDhOSRPGC52dDbNM8FQ==} engines: {node: '>=16'} @@ -4520,16 +4190,6 @@ packages: cosmiconfig-toml-loader: optional: true - graphql-filter@1.1.5: - resolution: {integrity: sha512-8JtQxm3tu1463bRFTzr17x6bgMgG2dH7gyzVJH4BNa8TnfpaZtcovkUgJic63dVQqMMVEvmiPx/k/bWtLQ3j8Q==} - peerDependencies: - graphql: ^15.3.0 - - graphql-middleware@6.1.35: - resolution: {integrity: sha512-azawK7ApUYtcuPGRGBR9vDZu795pRuaFhO5fgomdJppdfKRt7jwncuh0b7+D3i574/4B+16CNWgVpnGVlg3ZCg==} - peerDependencies: - graphql: ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || 
^16.0.0 - graphql-query-complexity@0.12.0: resolution: {integrity: sha512-fWEyuSL6g/+nSiIRgIipfI6UXTI7bAxrpPlCY1c0+V3pAEUo1ybaKmSBgNr1ed2r+agm1plJww8Loig9y6s2dw==} peerDependencies: @@ -4552,11 +4212,6 @@ packages: peerDependencies: graphql: ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 - graphql-tools@4.0.8: - resolution: {integrity: sha512-MW+ioleBrwhRjalKjYaLQbr+920pHBgy9vM/n47sswtns8+96sRn5M/G+J1eu7IMeKWiN/9p6tmwCHU7552VJg==} - peerDependencies: - graphql: ^0.13.0 || ^14.0.0 || ^15.0.0 - graphql-ws@5.16.0: resolution: {integrity: sha512-Ju2RCU2dQMgSKtArPbEtsK5gNLnsQyTNIo/T7cZNp96niC1x0KdJNZV0TIoilceBPQwfb5itrGl8pkFeOUMl4A==} engines: {node: '>=10'} @@ -4695,9 +4350,6 @@ packages: resolution: {integrity: sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw==} engines: {node: '>= 4'} - immediate@3.0.6: - resolution: {integrity: sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ==} - immutable@3.7.6: resolution: {integrity: sha512-AizQPcaofEtO11RZhPPHBOJRdo/20MKQF9mBLnVkBoyHi1/zXK8fzVdnEpSV9gxqtnh6Qomfp3F0xT5qP/vThw==} engines: {node: '>=0.8.0'} @@ -4733,10 +4385,6 @@ packages: resolution: {integrity: sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==} engines: {node: '>=8'} - inflection@3.0.0: - resolution: {integrity: sha512-1zEJU1l19SgJlmwqsEyFTbScw/tkMHFenUo//Y0i+XEP83gDFdMvPizAD/WGcE+l1ku12PcTVHQhO6g5E0UCMw==} - engines: {node: '>=18.0.0'} - inflight@1.0.6: resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==} @@ -4810,10 +4458,6 @@ packages: resolution: {integrity: sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w==} engines: {node: '>=8'} - is-interactive@2.0.0: - resolution: {integrity: sha512-qP1vozQRI+BMOPcjFzrjXuQvdak2pHNUMZoeG2eRbiSqyvbEf/wQtEOTOX1guk6E3t36RkaqiSt8A/6YElNxLQ==} - 
engines: {node: '>=12'} - is-lower-case@2.0.2: resolution: {integrity: sha512-bVcMJy4X5Og6VZfdOZstSexlEy20Sr0k/p/b2IlQJlfdKAQuMpiv5w2Ccxb8sKdRUNAG1PnHVHjFSdRDVS6NlQ==} @@ -4869,20 +4513,9 @@ packages: resolution: {integrity: sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==} engines: {node: '>=10'} - is-unicode-supported@1.3.0: - resolution: {integrity: sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ==} - engines: {node: '>=12'} - - is-unicode-supported@2.0.0: - resolution: {integrity: sha512-FRdAyx5lusK1iHG0TWpVtk9+1i+GjrzRffhDg4ovQ7mcidMQ6mj+MhKPmvh7Xwyv5gIS06ns49CA7Sqg7lC22Q==} - engines: {node: '>=18'} - is-upper-case@2.0.2: resolution: {integrity: sha512-44pxmxAvnnAOwBg4tHPnkfvgjPwbc5QIsSstNU+YcJ1ovxVzCWpSGosPJOZh/a1tdl81fbgnLc9LLv+x2ywbPQ==} - is-url@1.2.4: - resolution: {integrity: sha512-ITvGim8FhRiYe4IQ5uHSkj7pVaPDrCTkNd3yq3cV7iZAcJdHTUMPMEHcqSOy9xZ9qFenQCvi+2wjH9a1nXqHww==} - is-what@4.1.16: resolution: {integrity: sha512-ZhMwEosbFJkA0YhFnNDgTM4ZxDRsS6HqTo7qsZM08fehyRYIYa0yHu5R6mgo1n/8MgaPBXiPimPD77baVFYg+A==} engines: {node: '>=12.13'} @@ -4947,9 +4580,6 @@ packages: it-to-stream@1.0.0: resolution: {integrity: sha512-pLULMZMAB/+vbdvbZtebC0nWBTbG581lk6w8P7DfIIIKUfa8FbY7Oi0FxZcFPbxvISs7A9E+cMpLDBc1XhpAOA==} - iterall@1.3.0: - resolution: {integrity: sha512-QZ9qOMdF+QLHxy1QIpUHUU1D5pS2CG2P69LF6L6CPjPYA/XMOmKV3PZpawHoAjHNyB0swdVTRxdYT4tbBbxqwg==} - jackspeak@2.3.6: resolution: {integrity: sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ==} engines: {node: '>=14'} @@ -4957,9 +4587,6 @@ packages: jackspeak@3.4.3: resolution: {integrity: sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==} - javascript-stringify@2.1.0: - resolution: {integrity: sha512-JVAfqNPTvNq3sB/VHQJAFxN/sPgKnsKrCwyRt15zwNCdrMMJDdcEOdubuy+DuJYYdm0ox1J4uzEuYKkN+9yhVg==} - jiti@1.21.0: resolution: {integrity: 
sha512-gFqAIbuKyyso/3G2qhiO2OM6shY6EPP/R0+mkDbyspxKazh8BXDC5FiFsUjlczgdNz/vfra0da2y+aHrusLG/Q==} hasBin: true @@ -4967,9 +4594,6 @@ packages: jose@5.2.3: resolution: {integrity: sha512-KUXdbctm1uHVL8BYhnyHkgp3zDX5KW8ZhAKVFEfUbU2P8Alpzjb+48hHvjOdQIyPshoblhzsuqOwEEAbtHVirA==} - js-base64@3.7.7: - resolution: {integrity: sha512-7rCnleh0z2CkXhH67J8K1Ytz0b2Y+yxTPL+/KOJoa20hfnVQ/3/T6W/KflYI4bRHRagNeXeU2bkNGI3v1oS/lw==} - js-sha3@0.8.0: resolution: {integrity: sha512-gF1cRrHhIzNfToc802P800N8PpXS+evLLXfsVpowqmAFR9uwbi89WvXg2QspOmXL8QL86J4T1EpFu+yUkwJY3Q==} @@ -4994,9 +4618,6 @@ packages: json-parse-even-better-errors@2.3.1: resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==} - json-schema-library@9.3.4: - resolution: {integrity: sha512-220lm9RVt9BUeF2QhBT711aX4IogUHhPT8Tjhkksc4CUw8WmChFMuf0mJdpDAHDfJDkI064jcZIH8P70HdPAOA==} - json-schema-traverse@0.4.1: resolution: {integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==} @@ -5052,14 +4673,6 @@ packages: keyv@4.5.4: resolution: {integrity: sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==} - kleur@3.0.3: - resolution: {integrity: sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==} - engines: {node: '>=6'} - - kleur@4.1.5: - resolution: {integrity: sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==} - engines: {node: '>=6'} - kysely-supabase@0.2.0: resolution: {integrity: sha512-InDRSd2TD8ddCAcMzW2mIoIRqJgWy5qJe4Ydb37quKiijjERu5m1FhFitvfC8bVjEHd8S3xhl0y0DFPeIAwjTQ==} peerDependencies: @@ -5067,17 +4680,14 @@ packages: kysely: '>= 0.24.0 < 1' supabase: '>= 1.0.0 < 2' - kysely@0.27.4: - resolution: {integrity: sha512-dyNKv2KRvYOQPLCAOCjjQuCk4YFd33BvGdf/o5bC7FiW+BB6snA81Zt+2wT9QDFzKqxKa5rrOmvlK/anehCcgA==} + kysely@0.27.6: + resolution: {integrity: 
sha512-FIyV/64EkKhJmjgC0g2hygpBv5RNWVPyNCqSAD7eTCv6eFWNIi4PN1UvdSJGicN/o35bnevgis4Y0UDC0qi8jQ==} engines: {node: '>=14.0.0'} levn@0.4.1: resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==} engines: {node: '>= 0.8.0'} - lie@3.1.1: - resolution: {integrity: sha512-RiNhHysUjhrDQntfYSfY4MU24coXXdEOgw9WGcKHNeEwffDYbF//u87M1EWaMGzuFoSbqW0C9C6lEEhDOAswfw==} - lilconfig@3.1.2: resolution: {integrity: sha512-eop+wDAvpItUys0FWkHIKeC9ybYrTGbU41U5K7+bttZZeohvnY7M9dZ5kB21GNWiFT2q1OoPTvncPCgSOVO5ow==} engines: {node: '>=14'} @@ -5106,8 +4716,9 @@ packages: resolution: {integrity: sha512-opevsywziHd3zHCVQGAj8zu+Z3yHNkkoYhWIGnq54RrCVwLz0MozotJEDnKsIBLvkfLGN6BLOyAeRrYI0pKA4g==} engines: {node: '>=18.0.0'} - localforage@1.10.0: - resolution: {integrity: sha512-14/H1aX7hzBBmmh7sGPd+AOMkkIrHM3Z1PAyGgZigA1H1p5O5ANnMyWzvpAETtG68/dC4pC0ncy3+PPGzXZHPg==} + load-tsconfig@0.2.5: + resolution: {integrity: sha512-IXO6OCs9yg8tMKzfPZ1YmheJbZCiEsnBdcB03l0OcfK9prKnJb96siuHCr5Fl37/yo9DnKU+TLpxzTUspw9shg==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} locate-path@5.0.0: resolution: {integrity: sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==} @@ -5161,10 +4772,6 @@ packages: resolution: {integrity: sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==} engines: {node: '>=10'} - log-symbols@6.0.0: - resolution: {integrity: sha512-i24m8rpwhmPIS4zscNzK6MSEhk0DUWa/8iYQWxhffV8jkI4Phvs3F+quL5xvS0gdQR0FyTCMMH33Y78dDTzzIw==} - engines: {node: '>=18'} - log-update@4.0.0: resolution: {integrity: sha512-9fkkDevMefjg0mmzWFBW8YkFP91OrizzkW3diF7CpG+S2EYdy4+TVfGwz1zeF8x7hCx1ovSPTOE9Ngib74qqUg==} engines: {node: '>=10'} @@ -5381,14 +4988,6 @@ packages: minimist@1.2.8: resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==} - minipass@3.3.6: - resolution: {integrity: 
sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==} - engines: {node: '>=8'} - - minipass@5.0.0: - resolution: {integrity: sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==} - engines: {node: '>=8'} - minipass@7.0.4: resolution: {integrity: sha512-jYofLM5Dam9279rdkWzqHozUo4ybjdZmCsDHePy5V/PbBcVMiSZR97gmAy45aqi8CK1lG2ECd356FU86avfwUQ==} engines: {node: '>=16 || 14 >=14.17'} @@ -5397,10 +4996,6 @@ packages: resolution: {integrity: sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==} engines: {node: '>=16 || 14 >=14.17'} - minizlib@2.1.2: - resolution: {integrity: sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==} - engines: {node: '>= 8'} - minizlib@3.0.1: resolution: {integrity: sha512-umcy022ILvb5/3Djuu8LWeqUa8D68JaBzlttKeMWen48SjabqS3iY5w/vzeMzMUNhLDifyhbOwKDSznB1vvrwg==} engines: {node: '>= 18'} @@ -5409,11 +5004,6 @@ packages: resolution: {integrity: sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==} hasBin: true - mkdirp@1.0.4: - resolution: {integrity: sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==} - engines: {node: '>=10'} - hasBin: true - mkdirp@2.1.6: resolution: {integrity: sha512-+hEnITedc8LAtIP9u3HJDFIdcLV2vXP33sqLLIzkv1Db1zO/1OxbvYf0Y1OC/S/Qo5dxHXepofhmxL02PsKe+A==} engines: {node: '>=10'} @@ -5424,12 +5014,6 @@ packages: engines: {node: '>=10'} hasBin: true - mlly@1.4.2: - resolution: {integrity: sha512-i/Ykufi2t1EZ6NaPLdfnZk2AX8cs0d+mTzVKuPfqPKPatxLApaBoxJQ9x1/uckXtrS/U5oisPMDkNs0yQTaBRg==} - - mlly@1.7.0: - resolution: {integrity: sha512-U9SDaXGEREBYQgfejV97coK0UL1r+qnF2SyO9A3qcI8MzKnsIFKHNVEkrDyNncQTKQQumsasmeq84eNMdBfsNQ==} - mnemonist@0.38.5: resolution: {integrity: sha512-bZTFT5rrPKtPJxj8KSV0WkPyNxl72vQepqqVUAW2ARUpUSF2qXMB6jZj7hW5/k7C1rtpzqbD/IIbJwLXUjCHeg==} @@ -5441,6 
+5025,9 @@ packages: module-details-from-path@1.0.3: resolution: {integrity: sha512-ySViT69/76t8VhE1xXHK6Ch4NcDd26gx0MzKXLO+F7NOtnqH68d9zF94nT8ZWSxXh8ELOERsnJO/sWt1xZYw5A==} + moment@2.30.1: + resolution: {integrity: sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how==} + moo@0.5.2: resolution: {integrity: sha512-iSAJLHYKnX41mKcJKjqvnAN9sf0LMDTXDEvFv+ffuRR9a1MIuXLjMNL6EsnDHSkKLTWNqQQ5uo61P4EbU4NU+Q==} @@ -5471,10 +5058,6 @@ packages: multiformats@13.2.2: resolution: {integrity: sha512-RWI+nyf0q64vyOxL8LbKtjJMki0sogRL/8axvklNtiTM0iFCVtHwME9w6+0P1/v4dQvsIg8A45oT3ka1t/M/+A==} - multimatch@7.0.0: - resolution: {integrity: sha512-SYU3HBAdF4psHEL/+jXDKHO95/m5P2RvboHT2Y0WtTttvJLP4H/2WS9WlQPFvF6C8d6SpLw8vjCnQOnVIVOSJQ==} - engines: {node: '>=18'} - murmurhash3js-revisited@3.0.0: resolution: {integrity: sha512-/sF3ee6zvScXMb1XFJ8gDsSnY+X8PbOyjIuBhtgis10W2Jx4ZjIhikUCIF9c4gpJxVnQIsPAFrSwTCuAjicP6g==} engines: {node: '>=8.0.0'} @@ -5482,10 +5065,6 @@ packages: mute-stream@0.0.8: resolution: {integrity: sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==} - mute-stream@1.0.0: - resolution: {integrity: sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA==} - engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} - mute-stream@2.0.0: resolution: {integrity: sha512-WWdIxpyjEn+FhQJQQv9aQAYlHoNVdzIzUySNV1gHUPDSdZJ3yZn7pAAbQcV7B56Mvu881q9FZV+0Vx2xC44VWA==} engines: {node: ^18.17.0 || >=20.5.0} @@ -5547,9 +5126,6 @@ packages: resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==} engines: {node: '>=10.5.0'} - node-fetch-native@1.6.4: - resolution: {integrity: sha512-IhOigYzAKHd244OC0JIMIUrjzctirCmPkaIfhDeGcEETWof5zKYUW7e7MYvChGWh/4CJeXEgsRyGzuF334rOOQ==} - node-fetch@2.7.0: resolution: {integrity: sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==} 
engines: {node: 4.x || >=6.0.0} @@ -5621,15 +5197,14 @@ packages: resolution: {integrity: sha512-wsJ9gfSz1/s4ZsJN01lyonwuxA1tml6X1yBDnfpMglypcBRFZZkus26EdPSlqS5GJfYddVZa22p3VNb3z5m5Ig==} engines: {node: '>=6.5.0', npm: '>=3'} - nypm@0.3.8: - resolution: {integrity: sha512-IGWlC6So2xv6V4cIDmoV0SwwWx7zLG086gyqkyumteH2fIgCAM4nDVFB2iDRszDvmdSVW9xb1N+2KjQ6C7d4og==} - engines: {node: ^14.16.0 || >=16.10.0} - hasBin: true - object-assign@4.1.1: resolution: {integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==} engines: {node: '>=0.10.0'} + object-hash@2.2.0: + resolution: {integrity: sha512-gScRMn0bS5fH+IuwyIFgnh9zBdo4DV+6GhygmWM9HyNJSgS0hScp1f5vjtm7oIIOiT9trXrShAkLFSc2IqKNgw==} + engines: {node: '>= 6'} + object-inspect@1.13.1: resolution: {integrity: sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==} @@ -5643,9 +5218,6 @@ packages: obuf@1.1.2: resolution: {integrity: sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==} - ohash@1.1.3: - resolution: {integrity: sha512-zuHHiGTYTA1sYJ/wZN+t5HKZaH23i4yI1HMwbuXm24Nid7Dv0KcuRlKoNKS9UNfAVSBlnGLcuQrnOKWOZoEGaw==} - on-finished@2.4.1: resolution: {integrity: sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==} engines: {node: '>= 0.8'} @@ -5680,10 +5252,6 @@ packages: resolution: {integrity: sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ==} engines: {node: '>=10'} - ora@8.0.1: - resolution: {integrity: sha512-ANIvzobt1rls2BDny5fWZ3ZVKyD6nscLvfFRpQgfWsythlcsVUC9kL0zq6j2Z5z9wwp1kd7wpsD/T9qNPVLCaQ==} - engines: {node: '>=18'} - os-filter-obj@2.0.0: resolution: {integrity: sha512-uksVLsqG3pVdzzPvmAHpBK0wKxYItuzZr7SziusRPoz67tGV8rL1szZ6IdeUrbqLjGDwApBtN29eEE3IqGHOjg==} engines: {node: '>=4'} @@ -5766,12 +5334,6 @@ packages: package-json-from-dist@1.0.1: resolution: {integrity: 
sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==} - pako@0.2.9: - resolution: {integrity: sha512-NUcwaKxUxWrZLpDG+z/xZaCgQITkA/Dv4V/T6bw7VON6l1Xz/VnrBqrYjZQ12TamKHzITTfOEIYUj48y2KXImA==} - - pako@1.0.11: - resolution: {integrity: sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==} - param-case@3.0.4: resolution: {integrity: sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A==} @@ -5853,9 +5415,6 @@ packages: resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==} engines: {node: '>=8'} - pathe@1.1.1: - resolution: {integrity: sha512-d+RQGp0MAYTIaDBIMmOfMwz3E+LOZnxx1HZd5R18mmCZY0QBlK0LDZfPc8FW8Ed2DlvsuE6PRjroDY+wg4+j/Q==} - pathe@1.1.2: resolution: {integrity: sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ==} @@ -5875,9 +5434,6 @@ packages: resolution: {integrity: sha512-GVlENSDW6KHaXcd9zkZltB7tCLosKB/4Hg0fqBJkAoBgYG2Tn1xtMgXtSUuMU9AK/gCm/tTdT8mgAeF4YNeeqw==} engines: {node: '>=14.16'} - perfect-debounce@1.0.0: - resolution: {integrity: sha512-xCy9V055GLEqoFaHoC1SoLIaLmWctgCUaBaWxDZ7/Zx4CTyX7cJQLJOok/orfjZAh9kEYpjJa4d0KcJmCbctZA==} - pg-cloudflare@1.1.1: resolution: {integrity: sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q==} @@ -5888,6 +5444,41 @@ packages: resolution: {integrity: sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==} engines: {node: '>=4.0.0'} + pg-mem@3.0.5: + resolution: {integrity: sha512-Bh8xHD6u/wUXCoyFE2vyRs5pgaKbqjWFQowKDlbKWCiF0vOlo2A0PZdiUxmf2PKgb6Vb6C7gwAlA7jKvsfDHZA==} + peerDependencies: + '@mikro-orm/core': '>=4.5.3' + '@mikro-orm/postgresql': '>=4.5.3' + knex: '>=0.20' + kysely: '>=0.26' + mikro-orm: '*' + pg-promise: '>=10.8.7' + pg-server: ^0.1.5 + postgres: ^3.4.4 + slonik: '>=23.0.1' + typeorm: 
'>=0.2.29' + peerDependenciesMeta: + '@mikro-orm/core': + optional: true + '@mikro-orm/postgresql': + optional: true + knex: + optional: true + kysely: + optional: true + mikro-orm: + optional: true + pg-promise: + optional: true + pg-server: + optional: true + postgres: + optional: true + slonik: + optional: true + typeorm: + optional: true + pg-numeric@1.0.2: resolution: {integrity: sha512-BM/Thnrw5jm2kKLE5uJkXqqExRUY/toLHda65XgFTBTFYZyopbKjBe29Ii3RbkvlsMoFwD+tHeGaCjjv0gHlyw==} engines: {node: '>=4'} @@ -5920,6 +5511,9 @@ packages: pgpass@1.0.5: resolution: {integrity: sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==} + pgsql-ast-parser@12.0.1: + resolution: {integrity: sha512-pe8C6Zh5MsS+o38WlSu18NhrTjAv1UNMeDTs2/Km2ZReZdYBYtwtbWGZKK2BM2izv5CrQpbmP0oI10wvHOwv4A==} + picocolors@1.0.0: resolution: {integrity: sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==} @@ -5946,20 +5540,6 @@ packages: piscina@4.4.0: resolution: {integrity: sha512-+AQduEJefrOApE4bV7KRmp3N2JnnyErlVqq4P/jmko4FPz9Z877BCccl/iB3FdrWSUkvbGV9Kan/KllJgat3Vg==} - pkg-types@1.0.3: - resolution: {integrity: sha512-nN7pYi0AQqJnoLPC9eHFQ8AcyaixBUOwvqc5TDnIKCMEE6I0y8P7OKA7fPexsXGCGxQDl/cmrLAp26LhcwxZ4A==} - - pkg-types@1.1.1: - resolution: {integrity: sha512-ko14TjmDuQJ14zsotODv7dBlwxKhUKQEhuhmbqo1uCi9BB0Z2alo/wAXg6q1dTR5TyuqYyWhjtfe/Tsh+X28jQ==} - - pluralize@8.0.0: - resolution: {integrity: sha512-Nc3IT5yHzflTfbjgqWcCPpo7DaKy4FnpB0l/zCAW0Tc7jxAiuqSxHasntB3D7887LSrA93kDJ9IXovxJYxyLCA==} - engines: {node: '>=4'} - - portfinder@1.0.32: - resolution: {integrity: sha512-on2ZJVVDXRADWE6jnQaX0ioEylzgBpQk8r55NE4wjXW1ZxO+BgDlY6DXwj20i0V8eB4SenDQ00WEaxfiIQPcxg==} - engines: {node: '>= 0.12.0'} - postcss@8.4.33: resolution: {integrity: sha512-Kkpbhhdjw2qQs2O2DGX+8m5OVqEcbB9HRBvuYM9pgrjEFUg30A9LmXNlTAUj4S9kgtGyrMbTzVjH7E+s5Re2yg==} engines: {node: ^10 || ^12 || >=14} @@ -5999,10 +5579,6 @@ packages: postgres-range@1.1.4: 
resolution: {integrity: sha512-i/hbxIE9803Alj/6ytL7UHQxRvZkI9O4Sy+J3HGc4F4oo/2eQAjTSNJ0bfxyse3bH0nuVesCk+3IRLaMtG3H6w==} - posthog-node@4.0.1: - resolution: {integrity: sha512-rtqm2h22QxLGBrW2bLYzbRhliIrqgZ0k+gF0LkQ1SNdeD06YE5eilV0MxZppFSxC8TfH0+B0cWCuebEnreIDgQ==} - engines: {node: '>=15.0.0'} - prelude-ls@1.2.1: resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==} engines: {node: '>= 0.8.0'} @@ -6015,17 +5591,9 @@ packages: process-nextick-args@2.0.1: resolution: {integrity: sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==} - process@0.11.10: - resolution: {integrity: sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==} - engines: {node: '>= 0.6.0'} - promise@7.3.1: resolution: {integrity: sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg==} - prompts@2.4.2: - resolution: {integrity: sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==} - engines: {node: '>= 6'} - protobufjs@7.2.5: resolution: {integrity: sha512-gGXRSXvxQ7UiPgfw8gevrfRWcTlSbOFg+p/N+JVJEK5VhueL2miT6qTymqAmjr1Q5WbOCyJbyrk6JfWKwlFn6A==} engines: {node: '>=12.0.0'} @@ -6075,9 +5643,6 @@ packages: resolution: {integrity: sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==} engines: {node: '>=10'} - quicktype-core@23.0.149: - resolution: {integrity: sha512-P6orZe46XwDcl17MdJc1SLgAornP3XzEHYE25vhS2DWG5t0mszS9oSS5BiFir/XnBv2Ak0P70Zz5m7C2WhLjWw==} - rabin-rs@2.1.0: resolution: {integrity: sha512-5y72gAXPzIBsAMHcpxZP8eMDuDT98qMP1BqSDHRbHkJJXEgWIN1lA47LxUqzsK6jknOJtgfkQr9v+7qMlFDm6g==} @@ -6099,9 +5664,6 @@ packages: resolution: {integrity: sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==} engines: {node: '>= 0.8'} - rc9@2.1.2: - resolution: {integrity: 
sha512-btXCnMmRIBINM2LDZoEmOogIZU7Qe7zn4BpomSKZ/ykbLObuBdvG+mFq11DL6fjH1DRwHhrlgtYWG96bJiC7Cg==} - react-native-fetch-api@3.0.0: resolution: {integrity: sha512-g2rtqPjdroaboDKTsJCTlcmtw54E25OjyaunUP0anOZn4Fuo2IKs8BVfe02zVggA/UysbmfSnRJIqtNkAgggNA==} @@ -6116,10 +5678,6 @@ packages: resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==} engines: {node: '>= 6'} - readable-stream@4.5.2: - resolution: {integrity: sha512-yjavECdqeZ3GLXNgRXgeQEdz9fvDDkNKyHnbHRFtOr7/LcfgBcmct7t/ET+HaCTqfh06OzoAxrkN/IfjJBVe+g==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - readable-web-to-node-stream@3.0.2: resolution: {integrity: sha512-ePeK6cc1EcKLEhJFt/AebMCLL+GgSKhuygrZ/GLaKZYEecIgIECf4UaUuaByiGtzckwR4ain9VzUh95T1exYGw==} engines: {node: '>=8'} @@ -6141,9 +5699,6 @@ packages: relay-runtime@12.0.0: resolution: {integrity: sha512-QU6JKr1tMsry22DXNy9Whsq5rmvwr3LSZiiWV/9+DFpuTWvp+WFhobWMc8TC4OjKFfNhEZy7mOiqUAn5atQtug==} - remeda@1.61.0: - resolution: {integrity: sha512-caKfSz9rDeSKBQQnlJnVW3mbVdFgxgGWQKq1XlFokqjf+hQD5gxutLGTTY2A/x24UxVyJe9gH5fAkFI63ULw4A==} - remedial@1.0.8: resolution: {integrity: sha512-/62tYiOe6DzS5BqVsNpH/nkGlX45C/Sp6V+NtiN6JQNS1Viay7cWkazmRkrQrdFj2eshDe96SIQNIoMxqhzBOg==} @@ -6196,10 +5751,6 @@ packages: resolution: {integrity: sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==} engines: {node: '>=8'} - restore-cursor@4.0.0: - resolution: {integrity: sha512-I9fPXU9geO9bHOt9pHHOhOkYerIMsmVaWB0rA2AI9ERh/+x/i7MV5HKBNrg+ljO5eoPVgCcnFuRjJ9uH6I/3eg==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - restore-cursor@5.1.0: resolution: {integrity: sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA==} engines: {node: '>=18'} @@ -6262,9 +5813,6 @@ packages: run-parallel@1.2.0: resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} - rusha@0.8.14: - 
resolution: {integrity: sha512-cLgakCUf6PedEu15t8kbsjnwIFFR2D4RfL+W3iWFJ4iac7z4B0ZI8fxy4R3J956kAI68HclCFGL8MPoUVC3qVA==} - rxjs@7.8.1: resolution: {integrity: sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==} @@ -6397,12 +5945,6 @@ packages: sinon@17.0.1: resolution: {integrity: sha512-wmwE19Lie0MLT+ZYNpDymasPHUKTaZHUH/pKEubRXIzySv9Atnlw+BUMGCzWgV7b7wO+Hw6f1TEOr0IUnmU8/g==} - siphash@1.2.0: - resolution: {integrity: sha512-zGo/O5A0Nr4oSteEAMlhemqQpCBbVTRaTjUQdO+QFUqe1iofq/NNPe2W1RxJreh89fIk6NhQcNi41UeTGCvr+g==} - - sisteransi@1.0.5: - resolution: {integrity: sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==} - slash@3.0.0: resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} engines: {node: '>=8'} @@ -6423,10 +5965,6 @@ packages: resolution: {integrity: sha512-bSiSngZ/jWeX93BqeIAbImyTbEihizcwNjFoRUIY/T1wWQsfsm2Vw1agPKylXvQTU7iASGdHhyqRlqQzfz+Htg==} engines: {node: '>=18'} - smtp-address-parser@1.0.10: - resolution: {integrity: sha512-Osg9LmvGeAG/hyao4mldbflLOkkr3a+h4m1lwKCK5U8M6ZAr7tdXEz/+/vr752TSGE4MNUlUl9cIK2cB8cgzXg==} - engines: {node: '>=0.10'} - snake-case@3.0.4: resolution: {integrity: sha512-LAOh4z89bGQvl9pFfNF8V146i7o7/CqFPbqzYgP+yYzDIDeS9HaNFtXABamRW+AQzEVODcvE79ljJ+8a9YSdMg==} @@ -6468,10 +6006,6 @@ packages: sponge-case@1.0.1: resolution: {integrity: sha512-dblb9Et4DAtiZ5YSUZHLl4XhH4uK80GhAZrVXdN4O2P4gQ40Wa5UIOPUHlA/nFd2PLblBZWUioLMMAVrgpoYcA==} - sqlstring@2.3.3: - resolution: {integrity: sha512-qC9iz2FlN7DQl3+wjwn3802RTyjCx7sDvfQEXchwa6CWOx07/WVfh91gBmQ9fahw8snwGEWU3xGzOt4tFyHLxg==} - engines: {node: '>= 0.6'} - stackback@0.0.2: resolution: {integrity: sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==} @@ -6486,10 +6020,6 @@ packages: std-env@3.8.0: resolution: {integrity: 
sha512-Bc3YwwCB+OzldMxOXJIIvC6cPRWr/LxOp48CdQTOkPyk/t4JWWJbrilwBd7RJzKV8QW7tJkcgAmeuLLJugl5/w==} - stdin-discarder@0.2.2: - resolution: {integrity: sha512-UhDfHmA92YAlNnCfhmq0VeNL5bDbiZGg7sZ2IvPsXubGkiNa9EC+tUTsjBRsYUAz87btI6/1wf4XoVvQ3uRnmQ==} - engines: {node: '>=18'} - stream-to-it@0.2.4: resolution: {integrity: sha512-4vEbkSs83OahpmBybNJXlJd7d6/RxzkkSdT3I0mnGt79Xd2Kk+e1JqbvAvsQfCeKj3aKb0QIWkyK3/n0j506vQ==} @@ -6586,10 +6116,6 @@ packages: resolution: {integrity: sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==} engines: {node: '>=10'} - supports-hyperlinks@2.3.0: - resolution: {integrity: sha512-RpsAZlpWcDwOPQA22aCH4J0t7L8JmAvsCxfOSEwm7cQs3LshN36QaTkwd70DnBOXDWGssw2eUoc8CaRWT0XunA==} - engines: {node: '>=8'} - supports-preserve-symlinks-flag@1.0.0: resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} engines: {node: '>= 0.4'} @@ -6609,18 +6135,10 @@ packages: sync-multihash-sha2@1.0.0: resolution: {integrity: sha512-A5gVpmtKF0ov+/XID0M0QRJqF2QxAsj3x/LlDC8yivzgoYCoWkV+XaZPfVu7Vj1T/hYzYS1tfjwboSbXjqocug==} - tar@6.2.1: - resolution: {integrity: sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==} - engines: {node: '>=10'} - tar@7.4.3: resolution: {integrity: sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==} engines: {node: '>=18'} - terminal-link@3.0.0: - resolution: {integrity: sha512-flFL3m4wuixmf6IfhFJd1YPiLiMuxEc8uHRM1buzIeZPm22Au2pDqBJQgdo7n1WfPU1ONFGv7YDwpFBmHGF6lg==} - engines: {node: '>=12'} - test-exclude@7.0.1: resolution: {integrity: sha512-pFYqmTw68LXVjeWJMST4+borgQP2AyMNbg1BpZh9LbyhUeNkeaPF9gzfPGUAnSMV3qPYdWUwDIjjCLiSDOl7vg==} engines: {node: '>=18'} @@ -6635,9 +6153,6 @@ packages: through@2.3.8: resolution: {integrity: sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==} - tiny-inflate@1.0.3: - 
resolution: {integrity: sha512-pkY1fj1cKHb2seWDy0B16HeWyczlJA9/WW3u3c4z/NiWDsO3DOU5D7nhTLE9CF0yXv/QZFY7sEJmj24dK+Rrqw==} - tinybench@2.9.0: resolution: {integrity: sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==} @@ -6724,9 +6239,6 @@ packages: typescript: optional: true - ts-invariant@0.4.4: - resolution: {integrity: sha512-uEtWkFM/sdZvRNNDL3Ehu4WVpwaulhwQszV8mrtcdeE8nN00BV9mAmQ88RkrBhFgl9gMgvjJLAQcZbnPXI9mlA==} - ts-log@2.2.5: resolution: {integrity: sha512-PGcnJoTBnVGy6yYNFxWVNkdcAuAMstvutN9MgDJIV6L0oG8fB+ZNNy1T+wJzah8RPGor1mZuPQkVfXNDpy9eHA==} @@ -6767,9 +6279,6 @@ packages: tslib@2.4.0: resolution: {integrity: sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==} - tslib@2.4.1: - resolution: {integrity: sha512-tGyy4dAjRIEwI7BzsB0lynWgOpfqjUdq91XXAlIWD2OwKBH7oCl/GZG/HT4BOHrTlPMOASlMQ7veyTqpmRcrNA==} - tslib@2.6.2: resolution: {integrity: sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==} @@ -6819,10 +6328,6 @@ packages: resolution: {integrity: sha512-Ne2YiiGN8bmrmJJEuTWTLJR32nh/JdL1+PSicowtNb0WFpn59GK8/lfD61bVtzguz7b3PBt74nxpv/Pw5po5Rg==} engines: {node: '>=8'} - type-fest@1.4.0: - resolution: {integrity: sha512-yGSza74xk0UG8k+pLh5oeoYirvIiWo5t0/o3zHHAO2tRDiZcxWP7fywNlXhqb6/r6sWvwi+RsyQMWhVLe4BVuA==} - engines: {node: '>=10'} - type-fest@2.19.0: resolution: {integrity: sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==} engines: {node: '>=12.20'} @@ -6877,12 +6382,6 @@ packages: uc.micro@2.1.0: resolution: {integrity: sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==} - ufo@1.3.2: - resolution: {integrity: sha512-o+ORpgGwaYQXgqGDwd+hkS4PuZ3QnmqMMxRuajK/a38L6fTpcE5GPIfrf+L/KemFzfUpeUQc1rRS1iDBozvnFA==} - - ufo@1.5.3: - resolution: {integrity: sha512-Y7HYmWaFwPUmkoQCUIAYpKqkOf+SbVj/2fJJZ4RJMCfZp0rTGwRbzQD+HghfnhKOjL9E01okqz+ncJskGYfBNw==} 
- uglify-js@3.17.4: resolution: {integrity: sha512-T9q82TJI9e/C1TAxYvfb16xO120tMVFZrGA3f9/P4424DNu6ypK103y0GPFVa17yotwSyZW5iYXgjYHkGrJW/g==} engines: {node: '>=0.8.0'} @@ -6915,12 +6414,6 @@ packages: resolution: {integrity: sha512-wh1pHJHnUeQV5Xa8/kyQhO7WFa8M34l026L5P/+2TYiakvGy5Rdc8jWZVyG7ieht/0WgJLEd3kcU5gKx+6GC8w==} engines: {node: '>=14.0'} - unicode-properties@1.4.1: - resolution: {integrity: sha512-CLjCCLQ6UuMxWnbIylkisbRj31qxHPAurvena/0iwSVbQ2G1VY5/HjV0IRabOEbDHlzZlRdCrD4NhB0JtU40Pg==} - - unicode-trie@2.0.0: - resolution: {integrity: sha512-x7bc76x0bm4prf1VLg79uhAzKw8DVboClSN5VxJuQ+LKDOVEW9CdH+VY7SP+vX7xCYQqzzgQpFqz15zeLvAtZQ==} - unicorn-magic@0.1.0: resolution: {integrity: sha512-lRfVq8fE8gz6QMBuDM6a+LO3IAzTi05H6gCVaUpir2E1Rwpo4ZUog45KpNXKC/Mn3Yb9UDuHumeFTo9iV/D9FQ==} engines: {node: '>=18'} @@ -6941,6 +6434,15 @@ packages: resolution: {integrity: sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==} engines: {node: '>= 0.8'} + unplugin-swc@1.5.1: + resolution: {integrity: sha512-/ZLrPNjChhGx3Z95pxJ4tQgfI6rWqukgYHKflrNB4zAV1izOQuDhkTn55JWeivpBxDCoK7M/TStb2aS/14PS/g==} + peerDependencies: + '@swc/core': ^1.2.108 + + unplugin@1.16.1: + resolution: {integrity: sha512-4/u/j4FrCKdi17jaxuJA0jClGxB1AvU2hw/IuayPc4ay1XGaJs/rbb4v5WKwAjNifjmXK9PIFyuPiaK8azyR9w==} + engines: {node: '>=14.0.0'} + update-browserslist-db@1.0.13: resolution: {integrity: sha512-xebP81SNcPuNpPP3uzeW1NYXxI3rxyJzF3pD6sH4jE7o/IX+WtSpwnVU+qIsDPyk0d3hmFQ7mjqc6AtV604hbg==} hasBin: true @@ -6956,9 +6458,6 @@ packages: uri-js@4.4.1: resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==} - urijs@1.19.11: - resolution: {integrity: sha512-HXgFDgDommxn5/bIv0cnQZsPhHDA90NPHD6+c/v21U5+Sx5hoP8+dP9IZXBU1gIfvdRfhG8cel9QNPeionfcCQ==} - urlpattern-polyfill@10.0.0: resolution: {integrity: sha512-H/A06tKD7sS1O1X2SshBVeA5FLycRpjqiBeqGKmBwBDBy28EnRjORxTNe269KSSr5un5qyWi1iL61wLxpd+ZOg==} @@ -6978,32 
+6477,17 @@ packages: resolution: {integrity: sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==} engines: {node: '>= 0.4.0'} - uuid@3.4.0: - resolution: {integrity: sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==} - hasBin: true - uuid@8.3.2: resolution: {integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==} hasBin: true - uuid@9.0.1: - resolution: {integrity: sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==} - hasBin: true - v8-compile-cache-lib@3.0.1: resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==} - valid-url@1.0.9: - resolution: {integrity: sha512-QQDsV8OnSf5Uc30CKSwG9lnhMPe6exHtTXLRYX8uMwKENy640pU+2BgBL0LRbDh/eYRahNCS7aewCx0wf3NYVA==} - validator@13.12.0: resolution: {integrity: sha512-c1Q0mCiPlgdTVVVIJIrBuxNicYE+t/7oKeI9MWLj3fh/uq2Pxh/3eeWbVZ4OcGW1TUf53At0njHw5SMdA3tmMg==} engines: {node: '>= 0.10'} - value-or-promise@1.0.11: - resolution: {integrity: sha512-41BrgH+dIbCFXClcSapVs5M6GkENd3gQOJpEfPDNa71LsUGMXDL0jMWpI/Rh7WhX+Aalfz2TTS3Zt5pUsbnhLg==} - engines: {node: '>=12'} - value-or-promise@1.0.12: resolution: {integrity: sha512-Z6Uz+TYwEqE7ZN50gwn+1LCVo9ZVrpxRPOhOLnncYkY1ZzOYtrX8Fwf/rFktZ8R5mJms6EZf5TqNOMeZmnPq9Q==} engines: {node: '>=12'} @@ -7139,6 +6623,9 @@ packages: webidl-conversions@3.0.1: resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} + webpack-virtual-modules@0.6.2: + resolution: {integrity: sha512-66/V2i5hQanC51vBQKPH4aI8NMAcBW59FVBs+rC7eGHupMyfn34q7rZIE+ETlJ+XTevqfUhVVBgSUNSW2flEUQ==} + whatwg-url@5.0.0: resolution: {integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==} @@ -7166,10 +6653,6 @@ packages: resolution: {integrity: 
sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg==} engines: {node: '>=8'} - widest-line@4.0.1: - resolution: {integrity: sha512-o0cyEG0e8GPzT4iGHphIOh0cJOV8fivsXxddQasHPHfoZf1ZexrfeA21w2NaEN1RHE+fXlfISmOE8R9N3u3Qig==} - engines: {node: '>=12'} - wonka@6.3.4: resolution: {integrity: sha512-CjpbqNtBGNAeyNS/9W6q3kSkKE52+FjIj7AkFlLr11s/VWGUu6a2CdYSdGxocIhIVjaW/zchesBQUKPVU69Cqg==} @@ -7359,12 +6842,6 @@ packages: resolution: {integrity: sha512-cYVsTjKl8b+FrnidjibDWskAv7UKOfcwaVZdp/it9n1s9fU3IkgDbhdIRKCW4JDsAlECJY0ytoVPT3sK6kideA==} engines: {node: '>=18'} - zen-observable-ts@0.8.21: - resolution: {integrity: sha512-Yj3yXweRc8LdRMrCC8nIc4kkjWecPAUVh0TI0OUrWXx6aX790vLcDlWca6I4vsyCGH3LpWxq0dJRcMOFoVqmeg==} - - zen-observable@0.8.15: - resolution: {integrity: sha512-PQ2PC7R9rslx84ndNBZB/Dkv8V8fZEpk83RLgXtYd0fwUgEjseMn1Dgajh2x6S8QbZAFa9p2qVCEuYZNgve0dQ==} - zod@3.23.8: resolution: {integrity: sha512-XBx9AXhXktjUqnepgTiE5flcKIYWi/rme0Eaj+5Y0lftuGBq+jyRu/md4WnuxqgP1ubdpNCsYEYPxrzVHD8d6g==} @@ -7823,7 +7300,7 @@ snapshots: '@commitlint/is-ignored@19.2.2': dependencies: '@commitlint/types': 19.0.3 - semver: 7.6.0 + semver: 7.6.3 '@commitlint/lint@19.4.1': dependencies: @@ -8165,14 +7642,12 @@ snapshots: '@ethersproject/properties': 5.7.0 '@ethersproject/strings': 5.7.0 - '@faker-js/faker@8.4.1': {} + '@faker-js/faker@9.6.0': {} '@fastify/busboy@2.1.0': {} '@fastify/deepmerge@1.3.0': {} - '@glideapps/ts-necessities@2.2.3': {} - '@gql.tada/cli-utils@1.6.3(@0no-co/graphqlsp@1.12.16(graphql@16.10.0)(typescript@5.5.3))(graphql@16.10.0)(typescript@5.5.3)': dependencies: '@0no-co/graphqlsp': 1.12.16(graphql@16.10.0)(typescript@5.5.3) @@ -8381,14 +7856,6 @@ snapshots: graphql: 16.10.0 tslib: 2.8.1 - '@graphql-tools/batch-execute@8.5.1(graphql@16.10.0)': - dependencies: - '@graphql-tools/utils': 8.9.0(graphql@16.10.0) - dataloader: 2.1.0 - graphql: 16.10.0 - tslib: 2.4.1 - value-or-promise: 1.0.11 - 
'@graphql-tools/batch-execute@9.0.11(graphql@16.10.0)': dependencies: '@graphql-tools/utils': 10.8.1(graphql@16.10.0) @@ -8419,16 +7886,6 @@ snapshots: graphql: 16.10.0 tslib: 2.8.1 - '@graphql-tools/delegate@8.8.1(graphql@16.10.0)': - dependencies: - '@graphql-tools/batch-execute': 8.5.1(graphql@16.10.0) - '@graphql-tools/schema': 8.5.1(graphql@16.10.0) - '@graphql-tools/utils': 8.9.0(graphql@16.10.0) - dataloader: 2.1.0 - graphql: 16.10.0 - tslib: 2.4.1 - value-or-promise: 1.0.11 - '@graphql-tools/documents@1.0.0(graphql@16.10.0)': dependencies: graphql: 16.10.0 @@ -8563,12 +8020,6 @@ snapshots: p-limit: 3.1.0 tslib: 2.6.2 - '@graphql-tools/merge@8.3.1(graphql@16.10.0)': - dependencies: - '@graphql-tools/utils': 8.9.0(graphql@16.10.0) - graphql: 16.10.0 - tslib: 2.8.1 - '@graphql-tools/merge@9.0.19(graphql@16.10.0)': dependencies: '@graphql-tools/utils': 10.8.1(graphql@16.10.0) @@ -8625,14 +8076,6 @@ snapshots: graphql: 16.10.0 tslib: 2.6.2 - '@graphql-tools/schema@8.5.1(graphql@16.10.0)': - dependencies: - '@graphql-tools/merge': 8.3.1(graphql@16.10.0) - '@graphql-tools/utils': 8.9.0(graphql@16.10.0) - graphql: 16.10.0 - tslib: 2.8.1 - value-or-promise: 1.0.11 - '@graphql-tools/stitch@9.4.16(graphql@16.10.0)': dependencies: '@graphql-tools/batch-delegate': 9.0.29(graphql@16.10.0) @@ -8683,11 +8126,6 @@ snapshots: graphql: 16.10.0 tslib: 2.6.2 - '@graphql-tools/utils@8.9.0(graphql@16.10.0)': - dependencies: - graphql: 16.10.0 - tslib: 2.4.1 - '@graphql-tools/wrap@10.0.29(graphql@16.10.0)': dependencies: '@graphql-tools/delegate': 10.2.11(graphql@16.10.0) @@ -8910,9 +8348,9 @@ snapshots: '@humanwhocodes/object-schema@2.0.1': {} - '@hypercerts-org/contracts@2.0.0-alpha.12(ts-node@10.9.2(@swc/core@1.4.15)(@types/node@20.10.6)(typescript@5.5.3))(typescript@5.5.3)': + '@hypercerts-org/contracts@2.0.0-alpha.12(ts-node@10.9.2(@swc/core@1.4.15(@swc/helpers@0.5.15))(@types/node@20.10.6)(typescript@5.5.3))(typescript@5.5.3)': dependencies: - hardhat: 
2.22.18(ts-node@10.9.2(@swc/core@1.4.15)(@types/node@20.10.6)(typescript@5.5.3))(typescript@5.5.3) + hardhat: 2.22.18(ts-node@10.9.2(@swc/core@1.4.15(@swc/helpers@0.5.15))(@types/node@20.10.6)(typescript@5.5.3))(typescript@5.5.3) transitivePeerDependencies: - bufferutil - c-kzg @@ -8921,12 +8359,13 @@ snapshots: - typescript - utf-8-validate - '@hypercerts-org/marketplace-sdk@0.5.1(@safe-global/api-kit@2.5.4(encoding@0.1.13)(typescript@5.5.3)(zod@3.23.8))(@safe-global/protocol-kit@5.0.4(typescript@5.5.3)(zod@3.23.8))(ethers@6.12.2)(graphql@16.10.0)(rollup@4.12.0)(ts-node@10.9.2(@swc/core@1.4.15)(@types/node@20.10.6)(typescript@5.5.3))(typescript@5.5.3)(zod@3.23.8)': + '@hypercerts-org/marketplace-sdk@0.8.0(@safe-global/api-kit@2.5.4(encoding@0.1.13)(typescript@5.5.3)(zod@3.23.8))(@safe-global/protocol-kit@5.0.4(typescript@5.5.3)(zod@3.23.8))(@safe-global/types-kit@1.0.0(typescript@5.5.3)(zod@3.23.8))(@swc/helpers@0.5.15)(ethers@6.12.2)(graphql@16.10.0)(rollup@4.12.0)(ts-node@10.9.2(@swc/core@1.4.15(@swc/helpers@0.5.15))(@types/node@20.10.6)(typescript@5.5.3))(typescript@5.5.3)(zod@3.23.8)': dependencies: - '@hypercerts-org/sdk': 2.4.0(graphql@16.10.0)(rollup@4.12.0)(ts-node@10.9.2(@swc/core@1.4.15)(@types/node@20.10.6)(typescript@5.5.3))(typescript@5.5.3) + '@hypercerts-org/sdk': 2.4.0(@swc/helpers@0.5.15)(graphql@16.10.0)(rollup@4.12.0)(ts-node@10.9.2(@swc/core@1.4.15(@swc/helpers@0.5.15))(@types/node@20.10.6)(typescript@5.5.3))(typescript@5.5.3) '@looksrare/contracts-libs': 3.5.1 '@safe-global/api-kit': 2.5.4(encoding@0.1.13)(typescript@5.5.3)(zod@3.23.8) '@safe-global/protocol-kit': 5.0.4(typescript@5.5.3)(zod@3.23.8) + '@safe-global/types-kit': 1.0.0(typescript@5.5.3)(zod@3.23.8) '@urql/core': 5.0.4(graphql@16.10.0) ethers: 6.12.2 gql.tada: 1.8.10(graphql@16.10.0)(typescript@5.5.3) @@ -8948,16 +8387,16 @@ snapshots: - utf-8-validate - zod - 
'@hypercerts-org/sdk@2.4.0(graphql@16.10.0)(rollup@4.12.0)(ts-node@10.9.2(@swc/core@1.4.15)(@types/node@20.10.6)(typescript@5.5.3))(typescript@5.5.3)': + '@hypercerts-org/sdk@2.4.0(@swc/helpers@0.5.15)(graphql@16.10.0)(rollup@4.12.0)(ts-node@10.9.2(@swc/core@1.4.15(@swc/helpers@0.5.15))(@types/node@20.10.6)(typescript@5.5.3))(typescript@5.5.3)': dependencies: '@graphql-typed-document-node/core': 3.2.0(graphql@16.10.0) - '@hypercerts-org/contracts': 2.0.0-alpha.12(ts-node@10.9.2(@swc/core@1.4.15)(@types/node@20.10.6)(typescript@5.5.3))(typescript@5.5.3) + '@hypercerts-org/contracts': 2.0.0-alpha.12(ts-node@10.9.2(@swc/core@1.4.15(@swc/helpers@0.5.15))(@types/node@20.10.6)(typescript@5.5.3))(typescript@5.5.3) '@openzeppelin/merkle-tree': 1.0.7 - '@swc/core': 1.10.9 + '@swc/core': 1.10.9(@swc/helpers@0.5.15) ajv: 8.16.0 axios: 1.7.9 dotenv: 16.4.7 - rollup-plugin-swc3: 0.11.2(@swc/core@1.10.9)(rollup@4.12.0) + rollup-plugin-swc3: 0.11.2(@swc/core@1.10.9(@swc/helpers@0.5.15))(rollup@4.12.0) viem: 2.23.3(typescript@5.5.3)(zod@3.24.1) zod: 3.24.1 transitivePeerDependencies: @@ -8972,16 +8411,16 @@ snapshots: - typescript - utf-8-validate - '@hypercerts-org/sdk@2.5.0-beta.6(graphql@16.10.0)(rollup@4.12.0)(ts-node@10.9.2(@swc/core@1.4.15)(@types/node@20.10.6)(typescript@5.5.3))(typescript@5.5.3)': + '@hypercerts-org/sdk@2.5.0-beta.6(@swc/helpers@0.5.15)(graphql@16.10.0)(rollup@4.12.0)(ts-node@10.9.2(@swc/core@1.4.15(@swc/helpers@0.5.15))(@types/node@20.10.6)(typescript@5.5.3))(typescript@5.5.3)': dependencies: '@graphql-typed-document-node/core': 3.2.0(graphql@16.10.0) - '@hypercerts-org/contracts': 2.0.0-alpha.12(ts-node@10.9.2(@swc/core@1.4.15)(@types/node@20.10.6)(typescript@5.5.3))(typescript@5.5.3) + '@hypercerts-org/contracts': 2.0.0-alpha.12(ts-node@10.9.2(@swc/core@1.4.15(@swc/helpers@0.5.15))(@types/node@20.10.6)(typescript@5.5.3))(typescript@5.5.3) '@openzeppelin/merkle-tree': 1.0.7 - '@swc/core': 1.10.9 + '@swc/core': 1.10.9(@swc/helpers@0.5.15) ajv: 8.16.0 
axios: 1.7.9 dotenv: 16.4.7 - rollup-plugin-swc3: 0.11.2(@swc/core@1.10.9)(rollup@4.12.0) + rollup-plugin-swc3: 0.11.2(@swc/core@1.10.9(@swc/helpers@0.5.15))(rollup@4.12.0) viem: 2.22.15(typescript@5.5.3)(zod@3.24.1) zod: 3.24.1 transitivePeerDependencies: @@ -8996,14 +8435,6 @@ snapshots: - typescript - utf-8-validate - '@inquirer/checkbox@2.3.5': - dependencies: - '@inquirer/core': 8.2.2 - '@inquirer/figures': 1.0.3 - '@inquirer/type': 1.3.3 - ansi-escapes: 4.3.2 - chalk: 4.1.2 - '@inquirer/checkbox@4.0.6(@types/node@20.10.6)': dependencies: '@inquirer/core': 10.1.4(@types/node@20.10.6) @@ -9013,11 +8444,6 @@ snapshots: ansi-escapes: 4.3.2 yoctocolors-cjs: 2.1.2 - '@inquirer/confirm@3.1.9': - dependencies: - '@inquirer/core': 8.2.2 - '@inquirer/type': 1.3.3 - '@inquirer/confirm@5.1.3(@types/node@20.10.6)': dependencies: '@inquirer/core': 10.1.4(@types/node@20.10.6) @@ -9038,28 +8464,6 @@ snapshots: transitivePeerDependencies: - '@types/node' - '@inquirer/core@8.2.2': - dependencies: - '@inquirer/figures': 1.0.3 - '@inquirer/type': 1.3.3 - '@types/mute-stream': 0.0.4 - '@types/node': 20.14.0 - '@types/wrap-ansi': 3.0.0 - ansi-escapes: 4.3.2 - chalk: 4.1.2 - cli-spinners: 2.9.2 - cli-width: 4.1.0 - mute-stream: 1.0.0 - signal-exit: 4.1.0 - strip-ansi: 6.0.1 - wrap-ansi: 6.2.0 - - '@inquirer/editor@2.1.9': - dependencies: - '@inquirer/core': 8.2.2 - '@inquirer/type': 1.3.3 - external-editor: 3.1.0 - '@inquirer/editor@4.2.3(@types/node@20.10.6)': dependencies: '@inquirer/core': 10.1.4(@types/node@20.10.6) @@ -9067,12 +8471,6 @@ snapshots: '@types/node': 20.10.6 external-editor: 3.1.0 - '@inquirer/expand@2.1.9': - dependencies: - '@inquirer/core': 8.2.2 - '@inquirer/type': 1.3.3 - chalk: 4.1.2 - '@inquirer/expand@4.0.6(@types/node@20.10.6)': dependencies: '@inquirer/core': 10.1.4(@types/node@20.10.6) @@ -9080,15 +8478,8 @@ snapshots: '@types/node': 20.10.6 yoctocolors-cjs: 2.1.2 - '@inquirer/figures@1.0.3': {} - '@inquirer/figures@1.0.9': {} - '@inquirer/input@2.1.9': 
- dependencies: - '@inquirer/core': 8.2.2 - '@inquirer/type': 1.3.3 - '@inquirer/input@4.1.3(@types/node@20.10.6)': dependencies: '@inquirer/core': 10.1.4(@types/node@20.10.6) @@ -9101,12 +8492,6 @@ snapshots: '@inquirer/type': 3.0.2(@types/node@20.10.6) '@types/node': 20.10.6 - '@inquirer/password@2.1.9': - dependencies: - '@inquirer/core': 8.2.2 - '@inquirer/type': 1.3.3 - ansi-escapes: 4.3.2 - '@inquirer/password@4.0.6(@types/node@20.10.6)': dependencies: '@inquirer/core': 10.1.4(@types/node@20.10.6) @@ -9114,17 +8499,6 @@ snapshots: '@types/node': 20.10.6 ansi-escapes: 4.3.2 - '@inquirer/prompts@5.0.5': - dependencies: - '@inquirer/checkbox': 2.3.5 - '@inquirer/confirm': 3.1.9 - '@inquirer/editor': 2.1.9 - '@inquirer/expand': 2.1.9 - '@inquirer/input': 2.1.9 - '@inquirer/password': 2.1.9 - '@inquirer/rawlist': 2.1.9 - '@inquirer/select': 2.3.5 - '@inquirer/prompts@7.2.3(@types/node@20.10.6)': dependencies: '@inquirer/checkbox': 4.0.6(@types/node@20.10.6) @@ -9139,12 +8513,6 @@ snapshots: '@inquirer/select': 4.0.6(@types/node@20.10.6) '@types/node': 20.10.6 - '@inquirer/rawlist@2.1.9': - dependencies: - '@inquirer/core': 8.2.2 - '@inquirer/type': 1.3.3 - chalk: 4.1.2 - '@inquirer/rawlist@4.0.6(@types/node@20.10.6)': dependencies: '@inquirer/core': 10.1.4(@types/node@20.10.6) @@ -9160,14 +8528,6 @@ snapshots: '@types/node': 20.10.6 yoctocolors-cjs: 2.1.2 - '@inquirer/select@2.3.5': - dependencies: - '@inquirer/core': 8.2.2 - '@inquirer/figures': 1.0.3 - '@inquirer/type': 1.3.3 - ansi-escapes: 4.3.2 - chalk: 4.1.2 - '@inquirer/select@4.0.6(@types/node@20.10.6)': dependencies: '@inquirer/core': 10.1.4(@types/node@20.10.6) @@ -9177,8 +8537,6 @@ snapshots: ansi-escapes: 4.3.2 yoctocolors-cjs: 2.1.2 - '@inquirer/type@1.3.3': {} - '@inquirer/type@3.0.2(@types/node@20.10.6)': dependencies: '@types/node': 20.10.6 @@ -9236,6 +8594,23 @@ snapshots: '@istanbuljs/schema@0.1.3': {} + '@jest/create-cache-key-function@29.7.0': + dependencies: + '@jest/types': 29.6.3 + + 
'@jest/schemas@29.6.3': + dependencies: + '@sinclair/typebox': 0.27.8 + + '@jest/types@29.6.3': + dependencies: + '@jest/schemas': 29.6.3 + '@types/istanbul-lib-coverage': 2.0.6 + '@types/istanbul-reports': 3.0.4 + '@types/node': 20.10.6 + '@types/yargs': 17.0.33 + chalk: 4.1.2 + '@jridgewell/gen-mapping@0.3.3': dependencies: '@jridgewell/set-array': 1.1.2 @@ -9510,7 +8885,7 @@ snapshots: '@opentelemetry/core': 1.24.1(@opentelemetry/api@1.8.0) '@opentelemetry/instrumentation': 0.51.1(@opentelemetry/api@1.8.0) '@opentelemetry/semantic-conventions': 1.24.1 - semver: 7.6.0 + semver: 7.6.3 transitivePeerDependencies: - supports-color @@ -9595,7 +8970,7 @@ snapshots: '@types/shimmer': 1.0.5 import-in-the-middle: 1.4.2 require-in-the-middle: 7.3.0 - semver: 7.6.0 + semver: 7.6.3 shimmer: 1.2.1 transitivePeerDependencies: - supports-color @@ -9608,7 +8983,7 @@ snapshots: '@types/shimmer': 1.0.5 import-in-the-middle: 1.7.1 require-in-the-middle: 7.3.0 - semver: 7.6.0 + semver: 7.6.3 shimmer: 1.2.1 transitivePeerDependencies: - supports-color @@ -9620,7 +8995,7 @@ snapshots: '@types/shimmer': 1.0.5 import-in-the-middle: 1.7.4 require-in-the-middle: 7.3.0 - semver: 7.6.0 + semver: 7.6.3 shimmer: 1.2.1 transitivePeerDependencies: - supports-color @@ -9707,31 +9082,6 @@ snapshots: '@pkgjs/parseargs@0.11.0': optional: true - '@prisma/debug@5.14.0-dev.34': {} - - '@prisma/engines-version@5.14.0-6.264f24ce0b2f544ff968ff76bfaa999de1161361': {} - - '@prisma/engines@5.14.0-dev.34': - dependencies: - '@prisma/debug': 5.14.0-dev.34 - '@prisma/engines-version': 5.14.0-6.264f24ce0b2f544ff968ff76bfaa999de1161361 - '@prisma/fetch-engine': 5.14.0-dev.34 - '@prisma/get-platform': 5.14.0-dev.34 - - '@prisma/fetch-engine@5.14.0-dev.34': - dependencies: - '@prisma/debug': 5.14.0-dev.34 - '@prisma/engines-version': 5.14.0-6.264f24ce0b2f544ff968ff76bfaa999de1161361 - '@prisma/get-platform': 5.14.0-dev.34 - - '@prisma/generator-helper@5.14.0-dev.34': - dependencies: - '@prisma/debug': 
5.14.0-dev.34 - - '@prisma/get-platform@5.14.0-dev.34': - dependencies: - '@prisma/debug': 5.14.0-dev.34 - '@prisma/instrumentation@5.13.0': dependencies: '@opentelemetry/api': 1.8.0 @@ -9740,24 +9090,6 @@ snapshots: transitivePeerDependencies: - supports-color - '@prisma/internals@5.14.0-dev.34': - dependencies: - '@prisma/debug': 5.14.0-dev.34 - '@prisma/engines': 5.14.0-dev.34 - '@prisma/fetch-engine': 5.14.0-dev.34 - '@prisma/generator-helper': 5.14.0-dev.34 - '@prisma/get-platform': 5.14.0-dev.34 - '@prisma/prisma-schema-wasm': 5.14.0-6.264f24ce0b2f544ff968ff76bfaa999de1161361 - '@prisma/schema-files-loader': 5.14.0-dev.34 - arg: 5.0.2 - prompts: 2.4.2 - - '@prisma/prisma-schema-wasm@5.14.0-6.264f24ce0b2f544ff968ff76bfaa999de1161361': {} - - '@prisma/schema-files-loader@5.14.0-dev.34': - dependencies: - fs-extra: 11.1.1 - '@protobufjs/aspromise@1.1.2': {} '@protobufjs/base64@1.1.2': {} @@ -9882,15 +9214,6 @@ snapshots: - typescript - zod - '@sagold/json-pointer@5.1.2': {} - - '@sagold/json-query@6.2.0': - dependencies: - '@sagold/json-pointer': 5.1.2 - ebnf: 1.9.1 - - '@scaleleap/pg-format@1.0.0': {} - '@scure/base@1.1.5': {} '@scure/base@1.1.7': {} @@ -9964,11 +9287,6 @@ snapshots: '@sentry/utils': 5.30.0 tslib: 1.14.1 - '@sentry/core@7.114.0': - dependencies: - '@sentry/types': 7.114.0 - '@sentry/utils': 7.114.0 - '@sentry/core@8.2.1': dependencies: '@sentry/types': 8.2.1 @@ -9980,13 +9298,6 @@ snapshots: '@sentry/utils': 5.30.0 tslib: 1.14.1 - '@sentry/integrations@7.114.0': - dependencies: - '@sentry/core': 7.114.0 - '@sentry/types': 7.114.0 - '@sentry/utils': 7.114.0 - localforage: 1.10.0 - '@sentry/minimal@5.30.0': dependencies: '@sentry/hub': 5.30.0 @@ -10072,8 +9383,6 @@ snapshots: '@sentry/types@5.30.0': {} - '@sentry/types@7.114.0': {} - '@sentry/types@8.2.1': {} '@sentry/utils@5.30.0': @@ -10081,10 +9390,6 @@ snapshots: '@sentry/types': 5.30.0 tslib: 1.14.1 - '@sentry/utils@7.114.0': - dependencies: - '@sentry/types': 7.114.0 - 
'@sentry/utils@8.2.1': dependencies: '@sentry/types': 8.2.1 @@ -10093,6 +9398,8 @@ snapshots: dependencies: '@types/hast': 3.0.4 + '@sinclair/typebox@0.27.8': {} + '@sindresorhus/is@4.6.0': {} '@sinonjs/commons@2.0.0': @@ -10119,58 +9426,6 @@ snapshots: '@sinonjs/text-encoding@0.7.2': {} - '@snaplet/copycat@5.0.0': - dependencies: - '@faker-js/faker': 8.4.1 - fictional: 2.1.1 - string-argv: 0.3.2 - uuid: 8.3.2 - - '@snaplet/seed@0.97.20(@snaplet/copycat@5.0.0)(@types/pg@8.11.6)(encoding@0.1.13)(pg@8.12.0)': - dependencies: - '@inquirer/prompts': 5.0.5 - '@prisma/generator-helper': 5.14.0-dev.34 - '@prisma/internals': 5.14.0-dev.34 - '@scaleleap/pg-format': 1.0.0 - '@snaplet/copycat': 5.0.0 - '@total-typescript/ts-reset': 0.5.1 - '@trpc/client': 10.45.2(@trpc/server@10.45.2) - '@trpc/server': 10.45.2 - ansi-escapes: 6.2.1 - boxen: 7.1.1 - c12: 1.10.0 - change-case: 5.4.4 - ci-info: 4.0.0 - debug: 4.3.4(supports-color@8.1.1) - dedent: 1.5.3 - deepmerge: 4.3.1 - execa: 8.0.1 - exit-hook: 4.0.0 - find-up: 7.0.0 - fs-extra: 11.2.0 - inflection: 3.0.0 - javascript-stringify: 2.1.0 - json-schema-library: 9.3.4 - kleur: 4.1.5 - multimatch: 7.0.0 - ora: 8.0.1 - portfinder: 1.0.32 - posthog-node: 4.0.1(debug@4.3.4) - quicktype-core: 23.0.149(encoding@0.1.13) - remeda: 1.61.0 - sqlstring: 2.3.3 - terminal-link: 3.0.0 - uuid: 9.0.1 - yargs: 17.7.2 - zod: 3.23.8 - optionalDependencies: - '@types/pg': 8.11.6 - pg: 8.12.0 - transitivePeerDependencies: - - babel-plugin-macros - - encoding - - supports-color - '@storacha/one-webcrypto@1.0.1': {} '@supabase/auth-js@2.63.1': @@ -10215,10 +9470,10 @@ snapshots: - bufferutil - utf-8-validate - '@swc/cli@0.3.12(@swc/core@1.4.15)(chokidar@3.6.0)': + '@swc/cli@0.3.12(@swc/core@1.4.15(@swc/helpers@0.5.15))(chokidar@3.5.3)': dependencies: '@mole-inc/bin-wrapper': 8.0.1 - '@swc/core': 1.4.15 + '@swc/core': 1.4.15(@swc/helpers@0.5.15) '@swc/counter': 0.1.3 commander: 8.3.0 fast-glob: 3.3.2 @@ -10228,7 +9483,7 @@ snapshots: slash: 3.0.0 
source-map: 0.7.4 optionalDependencies: - chokidar: 3.6.0 + chokidar: 3.5.3 '@swc/core-darwin-arm64@1.10.9': optional: true @@ -10290,7 +9545,7 @@ snapshots: '@swc/core-win32-x64-msvc@1.4.15': optional: true - '@swc/core@1.10.9': + '@swc/core@1.10.9(@swc/helpers@0.5.15)': dependencies: '@swc/counter': 0.1.3 '@swc/types': 0.1.17 @@ -10305,8 +9560,9 @@ snapshots: '@swc/core-win32-arm64-msvc': 1.10.9 '@swc/core-win32-ia32-msvc': 1.10.9 '@swc/core-win32-x64-msvc': 1.10.9 + '@swc/helpers': 0.5.15 - '@swc/core@1.4.15': + '@swc/core@1.4.15(@swc/helpers@0.5.15)': dependencies: '@swc/counter': 0.1.3 '@swc/types': 0.1.6 @@ -10321,9 +9577,21 @@ snapshots: '@swc/core-win32-arm64-msvc': 1.4.15 '@swc/core-win32-ia32-msvc': 1.4.15 '@swc/core-win32-x64-msvc': 1.4.15 + '@swc/helpers': 0.5.15 '@swc/counter@0.1.3': {} + '@swc/helpers@0.5.15': + dependencies: + tslib: 2.8.1 + + '@swc/jest@0.2.37(@swc/core@1.4.15(@swc/helpers@0.5.15))': + dependencies: + '@jest/create-cache-key-function': 29.7.0 + '@swc/core': 1.4.15(@swc/helpers@0.5.15) + '@swc/counter': 0.1.3 + jsonc-parser: 3.2.0 + '@swc/types@0.1.17': dependencies: '@swc/counter': 0.1.3 @@ -10338,14 +9606,6 @@ snapshots: '@tokenizer/token@0.3.0': {} - '@total-typescript/ts-reset@0.5.1': {} - - '@trpc/client@10.45.2(@trpc/server@10.45.2)': - dependencies: - '@trpc/server': 10.45.2 - - '@trpc/server@10.45.2': {} - '@ts-morph/common@0.20.0': dependencies: fast-glob: 3.3.2 @@ -10465,6 +9725,16 @@ snapshots: '@types/http-errors@2.0.4': {} + '@types/istanbul-lib-coverage@2.0.6': {} + + '@types/istanbul-lib-report@3.0.3': + dependencies: + '@types/istanbul-lib-coverage': 2.0.6 + + '@types/istanbul-reports@3.0.4': + dependencies: + '@types/istanbul-lib-report': 3.0.3 + '@types/js-yaml@4.0.9': {} '@types/json-schema@7.0.15': {} @@ -10523,10 +9793,6 @@ snapshots: dependencies: '@types/express': 4.17.21 - '@types/mute-stream@0.0.4': - dependencies: - '@types/node': 20.10.6 - '@types/mysql@2.15.22': dependencies: '@types/node': 20.10.6 @@ 
-10539,10 +9805,6 @@ snapshots: dependencies: undici-types: 5.26.5 - '@types/node@20.14.0': - dependencies: - undici-types: 5.26.5 - '@types/pbkdf2@3.1.2': dependencies: '@types/node': 20.10.6 @@ -10607,14 +9869,16 @@ snapshots: '@types/unist@3.0.2': {} - '@types/urijs@1.19.25': {} - - '@types/wrap-ansi@3.0.0': {} - '@types/ws@8.5.10': dependencies: '@types/node': 20.10.6 + '@types/yargs-parser@21.0.3': {} + + '@types/yargs@17.0.33': + dependencies: + '@types/yargs-parser': 21.0.3 + '@typescript-eslint/eslint-plugin@7.7.0(@typescript-eslint/parser@7.7.0(eslint@8.56.0)(typescript@5.5.3))(eslint@8.56.0)(typescript@5.5.3)': dependencies: '@eslint-community/regexpp': 4.10.0 @@ -10675,7 +9939,7 @@ snapshots: globby: 11.1.0 is-glob: 4.0.3 minimatch: 9.0.4 - semver: 7.6.0 + semver: 7.6.3 ts-api-utils: 1.3.0(typescript@5.5.3) optionalDependencies: typescript: 5.5.3 @@ -10714,14 +9978,6 @@ snapshots: '@ucanto/interface': 10.0.1 multiformats: 11.0.2 - '@ucanto/core@9.0.1': - dependencies: - '@ipld/car': 5.2.5 - '@ipld/dag-cbor': 9.0.7 - '@ipld/dag-ucan': 3.4.0 - '@ucanto/interface': 9.0.0 - multiformats: 11.0.2 - '@ucanto/interface@10.0.1': dependencies: '@ipld/dag-ucan': 3.4.0 @@ -11019,10 +10275,6 @@ snapshots: '@whatwg-node/fetch': 0.10.3 tslib: 2.8.1 - '@wry/equality@0.1.11': - dependencies: - tslib: 1.14.1 - JSONStream@1.3.5: dependencies: jsonparse: 1.3.1 @@ -11050,10 +10302,6 @@ snapshots: typescript: 5.5.3 zod: 3.24.1 - abort-controller@3.0.0: - dependencies: - event-target-shim: 5.0.1 - accepts@1.3.8: dependencies: mime-types: 2.1.35 @@ -11075,6 +10323,8 @@ snapshots: acorn@8.11.3: {} + acorn@8.14.0: {} + actor@2.3.1: {} adm-zip@0.4.16: {} @@ -11128,12 +10378,6 @@ snapshots: dependencies: type-fest: 0.21.3 - ansi-escapes@5.0.0: - dependencies: - type-fest: 1.4.0 - - ansi-escapes@6.2.1: {} - ansi-escapes@7.0.0: dependencies: environment: 1.1.0 @@ -11159,42 +10403,20 @@ snapshots: normalize-path: 3.0.0 picomatch: 2.3.1 - apollo-link@1.2.14(graphql@16.10.0): - 
dependencies: - apollo-utilities: 1.3.4(graphql@16.10.0) - graphql: 16.10.0 - ts-invariant: 0.4.4 - tslib: 1.14.1 - zen-observable-ts: 0.8.21 - - apollo-utilities@1.3.4(graphql@16.10.0): - dependencies: - '@wry/equality': 0.1.11 - fast-json-stable-stringify: 2.1.0 - graphql: 16.10.0 - ts-invariant: 0.4.4 - tslib: 1.14.1 - append-field@1.0.0: {} arch@2.2.0: {} arg@4.1.3: {} - arg@5.0.2: {} - argparse@2.0.1: {} - array-differ@4.0.0: {} - array-flatten@1.1.1: {} array-ify@1.0.0: {} array-union@2.1.0: {} - array-union@3.0.1: {} - asap@2.0.6: {} asn1js@3.0.5: @@ -11207,10 +10429,6 @@ snapshots: astral-regex@2.0.0: {} - async@2.6.4: - dependencies: - lodash: 4.17.21 - asynckit@0.4.0: {} atomically@2.0.2: @@ -11220,14 +10438,6 @@ snapshots: auto-bind@4.0.0: {} - axios@1.6.5(debug@4.3.4): - dependencies: - follow-redirects: 1.15.4(debug@4.3.4) - form-data: 4.0.0 - proxy-from-env: 1.1.0 - transitivePeerDependencies: - - debug - axios@1.7.9: dependencies: follow-redirects: 1.15.6(debug@4.4.0) @@ -11296,7 +10506,7 @@ snapshots: bin-version-check@5.1.0: dependencies: bin-version: 6.0.0 - semver: 7.5.4 + semver: 7.6.3 semver-truncate: 3.0.0 bin-version@6.0.0: @@ -11348,17 +10558,6 @@ snapshots: widest-line: 3.1.0 wrap-ansi: 7.0.0 - boxen@7.1.1: - dependencies: - ansi-align: 3.0.1 - camelcase: 7.0.1 - chalk: 5.3.0 - cli-boxes: 3.0.0 - string-width: 5.1.2 - type-fest: 2.19.0 - widest-line: 4.0.1 - wrap-ansi: 8.1.0 - brace-expansion@1.1.11: dependencies: balanced-match: 1.0.2 @@ -11378,8 +10577,6 @@ snapshots: brorand@1.1.0: {} - browser-or-node@2.1.1: {} - browser-readablestream-to-it@1.0.3: {} browser-stdout@1.3.1: {} @@ -11436,21 +10633,6 @@ snapshots: bytes@3.1.2: {} - c12@1.10.0: - dependencies: - chokidar: 3.6.0 - confbox: 0.1.7 - defu: 6.1.4 - dotenv: 16.4.5 - giget: 1.2.3 - jiti: 1.21.0 - mlly: 1.7.0 - ohash: 1.1.3 - pathe: 1.1.2 - perfect-debounce: 1.0.0 - pkg-types: 1.0.3 - rc9: 2.1.2 - cac@6.7.14: {} cacheable-lookup@5.0.4: {} @@ -11484,8 +10666,6 @@ snapshots: 
camelcase@6.3.0: {} - camelcase@7.0.1: {} - caniuse-lite@1.0.30001588: {} capital-case@1.0.4: @@ -11563,8 +10743,6 @@ snapshots: snake-case: 3.0.4 tslib: 2.6.2 - change-case@5.4.4: {} - chardet@0.7.0: {} check-error@2.0.0: {} @@ -11583,55 +10761,29 @@ snapshots: optionalDependencies: fsevents: 2.3.3 - chokidar@3.6.0: - dependencies: - anymatch: 3.1.3 - braces: 3.0.3 - glob-parent: 5.1.2 - is-binary-path: 2.1.0 - is-glob: 4.0.3 - normalize-path: 3.0.0 - readdirp: 3.6.0 - optionalDependencies: - fsevents: 2.3.3 - chokidar@4.0.1: dependencies: readdirp: 4.0.2 - chownr@2.0.0: {} - chownr@3.0.0: {} ci-info@2.0.0: {} - ci-info@4.0.0: {} - cipher-base@1.0.4: dependencies: inherits: 2.0.4 safe-buffer: 5.2.1 - citty@0.1.6: - dependencies: - consola: 3.2.3 - cjs-module-lexer@1.3.1: {} clean-stack@2.2.0: {} cli-boxes@2.2.1: {} - cli-boxes@3.0.0: {} - cli-cursor@3.1.0: dependencies: restore-cursor: 3.1.0 - cli-cursor@4.0.0: - dependencies: - restore-cursor: 4.0.0 - cli-cursor@5.0.0: dependencies: restore-cursor: 5.1.0 @@ -11680,8 +10832,6 @@ snapshots: code-block-writer@12.0.0: {} - collection-utils@1.0.1: {} - color-convert@1.9.3: dependencies: color-name: 1.1.3 @@ -11745,11 +10895,7 @@ snapshots: dot-prop: 7.2.0 env-paths: 3.0.0 json-schema-typed: 8.0.1 - semver: 7.6.0 - - confbox@0.1.7: {} - - consola@3.2.3: {} + semver: 7.6.3 constant-case@3.0.4: dependencies: @@ -11843,12 +10989,6 @@ snapshots: transitivePeerDependencies: - encoding - cross-fetch@4.0.0(encoding@0.1.13): - dependencies: - node-fetch: 2.7.0(encoding@0.1.13) - transitivePeerDependencies: - - encoding - cross-inspect@1.0.0: dependencies: tslib: 2.6.2 @@ -11877,14 +11017,14 @@ snapshots: data-uri-to-buffer@4.0.1: {} - dataloader@2.1.0: {} - dataloader@2.2.3: {} date-fns@2.30.0: dependencies: '@babel/runtime': 7.23.7 + date-fns@4.1.0: {} + debounce-fn@5.1.2: dependencies: mimic-fn: 4.0.0 @@ -11895,10 +11035,6 @@ snapshots: dependencies: ms: 2.0.0 - debug@3.2.7: - dependencies: - ms: 2.1.3 - 
debug@4.3.4(supports-color@5.5.0): dependencies: ms: 2.1.2 @@ -11923,20 +11059,14 @@ snapshots: decamelize@4.0.0: {} - decimal.js@10.5.0: {} - decompress-response@6.0.0: dependencies: mimic-response: 3.1.0 - dedent@1.5.3: {} - deep-eql@5.0.1: {} deep-is@0.1.4: {} - deepmerge@4.3.1: {} - defaults@1.0.4: dependencies: clone: 1.0.4 @@ -11949,8 +11079,6 @@ snapshots: es-errors: 1.3.0 gopd: 1.0.1 - defu@6.1.4: {} - delayed-stream@1.0.0: {} depd@1.1.2: {} @@ -11959,10 +11087,6 @@ snapshots: dependency-graph@0.11.0: {} - deprecated-decorator@0.1.6: {} - - destr@2.0.3: {} - destroy@1.2.0: {} detect-indent@6.1.0: {} @@ -12000,8 +11124,6 @@ snapshots: dotenv@16.3.1: {} - dotenv@16.4.5: {} - dotenv@16.4.7: {} dset@3.1.3: {} @@ -12010,8 +11132,6 @@ snapshots: eastasianwidth@0.2.0: {} - ebnf@1.9.1: {} - ee-first@1.1.1: {} electron-fetch@1.9.1: @@ -12259,12 +11379,8 @@ snapshots: is-hex-prefixed: 1.0.0 strip-hex-prefix: 1.0.0 - event-target-shim@5.0.1: {} - eventemitter3@5.0.1: {} - events@3.3.0: {} - evp_bytestokey@1.0.3: dependencies: md5.js: 1.3.5 @@ -12308,8 +11424,6 @@ snapshots: dependencies: pify: 2.3.0 - exit-hook@4.0.0: {} - expect-type@1.1.0: {} express@4.19.2: @@ -12365,8 +11479,6 @@ snapshots: extract-files@11.0.0: {} - fast-copy@3.0.2: {} - fast-decode-uri-component@1.0.1: {} fast-deep-equal@3.1.3: {} @@ -12424,13 +11536,6 @@ snapshots: node-domexception: 1.0.0 web-streams-polyfill: 3.3.2 - fictional@2.1.1: - dependencies: - decimal.js: 10.5.0 - fast-json-stable-stringify: 2.1.0 - fnv-plus: 1.3.1 - siphash: 1.2.0 - figures@3.2.0: dependencies: escape-string-regexp: 1.0.5 @@ -12510,12 +11615,6 @@ snapshots: flatted@3.2.9: {} - fnv-plus@1.3.1: {} - - follow-redirects@1.15.4(debug@4.3.4): - optionalDependencies: - debug: 4.3.4(supports-color@8.1.1) - follow-redirects@1.15.6(debug@4.4.0): optionalDependencies: debug: 4.4.0 @@ -12541,12 +11640,6 @@ snapshots: fresh@0.5.2: {} - fs-extra@11.1.1: - dependencies: - graceful-fs: 4.2.11 - jsonfile: 6.1.0 - universalify: 2.0.1 
- fs-extra@11.2.0: dependencies: graceful-fs: 4.2.11 @@ -12559,10 +11652,6 @@ snapshots: jsonfile: 4.0.0 universalify: 0.1.2 - fs-minipass@2.1.0: - dependencies: - minipass: 3.3.6 - fs.realpath@1.0.0: {} fsevents@2.3.3: @@ -12570,6 +11659,8 @@ snapshots: function-bind@1.1.2: {} + functional-red-black-tree@1.0.1: {} + gensync@1.0.0-beta.2: {} get-caller-file@2.0.5: {} @@ -12611,17 +11702,6 @@ snapshots: dependencies: resolve-pkg-maps: 1.0.0 - giget@1.2.3: - dependencies: - citty: 0.1.6 - consola: 3.2.3 - defu: 6.1.4 - node-fetch-native: 1.6.4 - nypm: 0.3.8 - ohash: 1.1.3 - pathe: 1.1.2 - tar: 6.2.1 - git-raw-commits@4.0.0: dependencies: dargs: 8.1.0 @@ -12747,17 +11827,6 @@ snapshots: - typescript - utf-8-validate - graphql-filter@1.1.5(graphql@16.10.0): - dependencies: - graphql: 16.10.0 - graphql-tools: 4.0.8(graphql@16.10.0) - - graphql-middleware@6.1.35(graphql@16.10.0): - dependencies: - '@graphql-tools/delegate': 8.8.1(graphql@16.10.0) - '@graphql-tools/schema': 8.5.1(graphql@16.10.0) - graphql: 16.10.0 - graphql-query-complexity@0.12.0(graphql@16.10.0): dependencies: graphql: 16.10.0 @@ -12781,15 +11850,6 @@ snapshots: graphql: 16.10.0 tslib: 2.6.2 - graphql-tools@4.0.8(graphql@16.10.0): - dependencies: - apollo-link: 1.2.14(graphql@16.10.0) - apollo-utilities: 1.3.4(graphql@16.10.0) - deprecated-decorator: 0.1.6 - graphql: 16.10.0 - iterall: 1.3.0 - uuid: 3.4.0 - graphql-ws@5.16.0(graphql@16.10.0): dependencies: graphql: 16.10.0 @@ -12820,7 +11880,7 @@ snapshots: optionalDependencies: uglify-js: 3.17.4 - hardhat@2.22.18(ts-node@10.9.2(@swc/core@1.4.15)(@types/node@20.10.6)(typescript@5.5.3))(typescript@5.5.3): + hardhat@2.22.18(ts-node@10.9.2(@swc/core@1.4.15(@swc/helpers@0.5.15))(@types/node@20.10.6)(typescript@5.5.3))(typescript@5.5.3): dependencies: '@ethersproject/abi': 5.7.0 '@metamask/eth-sig-util': 4.0.1 @@ -12867,7 +11927,7 @@ snapshots: uuid: 8.3.2 ws: 7.5.9 optionalDependencies: - ts-node: 
10.9.2(@swc/core@1.4.15)(@types/node@20.10.6)(typescript@5.5.3) + ts-node: 10.9.2(@swc/core@1.4.15(@swc/helpers@0.5.15))(@types/node@20.10.6)(typescript@5.5.3) typescript: 5.5.3 transitivePeerDependencies: - bufferutil @@ -12979,8 +12039,6 @@ snapshots: ignore@5.3.1: {} - immediate@3.0.6: {} - immutable@3.7.6: {} immutable@4.3.4: {} @@ -13020,8 +12078,6 @@ snapshots: indent-string@4.0.0: {} - inflection@3.0.0: {} - inflight@1.0.6: dependencies: once: 1.4.0 @@ -13115,8 +12171,6 @@ snapshots: is-interactive@1.0.0: {} - is-interactive@2.0.0: {} - is-lower-case@2.0.2: dependencies: tslib: 2.6.2 @@ -13153,16 +12207,10 @@ snapshots: is-unicode-supported@0.1.0: {} - is-unicode-supported@1.3.0: {} - - is-unicode-supported@2.0.0: {} - is-upper-case@2.0.2: dependencies: tslib: 2.6.2 - is-url@1.2.4: {} - is-what@4.1.16: {} is-windows@1.0.2: {} @@ -13226,8 +12274,6 @@ snapshots: p-fifo: 1.0.0 readable-stream: 3.6.2 - iterall@1.3.0: {} - jackspeak@2.3.6: dependencies: '@isaacs/cliui': 8.0.2 @@ -13240,14 +12286,10 @@ snapshots: optionalDependencies: '@pkgjs/parseargs': 0.11.0 - javascript-stringify@2.1.0: {} - jiti@1.21.0: {} jose@5.2.3: {} - js-base64@3.7.7: {} - js-sha3@0.8.0: {} js-sha3@0.9.3: {} @@ -13264,16 +12306,6 @@ snapshots: json-parse-even-better-errors@2.3.1: {} - json-schema-library@9.3.4: - dependencies: - '@sagold/json-pointer': 5.1.2 - '@sagold/json-query': 6.2.0 - deepmerge: 4.3.1 - fast-copy: 3.0.2 - fast-deep-equal: 3.1.3 - smtp-address-parser: 1.0.10 - valid-url: 1.0.9 - json-schema-traverse@0.4.1: {} json-schema-traverse@1.0.0: {} @@ -13326,27 +12358,19 @@ snapshots: dependencies: json-buffer: 3.0.1 - kleur@3.0.3: {} - - kleur@4.1.5: {} - - kysely-supabase@0.2.0(@supabase/supabase-js@2.42.5)(kysely@0.27.4)(supabase@1.191.3): + kysely-supabase@0.2.0(@supabase/supabase-js@2.42.5)(kysely@0.27.6)(supabase@1.191.3): dependencies: '@supabase/supabase-js': 2.42.5 - kysely: 0.27.4 + kysely: 0.27.6 supabase: 1.191.3 - kysely@0.27.4: {} + kysely@0.27.6: {} levn@0.4.1: 
dependencies: prelude-ls: 1.2.1 type-check: 0.4.0 - lie@3.1.1: - dependencies: - immediate: 3.0.6 - lilconfig@3.1.2: {} lines-and-columns@1.2.4: {} @@ -13392,9 +12416,7 @@ snapshots: rfdc: 1.4.1 wrap-ansi: 9.0.0 - localforage@1.10.0: - dependencies: - lie: 3.1.1 + load-tsconfig@0.2.5: {} locate-path@5.0.0: dependencies: @@ -13437,11 +12459,6 @@ snapshots: chalk: 4.1.2 is-unicode-supported: 0.1.0 - log-symbols@6.0.0: - dependencies: - chalk: 5.3.0 - is-unicode-supported: 1.3.0 - log-update@4.0.0: dependencies: ansi-escapes: 4.3.2 @@ -13639,21 +12656,10 @@ snapshots: minimist@1.2.8: {} - minipass@3.3.6: - dependencies: - yallist: 4.0.0 - - minipass@5.0.0: {} - minipass@7.0.4: {} minipass@7.1.2: {} - minizlib@2.1.2: - dependencies: - minipass: 3.3.6 - yallist: 4.0.0 - minizlib@3.0.1: dependencies: minipass: 7.1.2 @@ -13663,26 +12669,10 @@ snapshots: dependencies: minimist: 1.2.8 - mkdirp@1.0.4: {} - mkdirp@2.1.6: {} mkdirp@3.0.1: {} - mlly@1.4.2: - dependencies: - acorn: 8.11.3 - pathe: 1.1.1 - pkg-types: 1.0.3 - ufo: 1.3.2 - - mlly@1.7.0: - dependencies: - acorn: 8.11.3 - pathe: 1.1.2 - pkg-types: 1.1.1 - ufo: 1.5.3 - mnemonist@0.38.5: dependencies: obliterator: 2.0.4 @@ -13713,6 +12703,8 @@ snapshots: module-details-from-path@1.0.3: {} + moment@2.30.1: {} + moo@0.5.2: {} ms@2.0.0: {} @@ -13739,18 +12731,10 @@ snapshots: multiformats@13.2.2: {} - multimatch@7.0.0: - dependencies: - array-differ: 4.0.0 - array-union: 3.0.1 - minimatch: 9.0.4 - murmurhash3js-revisited@3.0.0: {} mute-stream@0.0.8: {} - mute-stream@1.0.0: {} - mute-stream@2.0.0: {} nanoid@3.3.3: {} @@ -13795,7 +12779,7 @@ snapshots: node-abi@3.62.0: dependencies: - semver: 7.6.0 + semver: 7.6.3 node-addon-api@2.0.2: {} @@ -13808,8 +12792,6 @@ snapshots: node-domexception@1.0.0: {} - node-fetch-native@1.6.4: {} - node-fetch@2.7.0(encoding@0.1.13): dependencies: whatwg-url: 5.0.0 @@ -13889,16 +12871,10 @@ snapshots: bn.js: 4.11.6 strip-hex-prefix: 1.0.0 - nypm@0.3.8: - dependencies: - citty: 0.1.6 - 
consola: 3.2.3 - execa: 8.0.1 - pathe: 1.1.2 - ufo: 1.5.3 - object-assign@4.1.1: {} + object-hash@2.2.0: {} + object-inspect@1.13.1: {} object-keys@1.1.1: {} @@ -13907,8 +12883,6 @@ snapshots: obuf@1.1.2: {} - ohash@1.1.3: {} - on-finished@2.4.1: dependencies: ee-first: 1.1.1 @@ -13961,18 +12935,6 @@ snapshots: strip-ansi: 6.0.1 wcwidth: 1.0.1 - ora@8.0.1: - dependencies: - chalk: 5.3.0 - cli-cursor: 4.0.0 - cli-spinners: 2.9.2 - is-interactive: 2.0.0 - is-unicode-supported: 2.0.0 - log-symbols: 6.0.0 - stdin-discarder: 0.2.2 - string-width: 7.1.0 - strip-ansi: 7.1.0 - os-filter-obj@2.0.0: dependencies: arch: 2.2.0 @@ -14071,10 +13033,6 @@ snapshots: package-json-from-dist@1.0.1: {} - pako@0.2.9: {} - - pako@1.0.11: {} - param-case@3.0.4: dependencies: dot-case: 3.0.4 @@ -14149,8 +13107,6 @@ snapshots: path-type@4.0.0: {} - pathe@1.1.1: {} - pathe@1.1.2: {} pathval@2.0.0: {} @@ -14167,8 +13123,6 @@ snapshots: peek-readable@5.3.1: {} - perfect-debounce@1.0.0: {} - pg-cloudflare@1.1.1: optional: true @@ -14176,6 +13130,18 @@ snapshots: pg-int8@1.0.1: {} + pg-mem@3.0.5(kysely@0.27.6): + dependencies: + functional-red-black-tree: 1.0.1 + immutable: 4.3.4 + json-stable-stringify: 1.1.1 + lru-cache: 6.0.0 + moment: 2.30.1 + object-hash: 2.2.0 + pgsql-ast-parser: 12.0.1 + optionalDependencies: + kysely: 0.27.6 + pg-numeric@1.0.2: {} pg-pool@3.6.2(pg@8.12.0): @@ -14216,6 +13182,11 @@ snapshots: dependencies: split2: 4.2.0 + pgsql-ast-parser@12.0.1: + dependencies: + moo: 0.5.2 + nearley: 2.20.1 + picocolors@1.0.0: {} picocolors@1.1.1: {} @@ -14232,28 +13203,6 @@ snapshots: optionalDependencies: nice-napi: 1.0.2 - pkg-types@1.0.3: - dependencies: - jsonc-parser: 3.2.0 - mlly: 1.4.2 - pathe: 1.1.1 - - pkg-types@1.1.1: - dependencies: - confbox: 0.1.7 - mlly: 1.7.0 - pathe: 1.1.2 - - pluralize@8.0.0: {} - - portfinder@1.0.32: - dependencies: - async: 2.6.4 - debug: 3.2.7 - mkdirp: 0.5.6 - transitivePeerDependencies: - - supports-color - postcss@8.4.33: dependencies: nanoid: 
3.3.7 @@ -14282,30 +13231,16 @@ snapshots: postgres-range@1.1.4: {} - posthog-node@4.0.1(debug@4.3.4): - dependencies: - axios: 1.6.5(debug@4.3.4) - rusha: 0.8.14 - transitivePeerDependencies: - - debug - prelude-ls@1.2.1: {} prettier@3.3.2: {} process-nextick-args@2.0.1: {} - process@0.11.10: {} - promise@7.3.1: dependencies: asap: 2.0.6 - prompts@2.4.2: - dependencies: - kleur: 3.0.3 - sisteransi: 1.0.5 - protobufjs@7.2.5: dependencies: '@protobufjs/aspromise': 1.1.2 @@ -14357,26 +13292,6 @@ snapshots: quick-lru@5.1.1: {} - quicktype-core@23.0.149(encoding@0.1.13): - dependencies: - '@glideapps/ts-necessities': 2.2.3 - '@types/urijs': 1.19.25 - browser-or-node: 2.1.1 - collection-utils: 1.0.1 - cross-fetch: 4.0.0(encoding@0.1.13) - is-url: 1.2.4 - js-base64: 3.7.7 - lodash: 4.17.21 - pako: 1.0.11 - pluralize: 8.0.0 - readable-stream: 4.5.2 - unicode-properties: 1.4.1 - urijs: 1.19.11 - wordwrap: 1.0.0 - yaml: 2.5.0 - transitivePeerDependencies: - - encoding - rabin-rs@2.1.0: {} railroad-diagrams@1.0.0: {} @@ -14399,11 +13314,6 @@ snapshots: iconv-lite: 0.4.24 unpipe: 1.0.0 - rc9@2.1.2: - dependencies: - defu: 6.1.4 - destr: 2.0.3 - react-native-fetch-api@3.0.0: dependencies: p-defer: 3.0.0 @@ -14426,14 +13336,6 @@ snapshots: string_decoder: 1.3.0 util-deprecate: 1.0.2 - readable-stream@4.5.2: - dependencies: - abort-controller: 3.0.0 - buffer: 6.0.3 - events: 3.3.0 - process: 0.11.10 - string_decoder: 1.3.0 - readable-web-to-node-stream@3.0.2: dependencies: readable-stream: 3.6.2 @@ -14456,8 +13358,6 @@ snapshots: transitivePeerDependencies: - encoding - remeda@1.61.0: {} - remedial@1.0.8: {} remove-trailing-separator@1.1.0: {} @@ -14505,11 +13405,6 @@ snapshots: onetime: 5.1.2 signal-exit: 3.0.7 - restore-cursor@4.0.0: - dependencies: - onetime: 5.1.2 - signal-exit: 3.0.7 - restore-cursor@5.1.0: dependencies: onetime: 7.0.0 @@ -14542,11 +13437,11 @@ snapshots: dependencies: bn.js: 5.2.1 - rollup-plugin-swc3@0.11.2(@swc/core@1.10.9)(rollup@4.12.0): + 
rollup-plugin-swc3@0.11.2(@swc/core@1.10.9(@swc/helpers@0.5.15))(rollup@4.12.0): dependencies: '@fastify/deepmerge': 1.3.0 '@rollup/pluginutils': 5.1.0(rollup@4.12.0) - '@swc/core': 1.10.9 + '@swc/core': 1.10.9(@swc/helpers@0.5.15) get-tsconfig: 4.7.5 rollup: 4.12.0 rollup-preserve-directives: 1.1.1(rollup@4.12.0) @@ -14581,8 +13476,6 @@ snapshots: dependencies: queue-microtask: 1.2.3 - rusha@0.8.14: {} - rxjs@7.8.1: dependencies: tslib: 2.8.1 @@ -14607,7 +13500,7 @@ snapshots: semver-truncate@3.0.0: dependencies: - semver: 7.6.0 + semver: 7.6.3 semver@5.7.2: {} @@ -14728,10 +13621,6 @@ snapshots: nise: 5.1.5 supports-color: 7.2.0 - siphash@1.2.0: {} - - sisteransi@1.0.5: {} - slash@3.0.0: {} slice-ansi@3.0.0: @@ -14756,10 +13645,6 @@ snapshots: ansi-styles: 6.2.1 is-fullwidth-code-point: 5.0.0 - smtp-address-parser@1.0.10: - dependencies: - nearley: 2.20.1 - snake-case@3.0.4: dependencies: dot-case: 3.0.4 @@ -14804,8 +13689,6 @@ snapshots: dependencies: tslib: 2.6.2 - sqlstring@2.3.3: {} - stackback@0.0.2: {} stacktrace-parser@0.1.10: @@ -14816,8 +13699,6 @@ snapshots: std-env@3.8.0: {} - stdin-discarder@0.2.2: {} - stream-to-it@0.2.4: dependencies: get-iterator: 1.0.2 @@ -14911,11 +13792,6 @@ snapshots: dependencies: has-flag: 4.0.0 - supports-hyperlinks@2.3.0: - dependencies: - has-flag: 4.0.0 - supports-color: 7.2.0 - supports-preserve-symlinks-flag@1.0.0: {} swagger-ui-dist@5.17.9: {} @@ -14933,15 +13809,6 @@ snapshots: dependencies: '@noble/hashes': 1.7.1 - tar@6.2.1: - dependencies: - chownr: 2.0.0 - fs-minipass: 2.1.0 - minipass: 5.0.0 - minizlib: 2.1.2 - mkdirp: 1.0.4 - yallist: 4.0.0 - tar@7.4.3: dependencies: '@isaacs/fs-minipass': 4.0.1 @@ -14951,11 +13818,6 @@ snapshots: mkdirp: 3.0.1 yallist: 5.0.0 - terminal-link@3.0.0: - dependencies: - ansi-escapes: 5.0.0 - supports-hyperlinks: 2.3.0 - test-exclude@7.0.1: dependencies: '@istanbuljs/schema': 0.1.3 @@ -14968,8 +13830,6 @@ snapshots: through@2.3.8: {} - tiny-inflate@1.0.3: {} - tinybench@2.9.0: {} 
tinyexec@0.3.2: {} @@ -15035,10 +13895,6 @@ snapshots: optionalDependencies: typescript: 5.5.3 - ts-invariant@0.4.4: - dependencies: - tslib: 1.14.1 - ts-log@2.2.5: {} ts-morph@19.0.0: @@ -15046,7 +13902,7 @@ snapshots: '@ts-morph/common': 0.20.0 code-block-writer: 12.0.0 - ts-node@10.9.2(@swc/core@1.4.15)(@types/node@20.10.6)(typescript@5.5.3): + ts-node@10.9.2(@swc/core@1.4.15(@swc/helpers@0.5.15))(@types/node@20.10.6)(typescript@5.5.3): dependencies: '@cspotcode/source-map-support': 0.8.1 '@tsconfig/node10': 1.0.9 @@ -15064,7 +13920,7 @@ snapshots: v8-compile-cache-lib: 3.0.1 yn: 3.1.1 optionalDependencies: - '@swc/core': 1.4.15 + '@swc/core': 1.4.15(@swc/helpers@0.5.15) tsconfck@3.1.4(typescript@5.5.3): optionalDependencies: @@ -15080,8 +13936,6 @@ snapshots: tslib@2.4.0: {} - tslib@2.4.1: {} - tslib@2.6.2: {} tslib@2.8.1: {} @@ -15122,8 +13976,6 @@ snapshots: type-fest@0.7.1: {} - type-fest@1.4.0: {} - type-fest@2.19.0: {} type-fest@4.12.0: {} @@ -15172,10 +14024,6 @@ snapshots: uc.micro@2.1.0: {} - ufo@1.3.2: {} - - ufo@1.5.3: {} - uglify-js@3.17.4: optional: true @@ -15203,16 +14051,6 @@ snapshots: dependencies: '@fastify/busboy': 2.1.0 - unicode-properties@1.4.1: - dependencies: - base64-js: 1.5.1 - unicode-trie: 2.0.0 - - unicode-trie@2.0.0: - dependencies: - pako: 0.2.9 - tiny-inflate: 1.0.3 - unicorn-magic@0.1.0: {} universalify@0.1.2: {} @@ -15225,6 +14063,20 @@ snapshots: unpipe@1.0.0: {} + unplugin-swc@1.5.1(@swc/core@1.4.15(@swc/helpers@0.5.15))(rollup@4.12.0): + dependencies: + '@rollup/pluginutils': 5.1.0(rollup@4.12.0) + '@swc/core': 1.4.15(@swc/helpers@0.5.15) + load-tsconfig: 0.2.5 + unplugin: 1.16.1 + transitivePeerDependencies: + - rollup + + unplugin@1.16.1: + dependencies: + acorn: 8.14.0 + webpack-virtual-modules: 0.6.2 + update-browserslist-db@1.0.13(browserslist@4.23.0): dependencies: browserslist: 4.23.0 @@ -15243,8 +14095,6 @@ snapshots: dependencies: punycode: 2.3.1 - urijs@1.19.11: {} - urlpattern-polyfill@10.0.0: {} 
urlpattern-polyfill@8.0.2: {} @@ -15257,20 +14107,12 @@ snapshots: utils-merge@1.0.1: {} - uuid@3.4.0: {} - uuid@8.3.2: {} - uuid@9.0.1: {} - v8-compile-cache-lib@3.0.1: {} - valid-url@1.0.9: {} - validator@13.12.0: {} - value-or-promise@1.0.11: {} - value-or-promise@1.0.12: {} varint@6.0.0: {} @@ -15472,6 +14314,8 @@ snapshots: webidl-conversions@3.0.1: {} + webpack-virtual-modules@0.6.2: {} + whatwg-url@5.0.0: dependencies: tr46: 0.0.3 @@ -15498,10 +14342,6 @@ snapshots: dependencies: string-width: 4.2.3 - widest-line@4.0.1: - dependencies: - string-width: 5.1.2 - wonka@6.3.4: {} wordwrap@1.0.0: {} @@ -15631,13 +14471,6 @@ snapshots: yoctocolors-cjs@2.1.2: {} - zen-observable-ts@0.8.21: - dependencies: - tslib: 1.14.1 - zen-observable: 0.8.15 - - zen-observable@0.8.15: {} - zod@3.23.8: {} zod@3.24.1: {} diff --git a/schema.graphql b/schema.graphql index cd34d7dc..2ca9e120 100644 --- a/schema.graphql +++ b/schema.graphql @@ -11,9 +11,14 @@ type AllowlistRecord { """The entry index of the Merkle tree for the claimable fraction""" entry: Float + """The hypercert that the allow list record belongs to""" + hypercert: Hypercert + """The hypercert ID the claimable fraction belongs to""" hypercert_id: String - id: ID! 
+ + """The ID of the allow list record""" + id: String """The leaf of the Merkle tree for the claimable fraction""" leaf: String @@ -37,26 +42,39 @@ type AllowlistRecord { user_address: String } -input AllowlistRecordFetchInput { - by: AllowlistRecordSortOptions +input AllowlistRecordHypercertWhereInput { + attestations_count: NumberSearchOptions + burned: BooleanSearchOptions + creation_block_number: BigIntSearchOptions + creation_block_timestamp: BigIntSearchOptions + creator_address: StringSearchOptions + hypercert_id: StringSearchOptions + id: StringSearchOptions + last_update_block_number: BigIntSearchOptions + last_update_block_timestamp: BigIntSearchOptions + sales_count: NumberSearchOptions + token_id: BigIntSearchOptions + units: BigIntSearchOptions + uri: StringSearchOptions } input AllowlistRecordSortOptions { - claimed: SortOrder - entry: SortOrder - hypercert_id: SortOrder - leaf: SortOrder - proof: SortOrder - root: SortOrder - token_id: SortOrder - total_units: SortOrder - units: SortOrder - user_address: SortOrder + claimed: SortOrder = null + entry: SortOrder = null + hypercert_id: SortOrder = null + leaf: SortOrder = null + proof: SortOrder = null + root: SortOrder = null + token_id: SortOrder = null + total_units: SortOrder = null + units: SortOrder = null + user_address: SortOrder = null } input AllowlistRecordWhereInput { claimed: BooleanSearchOptions - entry: BigIntSearchOptions + entry: NumberSearchOptions + hypercert: AllowlistRecordHypercertWhereInput = {} hypercert_id: StringSearchOptions leaf: StringSearchOptions proof: StringArraySearchOptions @@ -86,7 +104,7 @@ type Attestation { """Hypercert related to the attestation""" hypercert: HypercertBaseType! - id: ID! 
+ id: ID """Block number at which the attestation was last updated""" last_update_block_number: EthBigInt @@ -94,12 +112,12 @@ type Attestation { """Timestamp at which the attestation was last updated""" last_update_block_timestamp: EthBigInt + """Metadata related to the attestation""" + metadata: Metadata! + """Address of the recipient of the attestation""" recipient: String - """Address of the resolver contract for the attestation""" - resolver: String - """Unique identifier of the EAS schema used to create the attestation""" schema_uid: String @@ -107,51 +125,38 @@ type Attestation { uid: ID } -type AttestationBaseType { - """Address of the creator of the attestation""" - attester: String - - """Block number at which the attestation was created""" - creation_block_number: EthBigInt - - """Timestamp at which the attestation was created""" - creation_block_timestamp: EthBigInt - - """Encoded data of the attestation""" - data: JSON - id: ID! - - """Block number at which the attestation was last updated""" - last_update_block_number: EthBigInt - - """Timestamp at which the attestation was last updated""" - last_update_block_timestamp: EthBigInt - - """Address of the recipient of the attestation""" - recipient: String - - """Address of the resolver contract for the attestation""" - resolver: String - - """Unique identifier of the EAS schema used to create the attestation""" - schema_uid: String - - """Unique identifier for the attestation on EAS""" - uid: ID +input AttestationAttestationSchemaWhereInput { + chain_id: NumberSearchOptions + id: StringSearchOptions + resolver: StringSearchOptions + revocable: BooleanSearchOptions + uid: StringSearchOptions } -input AttestationFetchInput { - by: AttestationSortOptions +input AttestationHypercertWhereInput { + attestations_count: NumberSearchOptions + burned: BooleanSearchOptions + creation_block_number: BigIntSearchOptions + creation_block_timestamp: BigIntSearchOptions + creator_address: StringSearchOptions + 
hypercert_id: StringSearchOptions + id: StringSearchOptions + last_update_block_number: BigIntSearchOptions + last_update_block_timestamp: BigIntSearchOptions + sales_count: NumberSearchOptions + token_id: BigIntSearchOptions + units: BigIntSearchOptions + uri: StringSearchOptions } """Supported EAS attestation schemas and their related records""" type AttestationSchema { + """List of attestations related to the attestation schema""" + attestations: GetAttestationsResponse! + """Chain ID of the chains where the attestation schema is supported""" chain_id: EthBigInt! - id: ID! - - """List of attestations related to the attestation schema""" - records: [AttestationBaseType!]! + id: ID """Address of the resolver contract for the attestation schema""" resolver: String! @@ -166,11 +171,25 @@ type AttestationSchema { uid: ID! } +input AttestationSchemaAttestationWhereInput { + attester: StringSearchOptions + contract_address: StringSearchOptions + creation_block_number: BigIntSearchOptions + creation_block_timestamp: BigIntSearchOptions + id: StringSearchOptions + last_update_block_number: BigIntSearchOptions + last_update_block_timestamp: BigIntSearchOptions + recipient: StringSearchOptions + resolver: StringSearchOptions + supported_schemas_id: StringSearchOptions + uid: StringSearchOptions +} + """Supported EAS attestation schemas and their related records""" type AttestationSchemaBaseType { """Chain ID of the chains where the attestation schema is supported""" chain_id: EthBigInt! - id: ID! + id: ID """Address of the resolver contract for the attestation schema""" resolver: String! @@ -185,109 +204,53 @@ type AttestationSchemaBaseType { uid: ID! 
} -input AttestationSortOptions { - attestation_uid: SortOrder - attester_address: SortOrder - creation_block_number: SortOrder - creation_block_timestamp: SortOrder - last_update_block_number: SortOrder - last_update_block_timestamp: SortOrder - recipient_address: SortOrder - schema: SortOrder +input AttestationSchemaSortOptions { + chain_id: SortOrder = null + id: SortOrder = null + resolver: SortOrder = null + revocable: SortOrder = null + uid: SortOrder = null } -input AttestationWhereInput { - attestation: StringSearchOptions - attester: StringSearchOptions - chain_id: BigIntSearchOptions - contract_address: StringSearchOptions - creation_block_number: BigIntSearchOptions - creation_block_timestamp: BigIntSearchOptions - eas_schema: BasicAttestationSchemaWhereInput - hypercerts: BasicHypercertWhereArgs - last_update_block_number: BigIntSearchOptions - last_update_block_timestamp: BigIntSearchOptions - metadata: BasicMetadataWhereInput - recipient: StringSearchOptions +input AttestationSchemaWhereInput { + attestations: AttestationSchemaAttestationWhereInput = {} + chain_id: NumberSearchOptions + id: StringSearchOptions resolver: StringSearchOptions - token_id: StringSearchOptions + revocable: BooleanSearchOptions uid: StringSearchOptions } -input BasicAttestationSchemaWhereInput { - chain_id: BigIntSearchOptions - resolver: StringSearchOptions - revocable: BooleanSearchOptions - schema: StringSearchOptions - uid: StringSearchOptions +input AttestationSortOptions { + attester: SortOrder = null + contract_address: SortOrder = null + creation_block_number: SortOrder = null + creation_block_timestamp: SortOrder = null + id: SortOrder = null + last_update_block_number: SortOrder = null + last_update_block_timestamp: SortOrder = null + recipient: SortOrder = null + resolver: SortOrder = null + supported_schemas_id: SortOrder = null + uid: SortOrder = null } -input BasicAttestationWhereInput { - attestation: StringSearchOptions +input AttestationWhereInput { 
attester: StringSearchOptions - chain_id: BigIntSearchOptions contract_address: StringSearchOptions creation_block_number: BigIntSearchOptions creation_block_timestamp: BigIntSearchOptions + eas_schema: AttestationAttestationSchemaWhereInput = {} + hypercert: AttestationHypercertWhereInput = {} + id: StringSearchOptions last_update_block_number: BigIntSearchOptions last_update_block_timestamp: BigIntSearchOptions recipient: StringSearchOptions resolver: StringSearchOptions - token_id: StringSearchOptions + supported_schemas_id: StringSearchOptions uid: StringSearchOptions } -input BasicContractWhereInput { - chain_id: BigIntSearchOptions - contract_address: StringSearchOptions - id: IdSearchOptions -} - -input BasicFractionWhereInput { - creation_block_number: BigIntSearchOptions - creation_block_timestamp: BigIntSearchOptions - fraction_id: StringSearchOptions - hypercert_id: StringSearchOptions - id: IdSearchOptions - last_update_block_number: BigIntSearchOptions - last_update_block_timestamp: BigIntSearchOptions - owner_address: StringSearchOptions - token_id: BigIntSearchOptions - units: BigIntSearchOptions -} - -input BasicHypercertWhereArgs { - """Count of attestations referencing this hypercert""" - attestations_count: NumberSearchOptions - creation_block_number: BigIntSearchOptions - creation_block_timestamp: BigIntSearchOptions - creator_address: StringSearchOptions - hypercert_id: StringSearchOptions - id: IdSearchOptions - last_update_block_number: BigIntSearchOptions - last_update_block_timestamp: BigIntSearchOptions - sales_count: NumberSearchOptions - token_id: BigIntSearchOptions - uri: StringSearchOptions -} - -input BasicMetadataWhereInput { - contributors: StringArraySearchOptions - creation_block_timestamp: BigIntSearchOptions - description: StringSearchOptions - id: IdSearchOptions - impact_scope: StringArraySearchOptions - impact_timeframe_from: BigIntSearchOptions - impact_timeframe_to: BigIntSearchOptions - last_block_update_timestamp: 
BigIntSearchOptions - name: StringSearchOptions - rights: StringArraySearchOptions - uri: StringSearchOptions - work_scope: StringArraySearchOptions - work_timeframe_from: BigIntSearchOptions - work_timeframe_to: BigIntSearchOptions -} - """ The `BigInt` scalar type represents non-fractional signed whole numeric values. """ @@ -301,26 +264,36 @@ input BigIntSearchOptions { lte: BigInt } +"""Blueprint for hypercert creation""" type Blueprint { admins: [User!]! created_at: String! form_values: JSON! - hypercerts: GetHypercertsResponse! + hypercerts: HypercertsResponse! id: Float! minted: Boolean! minter_address: String! } -input BlueprintFetchInput { - by: BlueprintSortOptions +input BlueprintSortOptions { + admin_address: SortOrder = null + created_at: SortOrder = null + id: SortOrder = null + minted: SortOrder = null + minter_address: SortOrder = null } -input BlueprintSortOptions { - created_at: SortOrder +input BlueprintUserWhereInput { + address: StringSearchOptions + chain_id: NumberSearchOptions + display_name: StringSearchOptions + id: StringSearchOptions } input BlueprintWhereInput { admin_address: StringSearchOptions + admins: BlueprintUserWhereInput = {} + created_at: StringSearchOptions id: NumberSearchOptions minted: BooleanSearchOptions minter_address: StringSearchOptions @@ -343,26 +316,58 @@ type Collection { """Description of the collection""" description: String! - hypercerts: [Hypercert!] - id: ID! + hypercerts: HypercertsResponse + id: ID """Name of the collection""" name: String! 
} -input CollectionFetchInput { - by: CollectionSortOptions +input CollectionBlueprintWhereInput { + admin_address: StringSearchOptions + created_at: StringSearchOptions + id: NumberSearchOptions + minted: BooleanSearchOptions + minter_address: StringSearchOptions +} + +input CollectionHypercertWhereInput { + attestations_count: NumberSearchOptions + burned: BooleanSearchOptions + creation_block_number: BigIntSearchOptions + creation_block_timestamp: BigIntSearchOptions + creator_address: StringSearchOptions + hypercert_id: StringSearchOptions + id: StringSearchOptions + last_update_block_number: BigIntSearchOptions + last_update_block_timestamp: BigIntSearchOptions + sales_count: NumberSearchOptions + token_id: BigIntSearchOptions + units: BigIntSearchOptions + uri: StringSearchOptions } input CollectionSortOptions { - created_at: SortOrder - description: SortOrder - name: SortOrder + created_at: SortOrder = null + description: SortOrder = null + id: SortOrder = null + name: SortOrder = null +} + +input CollectionUserWhereInput { + address: StringSearchOptions + chain_id: NumberSearchOptions + display_name: StringSearchOptions + id: StringSearchOptions } input CollectionWhereInput { + admins: CollectionUserWhereInput = {} + blueprints: CollectionBlueprintWhereInput = {} + created_at: StringSearchOptions description: StringSearchOptions - id: IdSearchOptions + hypercerts: CollectionHypercertWhereInput = {} + id: StringSearchOptions name: StringSearchOptions } @@ -373,26 +378,22 @@ type Contract { """The address of the contract""" contract_address: String - id: ID! 
+ id: ID """The block number at which the contract was deployed""" start_block: EthBigInt } -input ContractFetchInput { - by: ContractSortOptions -} - input ContractSortOptions { - chain_id: SortOrder - contract_address: SortOrder - contract_id: SortOrder + chain_id: SortOrder = null + contract_address: SortOrder = null + id: SortOrder = null } input ContractWhereInput { chain_id: BigIntSearchOptions contract_address: StringSearchOptions - id: IdSearchOptions + id: StringSearchOptions } """Handles uint256 bigint values stored in DB""" @@ -400,6 +401,12 @@ scalar EthBigInt """Fraction of an hypercert""" type Fraction { + """Whether the fraction has been burned""" + burned: Boolean + + """The ID of the claims""" + claims_id: String + """Block number of the creation of the fraction""" creation_block_number: EthBigInt @@ -415,7 +422,7 @@ type Fraction { The ID of the fraction concatenated from the chain ID, contract address, and ID of the hypercert claim """ hypercert_id: ID - id: ID! + id: ID """Block number of the last update of the fraction""" last_update_block_number: EthBigInt @@ -435,33 +442,54 @@ type Fraction { """Sales related to this fraction""" sales: GetSalesResponse + """The token ID of the fraction""" + token_id: EthBigInt + """Units held by the fraction""" units: EthBigInt } -input FractionFetchInput { - by: FractionSortOptions +input FractionMetadataWhereInput { + allow_list_uri: StringSearchOptions + contributors: StringArraySearchOptions + description: StringSearchOptions + external_url: StringSearchOptions + id: StringSearchOptions + impact_scope: StringArraySearchOptions + impact_timeframe_from: BigIntSearchOptions + impact_timeframe_to: BigIntSearchOptions + name: StringSearchOptions + rights: StringArraySearchOptions + uri: StringSearchOptions + work_scope: StringArraySearchOptions + work_timeframe_from: BigIntSearchOptions + work_timeframe_to: BigIntSearchOptions } input FractionSortOptions { - creation_block_number: SortOrder - 
creation_block_timestamp: SortOrder - last_update_block_number: SortOrder - last_update_block_timestamp: SortOrder - owner_address: SortOrder - token_id: SortOrder - units: SortOrder + burned: SortOrder = null + creation_block_number: SortOrder = null + creation_block_timestamp: SortOrder = null + fraction_id: SortOrder = null + hypercert_id: SortOrder = null + id: SortOrder = null + last_update_block_number: SortOrder = null + last_update_block_timestamp: SortOrder = null + owner_address: SortOrder = null + token_id: SortOrder = null + units: SortOrder = null } input FractionWhereInput { + burned: BooleanSearchOptions creation_block_number: BigIntSearchOptions creation_block_timestamp: BigIntSearchOptions fraction_id: StringSearchOptions hypercert_id: StringSearchOptions - hypercerts: BasicHypercertWhereArgs - id: IdSearchOptions + id: StringSearchOptions last_update_block_number: BigIntSearchOptions last_update_block_timestamp: BigIntSearchOptions + metadata: FractionMetadataWhereInput = {} owner_address: StringSearchOptions token_id: BigIntSearchOptions units: BigIntSearchOptions @@ -482,28 +510,33 @@ type GetAttestationsSchemaResponse { data: [AttestationSchema!] } -type GetBlueprintResponse { +"""Blueprints for hypercert creation""" +type GetBlueprintsResponse { count: Int data: [Blueprint!] } +"""Collection of hypercerts for reference and display purposes""" type GetCollectionsResponse { count: Int data: [Collection!] } -"""Pointer to a contract deployed on a chain""" type GetContractsResponse { count: Int data: [Contract!] } -"""Fraction of an hypercert""" type GetFractionsResponse { count: Int data: [Fraction!] } +type GetHyperboardOwnersResponse { + count: Int + data: [HyperboardOwner!] +} + type GetHyperboardsResponse { count: Int data: [Hyperboard!] @@ -522,6 +555,9 @@ type GetMetadataResponse { data: [Metadata!] 
} +""" +Hypercert with metadata, contract, orders, sales and fraction information +""" type GetOrdersForHypercertResponse { cheapestOrder: Order count: Int @@ -539,6 +575,16 @@ type GetSalesResponse { data: [Sale!] } +type GetSectionEntryOwnersResponse { + count: Int + data: [SectionEntryOwner!] +} + +type GetSectionsResponse { + count: Int + data: [Section!] +} + type GetSignatureRequestResponse { count: Int data: [SignatureRequest!] @@ -551,7 +597,7 @@ type GetUsersResponse { """Hyperboard of hypercerts for reference and display purposes""" type Hyperboard { - admins: [User!]! + admins: GetUsersResponse! """Background image of the hyperboard""" background_image: String @@ -561,19 +607,22 @@ type Hyperboard { """Whether the hyperboard should be rendered as a grayscale image""" grayscale_images: Boolean - id: ID! + id: ID """Name of the hyperboard""" name: String! - owners: [HyperboardOwner!]! - sections: SectionResponseType! + owners: GetHyperboardOwnersResponse! + sections: GetSectionsResponse! """Color of the borders of the hyperboard""" tile_border_color: String } -input HyperboardFetchInput { - by: HyperboardSortOptions +input HyperboardCollectionWhereInput { + created_at: StringSearchOptions + description: StringSearchOptions + id: StringSearchOptions + name: StringSearchOptions } type HyperboardOwner { @@ -588,22 +637,32 @@ type HyperboardOwner { """The display name of the user""" display_name: String + id: ID percentage_owned: Float! """Pending signature requests for the user""" - signature_requests: [SignatureRequest!] 
+ signature_requests: GetSignatureRequestResponse } input HyperboardSortOptions { - admin_id: SortOrder - chainId: SortOrder - name: SortOrder + admin_address: SortOrder = null + chain_ids: SortOrder = null + id: SortOrder = null +} + +input HyperboardUserWhereInput { + address: StringSearchOptions + chain_id: NumberSearchOptions + display_name: StringSearchOptions + id: StringSearchOptions } input HyperboardWhereInput { - admin_id: StringSearchOptions - chain_id: BigIntSearchOptions - id: IdSearchOptions + admin_address: StringSearchOptions + admins: HyperboardUserWhereInput = {} + chain_ids: NumberArraySearchOptions + collections: HyperboardCollectionWhereInput = {} + id: StringSearchOptions } """ @@ -616,6 +675,9 @@ type Hypercert { """Count of attestations referencing this hypercert""" attestations_count: Int + """Whether the hypercert has been burned""" + burned: Boolean + """The contract that the hypercert is associated with""" contract: Contract @@ -634,7 +696,7 @@ type Hypercert { Concatenation of [chainID]-[contractAddress]-[tokenID] to discern hypercerts across chains """ hypercert_id: ID - id: ID! 
+ id: ID last_update_block_number: EthBigInt last_update_block_timestamp: EthBigInt @@ -660,10 +722,27 @@ type Hypercert { uri: String } +input HypercertAttestationWhereInput { + attester: StringSearchOptions + contract_address: StringSearchOptions + creation_block_number: BigIntSearchOptions + creation_block_timestamp: BigIntSearchOptions + id: StringSearchOptions + last_update_block_number: BigIntSearchOptions + last_update_block_timestamp: BigIntSearchOptions + recipient: StringSearchOptions + resolver: StringSearchOptions + supported_schemas_id: StringSearchOptions + uid: StringSearchOptions +} + type HypercertBaseType { """Count of attestations referencing this hypercert""" attestations_count: Int + """Whether the hypercert has been burned""" + burned: Boolean + """The UUID of the contract as stored in the database""" contracts_id: ID creation_block_number: EthBigInt @@ -676,13 +755,10 @@ type HypercertBaseType { Concatenation of [chainID]-[contractAddress]-[tokenID] to discern hypercerts across chains """ hypercert_id: ID - id: ID! 
+ id: ID last_update_block_number: EthBigInt last_update_block_timestamp: EthBigInt - """The metadata for the hypercert as referenced by the uri""" - metadata: Metadata - """Count of sales of fractions that belong to this hypercert""" sales_count: Int @@ -696,49 +772,127 @@ type HypercertBaseType { uri: String } -input HypercertFetchInput { - by: HypercertSortOptions +input HypercertContractWhereInput { + chain_id: BigIntSearchOptions + contract_address: StringSearchOptions + id: StringSearchOptions } -input HypercertSortOptions { - attestations_count: SortOrder - creation_block_number: SortOrder - creation_block_timestamp: SortOrder - hypercert_id: SortOrder - last_block_update_timestamp: SortOrder - last_update_block_number: SortOrder - last_update_block_timestamp: SortOrder - owner_address: SortOrder - sales_count: SortOrder - token_id: SortOrder - units: SortOrder - uri: SortOrder -} - -"""Arguments for filtering hypercerts""" -input HypercertsWhereArgs { - attestations: BasicAttestationWhereInput +input HypercertFractionWhereInput { + burned: BooleanSearchOptions + creation_block_number: BigIntSearchOptions + creation_block_timestamp: BigIntSearchOptions + fraction_id: StringSearchOptions + hypercert_id: StringSearchOptions + id: StringSearchOptions + last_update_block_number: BigIntSearchOptions + last_update_block_timestamp: BigIntSearchOptions + owner_address: StringSearchOptions + token_id: BigIntSearchOptions + units: BigIntSearchOptions +} - """Count of attestations referencing this hypercert""" +input HypercertMetadataWhereInput { + allow_list_uri: StringSearchOptions + contributors: StringArraySearchOptions + description: StringSearchOptions + external_url: StringSearchOptions + id: StringSearchOptions + impact_scope: StringArraySearchOptions + impact_timeframe_from: BigIntSearchOptions + impact_timeframe_to: BigIntSearchOptions + name: StringSearchOptions + rights: StringArraySearchOptions + uri: StringSearchOptions + work_scope: 
StringArraySearchOptions + work_timeframe_from: BigIntSearchOptions + work_timeframe_to: BigIntSearchOptions +} + +input HypercertSortOptions { + attestations_count: SortOrder = null + burned: SortOrder = null + creation_block_number: SortOrder = null + creation_block_timestamp: SortOrder = null + creator_address: SortOrder = null + hypercert_id: SortOrder = null + id: SortOrder = null + last_update_block_number: SortOrder = null + last_update_block_timestamp: SortOrder = null + sales_count: SortOrder = null + token_id: SortOrder = null + units: SortOrder = null + uri: SortOrder = null +} + +input HypercertWhereInput { + attestations: HypercertAttestationWhereInput = {} attestations_count: NumberSearchOptions - contract: BasicContractWhereInput + burned: BooleanSearchOptions + contract: HypercertContractWhereInput = {} creation_block_number: BigIntSearchOptions creation_block_timestamp: BigIntSearchOptions creator_address: StringSearchOptions - fractions: BasicFractionWhereInput + fractions: HypercertFractionWhereInput = {} hypercert_id: StringSearchOptions - id: IdSearchOptions + id: StringSearchOptions last_update_block_number: BigIntSearchOptions last_update_block_timestamp: BigIntSearchOptions - metadata: BasicMetadataWhereInput + metadata: HypercertMetadataWhereInput = {} sales_count: NumberSearchOptions token_id: BigIntSearchOptions + units: BigIntSearchOptions uri: StringSearchOptions } -input IdSearchOptions { - eq: UUID - in: [UUID!] 
+""" +Hypercert with metadata, contract, orders, sales and fraction information +""" +type HypercertWithMetadata { + """Count of attestations referencing this hypercert""" + attestations_count: Int + + """Whether the hypercert has been burned""" + burned: Boolean + + """The UUID of the contract as stored in the database""" + contracts_id: ID + creation_block_number: EthBigInt + creation_block_timestamp: EthBigInt + + """The address of the creator of the hypercert""" + creator_address: String + + """ + Concatenation of [chainID]-[contractAddress]-[tokenID] to discern hypercerts across chains + """ + hypercert_id: ID + id: ID + last_update_block_number: EthBigInt + last_update_block_timestamp: EthBigInt + + """The metadata for the hypercert as referenced by the uri""" + metadata: Metadata + + """Count of sales of fractions that belong to this hypercert""" + sales_count: Int + + """The token ID of the hypercert""" + token_id: EthBigInt + + """The total units held by the hypercert""" + units: EthBigInt + + """References the metadata for this claim""" + uri: String +} + +""" +Hypercert without metadata, contract, orders, sales and fraction information +""" +type HypercertsResponse { + count: Int + data: [HypercertBaseType!] } """ @@ -761,10 +915,8 @@ type Metadata { """References additional information related to the hypercert""" external_url: String - id: ID! - - """Base64 encoded representation of the image of the hypercert""" - image: String + id: ID + image: String! """Impact scope of the hypercert""" impact_scope: [String!] 
@@ -797,29 +949,49 @@ type Metadata { work_timeframe_to: EthBigInt } -input MetadataFetchInput { - by: MetadataSortOptions +input MetadataHypercertWhereInput { + attestations_count: NumberSearchOptions + burned: BooleanSearchOptions + creation_block_number: BigIntSearchOptions + creation_block_timestamp: BigIntSearchOptions + creator_address: StringSearchOptions + hypercert_id: StringSearchOptions + id: StringSearchOptions + last_update_block_number: BigIntSearchOptions + last_update_block_timestamp: BigIntSearchOptions + sales_count: NumberSearchOptions + token_id: BigIntSearchOptions + units: BigIntSearchOptions + uri: StringSearchOptions } input MetadataSortOptions { - allow_list_uri: SortOrder - description: SortOrder - external_url: SortOrder - metadata_id: SortOrder - name: SortOrder - uri: SortOrder + allow_list_uri: SortOrder = null + contributors: SortOrder = null + description: SortOrder = null + external_url: SortOrder = null + id: SortOrder = null + impact_scope: SortOrder = null + impact_timeframe_from: SortOrder = null + impact_timeframe_to: SortOrder = null + name: SortOrder = null + rights: SortOrder = null + uri: SortOrder = null + work_scope: SortOrder = null + work_timeframe_from: SortOrder = null + work_timeframe_to: SortOrder = null } input MetadataWhereInput { + allow_list_uri: StringSearchOptions contributors: StringArraySearchOptions - creation_block_timestamp: BigIntSearchOptions description: StringSearchOptions - hypercerts: BasicHypercertWhereArgs - id: IdSearchOptions + external_url: StringSearchOptions + hypercert: MetadataHypercertWhereInput = {} + id: StringSearchOptions impact_scope: StringArraySearchOptions impact_timeframe_from: BigIntSearchOptions impact_timeframe_to: BigIntSearchOptions - last_block_update_timestamp: BigIntSearchOptions name: StringSearchOptions rights: StringArraySearchOptions uri: StringSearchOptions @@ -830,10 +1002,10 @@ input MetadataWhereInput { input NumberArraySearchOptions { """Array of numbers""" - 
contains: [BigInt!] + arrayContains: [BigInt!] """Array of numbers""" - overlaps: [BigInt!] + arrayOverlaps: [BigInt!] } input NumberSearchOptions { @@ -845,21 +1017,22 @@ input NumberSearchOptions { lte: Int } +"""Marketplace order for a hypercert""" type Order { additionalParameters: String! amounts: [Float!]! chainId: EthBigInt! collection: String! collectionType: Float! - createdAt: String! + createdAt: Float! currency: String! endTime: Float! globalNonce: String! """The hypercert associated with this order""" - hypercert: HypercertBaseType + hypercert: HypercertWithMetadata hypercert_id: String! - id: ID! + id: ID invalidated: Boolean! itemIds: [String!]! orderNonce: String! @@ -872,56 +1045,85 @@ type Order { startTime: Float! strategyId: Float! subsetNonce: Float! - validator_codes: [String!] + validator_codes: [Int!] } -input OrderFetchInput { - by: OrderSortOptions +input OrderHypercertWhereInput { + attestations_count: NumberSearchOptions + burned: BooleanSearchOptions + creation_block_number: BigIntSearchOptions + creation_block_timestamp: BigIntSearchOptions + creator_address: StringSearchOptions + hypercert_id: StringSearchOptions + id: StringSearchOptions + last_update_block_number: BigIntSearchOptions + last_update_block_timestamp: BigIntSearchOptions + sales_count: NumberSearchOptions + token_id: BigIntSearchOptions + units: BigIntSearchOptions + uri: StringSearchOptions } input OrderSortOptions { - amounts: SortOrder - chainId: SortOrder - collection: SortOrder - collectionType: SortOrder - createdAt: SortOrder - currency: SortOrder - endTime: SortOrder - globalNonce: SortOrder - hypercert_id: SortOrder - invalidated: SortOrder - orderNonce: SortOrder - price: SortOrder - quoteType: SortOrder - signer: SortOrder - startTime: SortOrder - strategyId: SortOrder + amounts: SortOrder = null + chainId: SortOrder = null + collection: SortOrder = null + collectionType: SortOrder = null + createdAt: SortOrder = null + currency: SortOrder = null + endTime: 
SortOrder = null + globalNonce: SortOrder = null + hypercert_id: SortOrder = null + id: SortOrder = null + invalidated: SortOrder = null + itemIds: SortOrder = null + orderNonce: SortOrder = null + price: SortOrder = null + quoteType: SortOrder = null + signer: SortOrder = null + startTime: SortOrder = null + strategyId: SortOrder = null + subsetNonce: SortOrder = null } input OrderWhereInput { + amounts: NumberArraySearchOptions chainId: BigIntSearchOptions + collection: StringSearchOptions + collectionType: NumberSearchOptions + createdAt: StringSearchOptions currency: StringSearchOptions + endTime: NumberSearchOptions + globalNonce: StringSearchOptions + hypercert: OrderHypercertWhereInput = {} hypercert_id: StringSearchOptions - id: IdSearchOptions + id: StringSearchOptions invalidated: BooleanSearchOptions + itemIds: StringArraySearchOptions + orderNonce: StringSearchOptions + price: StringSearchOptions + quoteType: NumberSearchOptions signer: StringSearchOptions + startTime: NumberSearchOptions + strategyId: NumberSearchOptions + subsetNonce: NumberSearchOptions } type Query { - allowlistRecords(first: Int, offset: Int, sort: AllowlistRecordFetchInput, where: AllowlistRecordWhereInput): GetAllowlistRecordResponse! - attestationSchemas(first: Int, offset: Int): GetAttestationsSchemaResponse! - attestations(first: Int, offset: Int, sort: AttestationFetchInput, where: AttestationWhereInput): GetAttestationsResponse! - blueprints(first: Int, offset: Int, sort: BlueprintFetchInput, where: BlueprintWhereInput): GetBlueprintResponse! - collections(first: Int, offset: Int, sort: CollectionFetchInput, where: CollectionWhereInput): GetCollectionsResponse! - contracts(first: Int, offset: Int, sort: ContractFetchInput, where: ContractWhereInput): GetContractsResponse! - fractions(first: Int, offset: Int, sort: FractionFetchInput, where: FractionWhereInput): GetFractionsResponse! 
- hyperboards(first: Int, offset: Int, sort: HyperboardFetchInput, where: HyperboardWhereInput): GetHyperboardsResponse! - hypercerts(first: Int, offset: Int, sort: HypercertFetchInput, where: HypercertsWhereArgs): GetHypercertsResponse! - metadata(first: Int, offset: Int, sort: MetadataFetchInput, where: MetadataWhereInput): GetMetadataResponse! - orders(first: Int, offset: Int, sort: OrderFetchInput, where: OrderWhereInput): GetOrdersResponse! - sales(first: Int, offset: Int, sort: SaleFetchInput, where: SaleWhereInput): GetSalesResponse! - signatureRequests(first: Int, offset: Int, sort: SignatureRequestFetchInput, where: SignatureRequestWhereInput): GetSignatureRequestResponse! - users(first: Int, offset: Int, where: UserWhereInput): GetUsersResponse! + allowlistRecords(first: Int, offset: Int, sortBy: AllowlistRecordSortOptions, where: AllowlistRecordWhereInput): GetAllowlistRecordResponse! + attestationSchemas(first: Int, offset: Int, sortBy: AttestationSchemaSortOptions, where: AttestationSchemaWhereInput): GetAttestationsSchemaResponse! + attestations(first: Int, offset: Int, sortBy: AttestationSortOptions, where: AttestationWhereInput): GetAttestationsResponse! + blueprints(first: Int, offset: Int, sortBy: BlueprintSortOptions, where: BlueprintWhereInput): GetBlueprintsResponse! + collections(first: Int, offset: Int, sortBy: CollectionSortOptions, where: CollectionWhereInput): GetCollectionsResponse! + contracts(first: Int, offset: Int, sortBy: ContractSortOptions, where: ContractWhereInput): GetContractsResponse! + fractions(first: Int, offset: Int, sortBy: FractionSortOptions, where: FractionWhereInput): GetFractionsResponse! + hyperboards(first: Int, offset: Int, sortBy: HyperboardSortOptions, where: HyperboardWhereInput): GetHyperboardsResponse! + hypercerts(first: Int, offset: Int, sortBy: HypercertSortOptions, where: HypercertWhereInput): GetHypercertsResponse! 
+ metadata(first: Int, offset: Int, sortBy: MetadataSortOptions, where: MetadataWhereInput): GetMetadataResponse! + orders(first: Int, offset: Int, sortBy: OrderSortOptions, where: OrderWhereInput): GetOrdersResponse! + sales(first: Int, offset: Int, sortBy: SaleSortOptions, where: SaleWhereInput): GetSalesResponse! + signatureRequests(first: Int, offset: Int, sortBy: SignatureRequestSortOptions, where: SignatureRequestWhereInput): GetSignatureRequestResponse! + users(first: Int, offset: Int, sortBy: UserSortOptions, where: UserWhereInput): GetUsersResponse! } type Sale { @@ -945,11 +1147,11 @@ type Sale { currency_amount: EthBigInt! """The hypercert associated with this order""" - hypercert: HypercertBaseType + hypercert: HypercertWithMetadata """The ID of the hypercert token referenced in the order""" hypercert_id: String - id: ID! + id: ID """Token ids of the sold fractions""" item_ids: [EthBigInt!] @@ -964,21 +1166,35 @@ type Sale { transaction_hash: String! } -input SaleFetchInput { - by: SaleSortOptions +input SaleHypercertWhereInput { + attestations_count: NumberSearchOptions + burned: BooleanSearchOptions + creation_block_number: BigIntSearchOptions + creation_block_timestamp: BigIntSearchOptions + creator_address: StringSearchOptions + hypercert_id: StringSearchOptions + id: StringSearchOptions + last_update_block_number: BigIntSearchOptions + last_update_block_timestamp: BigIntSearchOptions + sales_count: NumberSearchOptions + token_id: BigIntSearchOptions + units: BigIntSearchOptions + uri: StringSearchOptions } input SaleSortOptions { - amounts: SortOrder - buyer: SortOrder - collection: SortOrder - creationBlockNumber: SortOrder - creationBlockTimestamp: SortOrder - currency: SortOrder - hypercertId: SortOrder - seller: SortOrder - strategyId: SortOrder - transactionHash: SortOrder + amounts: SortOrder = null + buyer: SortOrder = null + collection: SortOrder = null + creation_block_number: SortOrder = null + creation_block_timestamp: SortOrder = null + 
currency: SortOrder = null + hypercert_id: SortOrder = null + id: SortOrder = null + item_ids: SortOrder = null + seller: SortOrder = null + strategy_id: SortOrder = null + transaction_hash: SortOrder = null } input SaleWhereInput { @@ -988,19 +1204,21 @@ input SaleWhereInput { creation_block_number: BigIntSearchOptions creation_block_timestamp: BigIntSearchOptions currency: StringSearchOptions + hypercert: SaleHypercertWhereInput = {} hypercert_id: StringSearchOptions + id: StringSearchOptions item_ids: StringArraySearchOptions seller: StringSearchOptions - strategy_id: BigIntSearchOptions - transaction_hash: IdSearchOptions + strategy_id: NumberSearchOptions + transaction_hash: StringSearchOptions } -"""Section representing a collection within a hyperboard""" +"""Section representing one or more collectionswithin a hyperboard""" type Section { - collection: Collection! + collections: [Collection!]! entries: [SectionEntry!]! label: String! - owners: [HyperboardOwner!]! + owners: GetHyperboardOwnersResponse! } """Entry representing a hypercert or blueprint within a section""" @@ -1013,9 +1231,9 @@ type SectionEntry { """Name of the hypercert or blueprint""" name: String - owners: [SectionEntryOwner!]! + owners: GetSectionEntryOwnersResponse! percentage_of_section: Float! - total_units: BigInt + total_units: EthBigInt } type SectionEntryOwner { @@ -1030,16 +1248,12 @@ type SectionEntryOwner { """The display name of the user""" display_name: String + id: ID percentage: Float! """Pending signature requests for the user""" - signature_requests: [SignatureRequest!] - units: BigInt -} - -type SectionResponseType { - count: Float! - data: [Section!]! + signature_requests: GetSignatureRequestResponse + units: EthBigInt } """Pending signature request for a user""" @@ -1066,24 +1280,17 @@ type SignatureRequest { timestamp: EthBigInt! 
} -input SignatureRequestFetchInput { - by: SignatureRequestSortOptions -} - """Purpose of the signature request""" enum SignatureRequestPurpose { UPDATE_USER_DATA } -input SignatureRequestPurposeSearchOptions { - eq: SignatureRequestPurpose -} - input SignatureRequestSortOptions { - message_hash: SortOrder - purpose: SortOrder - safe_address: SortOrder - timestamp: SortOrder + chain_id: SortOrder = null + message_hash: SortOrder = null + safe_address: SortOrder = null + status: SortOrder = null + timestamp: SortOrder = null } """Status of the signature request""" @@ -1100,7 +1307,6 @@ input SignatureRequestStatusSearchOptions { input SignatureRequestWhereInput { chain_id: BigIntSearchOptions message_hash: StringSearchOptions - purpose: SignatureRequestPurposeSearchOptions safe_address: StringSearchOptions status: SignatureRequestStatusSearchOptions timestamp: BigIntSearchOptions @@ -1116,8 +1322,11 @@ enum SortOrder { } input StringArraySearchOptions { - contains: [String!] - overlaps: [String!] + """Array of strings""" + arrayContains: [String!] + + """Array of strings""" + arrayOverlaps: [String!] } input StringSearchOptions { @@ -1128,11 +1337,6 @@ input StringSearchOptions { startsWith: String } -""" -A field whose value is a generic Universally Unique Identifier: https://en.wikipedia.org/wiki/Universally_unique_identifier. -""" -scalar UUID - type User { """The address of the user""" address: String! @@ -1145,12 +1349,22 @@ type User { """The display name of the user""" display_name: String + id: ID """Pending signature requests for the user""" - signature_requests: [SignatureRequest!] 
+ signature_requests: GetSignatureRequestResponse +} + +input UserSortOptions { + address: SortOrder = null + chain_id: SortOrder = null + display_name: SortOrder = null + id: SortOrder = null } input UserWhereInput { address: StringSearchOptions - chain_id: BigIntSearchOptions + chain_id: NumberSearchOptions + display_name: StringSearchOptions + id: StringSearchOptions } \ No newline at end of file diff --git a/seed.config.ts b/seed.config.ts deleted file mode 100644 index 94055144..00000000 --- a/seed.config.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { SeedPostgres } from "@snaplet/seed/adapter-postgres"; -import { defineConfig } from "@snaplet/seed/config"; -import postgres from "postgres"; - -export default defineConfig({ - adapter: () => { - const client = postgres("postgresql://postgres:postgres@localhost:64322/postgres"); - return new SeedPostgres(client); - }, -}); \ No newline at end of file diff --git a/src/__generated__/routes/routes.ts b/src/__generated__/routes/routes.ts index 885c7914..086a8a1c 100644 --- a/src/__generated__/routes/routes.ts +++ b/src/__generated__/routes/routes.ts @@ -20,6 +20,8 @@ import { HyperboardController } from './../../controllers/HyperboardController.j import { BlueprintController } from './../../controllers/BlueprintController.js'; // WARNING: This file was auto-generated with tsoa. Please do not modify it. Re-run tsoa to re-generate this file: https://github.com/lukeautry/tsoa import { AllowListController } from './../../controllers/AllowListController.js'; +import { iocContainer } from './../../lib/tsoa/iocContainer.js'; +import type { IocContainer, IocContainerFactory } from '@tsoa/runtime'; import type { Request as ExRequest, Response as ExResponse, RequestHandler, Router } from 'express'; import multer from 'multer'; const upload = multer({"limits":{"fileSize":8388608}}); @@ -126,6 +128,11 @@ const models: TsoaRoute.Models = { "additionalProperties": false, }, // WARNING: This file was auto-generated with tsoa. 
Please do not modify it. Re-run tsoa to re-generate this file: https://github.com/lukeautry/tsoa + "Record_string.string-or-string-Array_": { + "dataType": "refAlias", + "type": {"dataType":"nestedObjectLiteral","nestedProperties":{},"validators":{}}, + }, + // WARNING: This file was auto-generated with tsoa. Please do not modify it. Re-run tsoa to re-generate this file: https://github.com/lukeautry/tsoa "HypercertClaimdata": { "dataType": "refObject", "properties": { @@ -268,11 +275,6 @@ const models: TsoaRoute.Models = { "additionalProperties": false, }, // WARNING: This file was auto-generated with tsoa. Please do not modify it. Re-run tsoa to re-generate this file: https://github.com/lukeautry/tsoa - "OrderValidatorCode": { - "dataType": "refEnum", - "enums": [0,101,111,112,113,201,211,212,213,301,311,312,321,322,401,402,411,412,413,414,415,421,422,501,502,503,601,611,612,621,622,623,631,632,633,634,641,642,701,702,801,802,901,902], - }, - // WARNING: This file was auto-generated with tsoa. Please do not modify it. Re-run tsoa to re-generate this file: https://github.com/lukeautry/tsoa "ValidateOrderRequest": { "dataType": "refObject", "properties": { @@ -410,7 +412,12 @@ export function RegisterRoutes(app: Router) { try { validatedArgs = templateService.getValidatedArgs({ args, request, response }); - const controller = new UserController(); + const container: IocContainer = typeof iocContainer === 'function' ? (iocContainer as IocContainerFactory)(request) : iocContainer; + + const controller: any = await container.get(UserController); + if (typeof controller['setStatus'] === 'function') { + controller.setStatus(undefined); + } await templateService.apiHandler({ methodName: 'addOrUpdateUser', @@ -442,7 +449,12 @@ export function RegisterRoutes(app: Router) { try { validatedArgs = templateService.getValidatedArgs({ args, request, response }); - const controller = new UploadController(); + const container: IocContainer = typeof iocContainer === 'function' ? 
(iocContainer as IocContainerFactory)(request) : iocContainer; + + const controller: any = await container.get(UploadController); + if (typeof controller['setStatus'] === 'function') { + controller.setStatus(undefined); + } await templateService.apiHandler({ methodName: 'upload', @@ -474,7 +486,12 @@ export function RegisterRoutes(app: Router) { try { validatedArgs = templateService.getValidatedArgs({ args, request, response }); - const controller = new SignatureRequestController(); + const container: IocContainer = typeof iocContainer === 'function' ? (iocContainer as IocContainerFactory)(request) : iocContainer; + + const controller: any = await container.get(SignatureRequestController); + if (typeof controller['setStatus'] === 'function') { + controller.setStatus(undefined); + } await templateService.apiHandler({ methodName: 'cancelSignatureRequest', @@ -503,7 +520,12 @@ export function RegisterRoutes(app: Router) { try { validatedArgs = templateService.getValidatedArgs({ args, request, response }); - const controller = new SignatureRequestController(); + const container: IocContainer = typeof iocContainer === 'function' ? (iocContainer as IocContainerFactory)(request) : iocContainer; + + const controller: any = await container.get(SignatureRequestController); + if (typeof controller['setStatus'] === 'function') { + controller.setStatus(undefined); + } await templateService.apiHandler({ methodName: 'processSignatureRequests', @@ -532,7 +554,12 @@ export function RegisterRoutes(app: Router) { try { validatedArgs = templateService.getValidatedArgs({ args, request, response }); - const controller = new MonitoringController(); + const container: IocContainer = typeof iocContainer === 'function' ? 
(iocContainer as IocContainerFactory)(request) : iocContainer; + + const controller: any = await container.get(MonitoringController); + if (typeof controller['setStatus'] === 'function') { + controller.setStatus(undefined); + } await templateService.apiHandler({ methodName: 'healthCheck', @@ -562,7 +589,12 @@ export function RegisterRoutes(app: Router) { try { validatedArgs = templateService.getValidatedArgs({ args, request, response }); - const controller = new MetadataController(); + const container: IocContainer = typeof iocContainer === 'function' ? (iocContainer as IocContainerFactory)(request) : iocContainer; + + const controller: any = await container.get(MetadataController); + if (typeof controller['setStatus'] === 'function') { + controller.setStatus(undefined); + } await templateService.apiHandler({ methodName: 'storeMetadata', @@ -592,7 +624,12 @@ export function RegisterRoutes(app: Router) { try { validatedArgs = templateService.getValidatedArgs({ args, request, response }); - const controller = new MetadataController(); + const container: IocContainer = typeof iocContainer === 'function' ? (iocContainer as IocContainerFactory)(request) : iocContainer; + + const controller: any = await container.get(MetadataController); + if (typeof controller['setStatus'] === 'function') { + controller.setStatus(undefined); + } await templateService.apiHandler({ methodName: 'storeMetadataWithAllowlist', @@ -622,7 +659,12 @@ export function RegisterRoutes(app: Router) { try { validatedArgs = templateService.getValidatedArgs({ args, request, response }); - const controller = new MetadataController(); + const container: IocContainer = typeof iocContainer === 'function' ? 
(iocContainer as IocContainerFactory)(request) : iocContainer; + + const controller: any = await container.get(MetadataController); + if (typeof controller['setStatus'] === 'function') { + controller.setStatus(undefined); + } await templateService.apiHandler({ methodName: 'validateMetadata', @@ -652,7 +694,12 @@ export function RegisterRoutes(app: Router) { try { validatedArgs = templateService.getValidatedArgs({ args, request, response }); - const controller = new MetadataController(); + const container: IocContainer = typeof iocContainer === 'function' ? (iocContainer as IocContainerFactory)(request) : iocContainer; + + const controller: any = await container.get(MetadataController); + if (typeof controller['setStatus'] === 'function') { + controller.setStatus(undefined); + } await templateService.apiHandler({ methodName: 'validateMetadataWithAllowlist', @@ -682,7 +729,12 @@ export function RegisterRoutes(app: Router) { try { validatedArgs = templateService.getValidatedArgs({ args, request, response }); - const controller = new MarketplaceController(); + const container: IocContainer = typeof iocContainer === 'function' ? (iocContainer as IocContainerFactory)(request) : iocContainer; + + const controller: any = await container.get(MarketplaceController); + if (typeof controller['setStatus'] === 'function') { + controller.setStatus(undefined); + } await templateService.apiHandler({ methodName: 'storeOrder', @@ -712,7 +764,12 @@ export function RegisterRoutes(app: Router) { try { validatedArgs = templateService.getValidatedArgs({ args, request, response }); - const controller = new MarketplaceController(); + const container: IocContainer = typeof iocContainer === 'function' ? 
(iocContainer as IocContainerFactory)(request) : iocContainer; + + const controller: any = await container.get(MarketplaceController); + if (typeof controller['setStatus'] === 'function') { + controller.setStatus(undefined); + } await templateService.apiHandler({ methodName: 'updateOrderNonce', @@ -742,7 +799,12 @@ export function RegisterRoutes(app: Router) { try { validatedArgs = templateService.getValidatedArgs({ args, request, response }); - const controller = new MarketplaceController(); + const container: IocContainer = typeof iocContainer === 'function' ? (iocContainer as IocContainerFactory)(request) : iocContainer; + + const controller: any = await container.get(MarketplaceController); + if (typeof controller['setStatus'] === 'function') { + controller.setStatus(undefined); + } await templateService.apiHandler({ methodName: 'validateOrder', @@ -772,7 +834,12 @@ export function RegisterRoutes(app: Router) { try { validatedArgs = templateService.getValidatedArgs({ args, request, response }); - const controller = new MarketplaceController(); + const container: IocContainer = typeof iocContainer === 'function' ? (iocContainer as IocContainerFactory)(request) : iocContainer; + + const controller: any = await container.get(MarketplaceController); + if (typeof controller['setStatus'] === 'function') { + controller.setStatus(undefined); + } await templateService.apiHandler({ methodName: 'deleteOrder', @@ -802,7 +869,12 @@ export function RegisterRoutes(app: Router) { try { validatedArgs = templateService.getValidatedArgs({ args, request, response }); - const controller = new HyperboardController(); + const container: IocContainer = typeof iocContainer === 'function' ? 
(iocContainer as IocContainerFactory)(request) : iocContainer; + + const controller: any = await container.get(HyperboardController); + if (typeof controller['setStatus'] === 'function') { + controller.setStatus(undefined); + } await templateService.apiHandler({ methodName: 'createHyperboard', @@ -833,7 +905,12 @@ export function RegisterRoutes(app: Router) { try { validatedArgs = templateService.getValidatedArgs({ args, request, response }); - const controller = new HyperboardController(); + const container: IocContainer = typeof iocContainer === 'function' ? (iocContainer as IocContainerFactory)(request) : iocContainer; + + const controller: any = await container.get(HyperboardController); + if (typeof controller['setStatus'] === 'function') { + controller.setStatus(undefined); + } await templateService.apiHandler({ methodName: 'updateHyperboard', @@ -865,7 +942,12 @@ export function RegisterRoutes(app: Router) { try { validatedArgs = templateService.getValidatedArgs({ args, request, response }); - const controller = new HyperboardController(); + const container: IocContainer = typeof iocContainer === 'function' ? (iocContainer as IocContainerFactory)(request) : iocContainer; + + const controller: any = await container.get(HyperboardController); + if (typeof controller['setStatus'] === 'function') { + controller.setStatus(undefined); + } await templateService.apiHandler({ methodName: 'deleteHyperboard', @@ -895,7 +977,12 @@ export function RegisterRoutes(app: Router) { try { validatedArgs = templateService.getValidatedArgs({ args, request, response }); - const controller = new BlueprintController(); + const container: IocContainer = typeof iocContainer === 'function' ? 
(iocContainer as IocContainerFactory)(request) : iocContainer; + + const controller: any = await container.get(BlueprintController); + if (typeof controller['setStatus'] === 'function') { + controller.setStatus(undefined); + } await templateService.apiHandler({ methodName: 'createBlueprint', @@ -926,7 +1013,12 @@ export function RegisterRoutes(app: Router) { try { validatedArgs = templateService.getValidatedArgs({ args, request, response }); - const controller = new BlueprintController(); + const container: IocContainer = typeof iocContainer === 'function' ? (iocContainer as IocContainerFactory)(request) : iocContainer; + + const controller: any = await container.get(BlueprintController); + if (typeof controller['setStatus'] === 'function') { + controller.setStatus(undefined); + } await templateService.apiHandler({ methodName: 'deleteBlueprint', @@ -957,7 +1049,12 @@ export function RegisterRoutes(app: Router) { try { validatedArgs = templateService.getValidatedArgs({ args, request, response }); - const controller = new BlueprintController(); + const container: IocContainer = typeof iocContainer === 'function' ? (iocContainer as IocContainerFactory)(request) : iocContainer; + + const controller: any = await container.get(BlueprintController); + if (typeof controller['setStatus'] === 'function') { + controller.setStatus(undefined); + } await templateService.apiHandler({ methodName: 'mintBlueprint', @@ -987,7 +1084,12 @@ export function RegisterRoutes(app: Router) { try { validatedArgs = templateService.getValidatedArgs({ args, request, response }); - const controller = new AllowListController(); + const container: IocContainer = typeof iocContainer === 'function' ? 
(iocContainer as IocContainerFactory)(request) : iocContainer; + + const controller: any = await container.get(AllowListController); + if (typeof controller['setStatus'] === 'function') { + controller.setStatus(undefined); + } await templateService.apiHandler({ methodName: 'storeAllowList', @@ -1017,7 +1119,12 @@ export function RegisterRoutes(app: Router) { try { validatedArgs = templateService.getValidatedArgs({ args, request, response }); - const controller = new AllowListController(); + const container: IocContainer = typeof iocContainer === 'function' ? (iocContainer as IocContainerFactory)(request) : iocContainer; + + const controller: any = await container.get(AllowListController); + if (typeof controller['setStatus'] === 'function') { + controller.setStatus(undefined); + } await templateService.apiHandler({ methodName: 'validateAllowList', diff --git a/src/__generated__/swagger.json b/src/__generated__/swagger.json index e9f246f0..c21444ea 100644 --- a/src/__generated__/swagger.json +++ b/src/__generated__/swagger.json @@ -264,6 +264,11 @@ "type": "object", "additionalProperties": false }, + "Record_string.string-or-string-Array_": { + "properties": {}, + "type": "object", + "description": "Construct a type with a set of properties K of type T" + }, "HypercertClaimdata": { "description": "Properties of an impact claim", "properties": { @@ -845,56 +850,6 @@ "type": "object", "additionalProperties": false }, - "OrderValidatorCode": { - "description": "Error errors returned by the order validator contract", - "enum": [ - 0, - 101, - 111, - 112, - 113, - 201, - 211, - 212, - 213, - 301, - 311, - 312, - 321, - 322, - 401, - 402, - 411, - 412, - 413, - 414, - 415, - 421, - 422, - 501, - 502, - 503, - 601, - 611, - 612, - 621, - 622, - 623, - 631, - 632, - 633, - 634, - 641, - 642, - 701, - 702, - 801, - 802, - 901, - 902 - ], - "type": "number" - }, "ValidateOrderRequest": { "properties": { "tokenIds": { @@ -1570,7 +1525,9 @@ { "properties": { "data": {}, - 
"errors": {}, + "errors": { + "$ref": "#/components/schemas/Record_string.string-or-string-Array_" + }, "message": { "type": "string" }, @@ -2104,23 +2061,10 @@ "nonce_counter": { "type": "number", "format": "double" - }, - "created_at": { - "type": "string" - }, - "chain_id": { - "type": "number", - "format": "double" - }, - "address": { - "type": "string" } }, "required": [ - "nonce_counter", - "created_at", - "chain_id", - "address" + "nonce_counter" ], "type": "object" }, @@ -2189,28 +2133,7 @@ "schema": { "properties": { "data": { - "items": { - "properties": { - "validator_codes": { - "items": { - "$ref": "#/components/schemas/OrderValidatorCode" - }, - "type": "array" - }, - "invalidated": { - "type": "boolean" - }, - "id": { - "type": "string" - } - }, - "required": [ - "validator_codes", - "invalidated", - "id" - ], - "type": "object" - }, + "items": {}, "type": "array" }, "message": { diff --git a/src/client/evmClient.ts b/src/client/evmClient.ts index 9e310cdf..42f180c3 100644 --- a/src/client/evmClient.ts +++ b/src/client/evmClient.ts @@ -1,8 +1,4 @@ -import { - alchemyApiKey, - drpcApiPkey, - infuraApiKey, -} from "../utils/constants.js"; +import { alchemyApiKey, drpcApiPkey } from "../utils/constants.js"; import { PublicClient, createPublicClient, fallback } from "viem"; import { ChainFactory } from "./chainFactory.js"; import { RpcClientFactory } from "./rpcClientFactory.js"; @@ -17,6 +13,7 @@ class AlchemyProvider implements RpcProvider { const urls: Record = { 10: `https://opt-mainnet.g.alchemy.com/v2/${alchemyApiKey}`, 8453: `https://base-mainnet.g.alchemy.com/v2/${alchemyApiKey}`, + 42220: `https://celo-mainnet.g.alchemy.com/v2/${alchemyApiKey}`, 42161: `https://arb-mainnet.g.alchemy.com/v2/${alchemyApiKey}`, 421614: `https://arb-sepolia.g.alchemy.com/v2/${alchemyApiKey}`, 84532: `https://base-sepolia.g.alchemy.com/v2/${alchemyApiKey}`, @@ -26,18 +23,6 @@ class AlchemyProvider implements RpcProvider { } } -class InfuraProvider implements 
RpcProvider { - getUrl(chainId: number): string | undefined { - const urls: Record = { - 10: `https://optimism-mainnet.infura.io/v3/${infuraApiKey}`, - 42220: `https://celo-mainnet.infura.io/v3/${infuraApiKey}`, - 42161: `https://arbitrum-mainnet.infura.io/v3/${infuraApiKey}`, - 421614: `https://arbitrum-sepolia.infura.io/v3/${infuraApiKey}`, - }; - return urls[chainId]; - } -} - class DrpcProvider implements RpcProvider { getUrl(chainId: number): string | undefined { const networks: Record = { @@ -97,7 +82,6 @@ class LavaProvider implements RpcProvider { export class EvmClientFactory { private static readonly providers: RpcProvider[] = [ new AlchemyProvider(), - new InfuraProvider(), new DrpcProvider(), new GlifProvider(), new AnkrProvider(), diff --git a/src/client/graphql.ts b/src/client/graphql.ts index aa3d915f..cd9f56a5 100644 --- a/src/client/graphql.ts +++ b/src/client/graphql.ts @@ -1,5 +1,5 @@ import { createYoga } from "graphql-yoga"; -import { resolvers } from "../graphql/schemas/resolvers/composed.js"; +import { resolvers } from "../services/graphql/resolvers/composed.js"; import { buildSchema } from "type-graphql"; import { container } from "tsyringe"; import { Client, cacheExchange, fetchExchange } from "@urql/core"; @@ -53,7 +53,7 @@ export const yoga = createYoga({ cors: { methods: ["POST"], }, - graphqlEndpoint: "/v1/graphql", + graphqlEndpoint: "/v2/graphql", plugins: [ useResponseCache({ // global cache @@ -76,6 +76,6 @@ export const yoga = createYoga({ }); export const urqlClient = new Client({ - url: `${CONSTANTS.ENDPOINTS[indexerEnvironment as "production" | "test"]}/v1/graphql`, + url: `${CONSTANTS.ENDPOINTS[indexerEnvironment as "production" | "test"]}/v2/graphql`, exchanges: [cacheExchange, fetchExchange], }); diff --git a/src/client/kysely.ts b/src/client/kysely.ts index 3955140c..176db032 100644 --- a/src/client/kysely.ts +++ b/src/client/kysely.ts @@ -1,22 +1,64 @@ import { Kysely, PostgresDialect } from "kysely"; +import { singleton } 
from "tsyringe"; import pkg from "pg"; const { Pool } = pkg; import type { CachingDatabase } from "../types/kyselySupabaseCaching.js"; +import type { DataDatabase } from "../types/kyselySupabaseData.js"; import { cachingDatabaseUrl, dataDatabaseUrl } from "../utils/constants.js"; -import { DataDatabase } from "../types/kyselySupabaseData.js"; - -export const kyselyCaching = new Kysely({ - dialect: new PostgresDialect({ - pool: new Pool({ - connectionString: cachingDatabaseUrl, - }), - }), +import { container } from "tsyringe"; +import { format } from "date-fns"; + +pkg.types.setTypeParser(pkg.types.builtins.TIMESTAMPTZ, (val) => { + return format(new Date(val), "t"); }); -export const kyselyData = new Kysely({ - dialect: new PostgresDialect({ - pool: new Pool({ - connectionString: dataDatabaseUrl, - }), - }), +pkg.types.setTypeParser(pkg.types.builtins.TIMESTAMP, (val) => { + return format(new Date(val), "t"); }); + +export abstract class BaseKyselyService< + DB extends CachingDatabase | DataDatabase, +> { + constructor(protected readonly db: Kysely) {} + + getConnection() { + return this.db; + } +} + +@singleton() +export class CachingKyselyService extends BaseKyselyService { + constructor() { + super( + new Kysely({ + dialect: new PostgresDialect({ + pool: new Pool({ + connectionString: cachingDatabaseUrl, + }), + }), + }), + ); + } +} + +@singleton() +export class DataKyselyService extends BaseKyselyService { + constructor() { + super( + new Kysely({ + dialect: new PostgresDialect({ + pool: new Pool({ + connectionString: dataDatabaseUrl, + }), + }), + }), + ); + } +} + +// For backwards compatibility during refactor +export const kyselyCaching = container + .resolve(CachingKyselyService) + .getConnection(); + +export const kyselyData = container.resolve(DataKyselyService).getConnection(); diff --git a/src/client/supabase.ts b/src/client/supabase.ts index ee4561c9..b32beb87 100644 --- a/src/client/supabase.ts +++ b/src/client/supabase.ts @@ -11,6 +11,7 @@ import 
{ import { type Database as CachingDatabaseTypes } from "../types/supabaseCaching.js"; import { type Database as DataDatabaseTypes } from "../types/supabaseData.js"; import { cache } from "./graphql.js"; +import { singleton } from "tsyringe"; // Create a single supabase client for interacting with your database export const supabaseCaching = createClient( @@ -27,7 +28,7 @@ export const supabaseData = createClient( const handleChangeClaims = ( payload: RealtimePostgresChangesPayload<{ [key: string]: any }>, ) => { - console.debug(payload); + // console.debug(payload); switch (payload.eventType) { case "INSERT": cache.invalidate([{ typename: "Hypercert" }]); @@ -44,7 +45,7 @@ const handleChangeClaims = ( const handleChangeFractions = ( payload: RealtimePostgresChangesPayload<{ [key: string]: any }>, ) => { - console.debug(payload); + // console.debug(payload); switch (payload.eventType) { case "INSERT": cache.invalidate([{ typename: "Fraction" }]); @@ -61,7 +62,7 @@ const handleChangeFractions = ( const handleChangeMetadata = ( payload: RealtimePostgresChangesPayload<{ [key: string]: any }>, ) => { - console.debug(payload); + // console.debug(payload); switch (payload.eventType) { case "INSERT": cache.invalidate([{ typename: "Metadata", id: payload.new.id }]); @@ -78,7 +79,7 @@ const handleChangeMetadata = ( const handleChangeSales = ( payload: RealtimePostgresChangesPayload<{ [key: string]: any }>, ) => { - console.debug(payload); + // console.debug(payload); switch (payload.eventType) { case "INSERT": cache.invalidate([{ typename: "Sale" }]); @@ -95,7 +96,7 @@ const handleChangeSales = ( const handleChangeAllowlistRecords = ( payload: RealtimePostgresChangesPayload<{ [key: string]: any }>, ) => { - console.debug(payload); + // console.debug(payload); switch (payload.eventType) { case "INSERT": cache.invalidate([ @@ -121,7 +122,7 @@ const handleChangeAllowlistRecords = ( const handleChangeAttestations = ( payload: RealtimePostgresChangesPayload<{ [key: string]: any 
}>, ) => { - console.debug(payload); + // console.debug(payload); switch (payload.eventType) { case "INSERT": cache.invalidate([{ typename: "Attestation" }]); @@ -138,7 +139,7 @@ const handleChangeAttestations = ( const handleChangeUsers = ( payload: RealtimePostgresChangesPayload<{ [key: string]: any }>, ) => { - console.debug(payload); + // console.debug(payload); switch (payload.eventType) { case "INSERT": cache.invalidate([{ typename: "User" }]); @@ -155,7 +156,7 @@ const handleChangeUsers = ( const handleChangeBlueprints = ( payload: RealtimePostgresChangesPayload<{ [key: string]: any }>, ) => { - console.debug(payload); + // console.debug(payload); switch (payload.eventType) { case "INSERT": cache.invalidate([{ typename: "Blueprint" }]); @@ -175,7 +176,7 @@ const handleChangeBlueprints = ( const handleChangeHyperboards = ( payload: RealtimePostgresChangesPayload<{ [key: string]: any }>, ) => { - console.debug(payload); + // console.debug(payload); switch (payload.eventType) { case "UPDATE": case "DELETE": @@ -201,7 +202,7 @@ const handleChangeHyperboards = ( const handleChangeOrders = ( payload: RealtimePostgresChangesPayload<{ [key: string]: any }>, ) => { - console.debug(payload); + // console.debug(payload); switch (payload.eventType) { case "INSERT": @@ -218,7 +219,7 @@ const handleChangeOrders = ( const handleChangeSignatureRequests = ( payload: RealtimePostgresChangesPayload<{ [key: string]: any }>, ) => { - console.debug(payload); + // console.debug(payload); switch (payload.eventType) { case "INSERT": @@ -231,220 +232,332 @@ const handleChangeSignatureRequests = ( } }; -supabaseCaching - .channel("schema-db-changes") - .on( - "postgres_changes", - { - event: "*", - schema: "public", - table: "claims", - }, - (payload) => handleChangeClaims(payload), - ) - .on( - "postgres_changes", - { - event: "*", - schema: "public", - table: "fractions", - }, - (payload) => handleChangeFractions(payload), - ) - .on( - "postgres_changes", - { - event: "*", - schema: 
"public", - table: "metadata", - }, - (payload) => handleChangeMetadata(payload), - ) - .on( - "postgres_changes", - { - event: "*", - schema: "public", - table: "sales", - }, - (payload) => handleChangeSales(payload), - ) - .on( - "postgres_changes", - { - event: "*", - schema: "public", - table: "sales", - }, - (payload) => handleChangeSales(payload), - ) - .on( - "postgres_changes", - { - event: "*", - schema: "public", - table: "allow_list_data", - }, - (payload) => handleChangeAllowlistRecords(payload), - ) - .on( - "postgres_changes", - { - event: "*", - schema: "public", - table: "hypercert_allow_list_records", - }, - (payload) => handleChangeAllowlistRecords(payload), - ) - .on( - "postgres_changes", - { - event: "*", - schema: "public", - table: "attestations", - }, - (payload) => handleChangeAttestations(payload), - ) - .subscribe(); +@singleton() +export class SupabaseRealtimeManager { + private isSubscribed: boolean = false; -supabaseData - .channel("schema-db-changes") - .on( - "postgres_changes", - { - event: "*", - schema: "public", - table: "users", - }, - (payload) => handleChangeUsers(payload), - ) - .on( - "postgres_changes", - { - event: "*", - schema: "public", - table: "collections", - }, - (payload) => handleChangeHyperboards(payload), - ) - .on( - "postgres_changes", - { - event: "*", - schema: "public", - table: "hyperboards", - }, - (payload) => handleChangeHyperboards(payload), - ) - .on( - "postgres_changes", - { - event: "*", - schema: "public", - table: "hypercerts", - }, - (payload) => handleChangeHyperboards(payload), - ) - .on( - "postgres_changes", - { - event: "*", - schema: "public", - table: "hyperboard_hypercert_metadata", - }, - (payload) => handleChangeHyperboards(payload), - ) - .on( - "postgres_changes", - { - event: "*", - schema: "public", - table: "hyperboard_collections", - }, - (payload) => handleChangeHyperboards(payload), - ) - .on( - "postgres_changes", - { - event: "*", - schema: "public", - table: 
"hyperboard_blueprint_metadata", - }, - (payload) => handleChangeHyperboards(payload), - ) - .on( - "postgres_changes", - { - event: "*", - schema: "public", - table: "collection_blueprints", - }, - (payload) => handleChangeHyperboards(payload), - ) - .on( - "postgres_changes", - { - event: "*", - schema: "public", - table: "blueprints", - }, - (payload) => { - handleChangeBlueprints(payload); - handleChangeHyperboards(payload); - }, - ) - .on( - "postgres_changes", - { - event: "*", - schema: "public", - table: "users", - }, - (payload) => handleChangeHyperboards(payload), - ) - .on( - "postgres_changes", - { - event: "*", - schema: "public", - table: "collection_admins", - }, - (payload) => handleChangeHyperboards(payload), - ) - .on( - "postgres_changes", - { - event: "*", - schema: "public", - table: "hyperboard_admins", - }, - (payload) => handleChangeHyperboards(payload), - ) - .on( - "postgres_changes", - { - event: "*", - schema: "public", - table: "marketplace_orders", - }, - (payload) => handleChangeOrders(payload), - ) - .on( - "postgres_changes", - { - event: "*", - schema: "public", - table: "marketplace_order_nonces", - }, - (payload) => handleChangeOrders(payload), - ) - .on( - "postgres_changes", - { - event: "*", - schema: "public", - table: "signature_requests", - }, - (payload) => handleChangeSignatureRequests(payload), - ) - .subscribe(); + public async subscribeToEvents(): Promise { + if (this.isSubscribed) { + console.log( + "⚠️ [REALTIME] Already subscribed to Supabase realtime events", + ); + return; + } + + console.log( + "ℹ️ [REALTIME] Initializing Supabase realtime event subscriptions", + ); + + try { + await this.subscribeToSupabaseRealtimeEvents(); + this.isSubscribed = true; + console.log( + "✅ [REALTIME] Successfully subscribed to Supabase realtime events", + ); + } catch (error) { + console.error( + "⛔️ [REALTIME] Failed to subscribe to Supabase realtime events:", + error, + ); + throw error; + } + } + + private async 
subscribeToSupabaseRealtimeEvents() { + console.log("ℹ️ [REALTIME] Unsubscribing from all channels"); + const cachingChannel = supabaseCaching.channel("schema-db-changes"); + const dataChannel = supabaseData.channel("schema-db-changes"); + + await Promise.all([ + cachingChannel.unsubscribe().then((status) => { + console.log("ℹ️ [REALTIME] Caching channel unsubscribed", status); + }), + dataChannel.unsubscribe().then((status) => { + console.log("ℹ️ [REALTIME] Data channel unsubscribed", status); + }), + ]); + + console.log("ℹ️ [REALTIME] Subscribing to realtime events"); + + await Promise.all([ + this.subscribeToCachingChannel(), + this.subscribeToDataChannel(), + ]); + } + + private subscribeToCachingChannel(): Promise { + return new Promise((resolve, reject) => { + supabaseCaching + .channel("schema-db-changes") + .on( + "postgres_changes", + { + event: "*", + schema: "public", + table: "claims", + }, + (payload) => handleChangeClaims(payload), + ) + .on( + "postgres_changes", + { + event: "*", + schema: "public", + table: "fractions", + }, + (payload) => handleChangeFractions(payload), + ) + .on( + "postgres_changes", + { + event: "*", + schema: "public", + table: "metadata", + }, + (payload) => handleChangeMetadata(payload), + ) + .on( + "postgres_changes", + { + event: "*", + schema: "public", + table: "sales", + }, + (payload) => handleChangeSales(payload), + ) + .on( + "postgres_changes", + { + event: "*", + schema: "public", + table: "sales", + }, + (payload) => handleChangeSales(payload), + ) + .on( + "postgres_changes", + { + event: "*", + schema: "public", + table: "allow_list_data", + }, + (payload) => handleChangeAllowlistRecords(payload), + ) + .on( + "postgres_changes", + { + event: "*", + schema: "public", + table: "hypercert_allow_list_records", + }, + (payload) => handleChangeAllowlistRecords(payload), + ) + .on( + "postgres_changes", + { + event: "*", + schema: "public", + table: "attestations", + }, + (payload) => 
handleChangeAttestations(payload), + ) + .subscribe((status, error) => { + if (status === "SUBSCRIBED") { + console.log( + "✅ [REALTIME] Subscribed to realtime events for caching with status", + status, + ); + resolve(); + return; + } + + if (error) { + console.error( + "⛔️ [REALTIME] Error subscribing to realtime events for caching", + error, + ); + reject(new Error("Error subscribing to realtime events caching")); + } else { + console.log( + "⚠️ [REALTIME] Subscribed to realtime events for caching with status", + status, + ); + reject(new Error("Error subscribing to realtime events caching")); + } + }); + }); + } + + private subscribeToDataChannel(): Promise { + return new Promise((resolve, reject) => { + supabaseData + .channel("schema-db-changes") + .on( + "postgres_changes", + { + event: "*", + schema: "public", + table: "users", + }, + (payload) => handleChangeUsers(payload), + ) + .on( + "postgres_changes", + { + event: "*", + schema: "public", + table: "collections", + }, + (payload) => handleChangeHyperboards(payload), + ) + .on( + "postgres_changes", + { + event: "*", + schema: "public", + table: "hyperboards", + }, + (payload) => handleChangeHyperboards(payload), + ) + .on( + "postgres_changes", + { + event: "*", + schema: "public", + table: "hypercerts", + }, + (payload) => handleChangeHyperboards(payload), + ) + .on( + "postgres_changes", + { + event: "*", + schema: "public", + table: "hyperboard_hypercert_metadata", + }, + (payload) => handleChangeHyperboards(payload), + ) + .on( + "postgres_changes", + { + event: "*", + schema: "public", + table: "hyperboard_collections", + }, + (payload) => handleChangeHyperboards(payload), + ) + .on( + "postgres_changes", + { + event: "*", + schema: "public", + table: "hyperboard_blueprint_metadata", + }, + (payload) => handleChangeHyperboards(payload), + ) + .on( + "postgres_changes", + { + event: "*", + schema: "public", + table: "collection_blueprints", + }, + (payload) => handleChangeHyperboards(payload), + ) + 
.on( + "postgres_changes", + { + event: "*", + schema: "public", + table: "blueprints", + }, + (payload) => { + handleChangeBlueprints(payload); + handleChangeHyperboards(payload); + }, + ) + .on( + "postgres_changes", + { + event: "*", + schema: "public", + table: "users", + }, + (payload) => handleChangeHyperboards(payload), + ) + .on( + "postgres_changes", + { + event: "*", + schema: "public", + table: "collection_admins", + }, + (payload) => handleChangeHyperboards(payload), + ) + .on( + "postgres_changes", + { + event: "*", + schema: "public", + table: "hyperboard_admins", + }, + (payload) => handleChangeHyperboards(payload), + ) + .on( + "postgres_changes", + { + event: "*", + schema: "public", + table: "marketplace_orders", + }, + (payload) => handleChangeOrders(payload), + ) + .on( + "postgres_changes", + { + event: "*", + schema: "public", + table: "marketplace_order_nonces", + }, + (payload) => handleChangeOrders(payload), + ) + .on( + "postgres_changes", + { + event: "*", + schema: "public", + table: "signature_requests", + }, + (payload) => handleChangeSignatureRequests(payload), + ) + .subscribe((status, error) => { + if (status === "SUBSCRIBED") { + console.log( + "✅ [REALTIME] Subscribed to realtime events for data with status", + status, + ); + resolve(); + return; + } + + if (error) { + console.error( + "⛔️ [REALTIME] Error subscribing to realtime events for data", + error, + ); + reject(new Error("Error subscribing to realtime events data")); + } else { + console.log( + "⚠️ [REALTIME] Subscribed to realtime events for data with status", + status, + ); + reject(new Error("Error subscribing to realtime events data")); + } + }); + }); + } + + public isEventSubscriptionActive(): boolean { + return this.isSubscribed; + } +} diff --git a/src/commands/CommandFactory.ts b/src/commands/CommandFactory.ts index 68f53dc1..b7b5b96f 100644 --- a/src/commands/CommandFactory.ts +++ b/src/commands/CommandFactory.ts @@ -1,28 +1,35 @@ -import { Database } from 
"../types/supabaseData.js"; import { ISafeApiCommand } from "../types/safe-signatures.js"; import { MarketplaceCreateOrderCommand } from "./MarketplaceCreateOrderCommand.js"; import { SafeApiCommand } from "./SafeApiCommand.js"; import { UserUpsertCommand } from "./UserUpsertCommand.js"; +import { Selectable } from "kysely"; +import { DataDatabase } from "../types/kyselySupabaseData.js"; +import { container } from "tsyringe"; -type SignatureRequest = - Database["public"]["Tables"]["signature_requests"]["Row"]; +export type SignatureRequest = DataDatabase["signature_requests"]; -export function getCommand(request: SignatureRequest): ISafeApiCommand { +export function getCommand( + request: Selectable, +): ISafeApiCommand { switch (request.purpose) { case "update_user_data": - return new UserUpsertCommand( - request.safe_address, - request.message_hash, - // The type is lying. It's a string. - Number(request.chain_id), - ); + return container + .resolve(UserUpsertCommand) + .initialize( + request.safe_address, + request.message_hash, + Number(request.chain_id), + ); case "create_marketplace_order": - return new MarketplaceCreateOrderCommand( - request.safe_address, - request.message_hash, - Number(request.chain_id), - ); + return container + .resolve(MarketplaceCreateOrderCommand) + .initialize( + request.safe_address, + request.message_hash, + Number(request.chain_id), + ); + default: console.warn("Unrecognized purpose:", request.purpose); return new NoopCommand(); diff --git a/src/commands/MarketplaceCreateOrderCommand.ts b/src/commands/MarketplaceCreateOrderCommand.ts index 3f2ab8e5..5b1add24 100644 --- a/src/commands/MarketplaceCreateOrderCommand.ts +++ b/src/commands/MarketplaceCreateOrderCommand.ts @@ -9,13 +9,42 @@ import MarketplaceCreateOrderSignatureVerifier from "../lib/safe/signature-verif import { SafeApiCommand } from "./SafeApiCommand.js"; import { getHypercertTokenId } from "../utils/tokenIds.js"; +import { inject, injectable } from "tsyringe"; 
+import { SignatureRequestsService } from "../services/database/entities/SignatureRequestsEntityService.js"; +import { MarketplaceOrdersService } from "../services/database/entities/MarketplaceOrdersEntityService.js"; +@injectable() export class MarketplaceCreateOrderCommand extends SafeApiCommand { + constructor( + safeAddress: string, + messageHash: string, + chainId: number, + @inject(SignatureRequestsService) + private signatureRequestsService: SignatureRequestsService, + @inject(MarketplaceOrdersService) + private marketplaceOrdersService: MarketplaceOrdersService, + ) { + super(safeAddress, messageHash, chainId); + } + + initialize( + safeAddress: string, + messageHash: string, + chainId: number, + ): this { + this.safeAddress = safeAddress; + this.messageHash = messageHash; + this.chainId = chainId; + return this; + } + async execute(): Promise { - const signatureRequest = await this.dataService.getSignatureRequest( - this.safeAddress, - this.messageHash, - ); + const signatureRequest = await this.signatureRequestsService.getSignatureRequest({ + where: { + safe_address: { eq: this.safeAddress }, + message_hash: { eq: this.messageHash }, + }, + }); if (!signatureRequest || signatureRequest.status !== "pending") { return; @@ -72,9 +101,9 @@ export class MarketplaceCreateOrderCommand extends SafeApiCommand { amounts: orderDetails.amounts.map((x) => parseInt(x, 10)), }; - await this.dataService.storeOrder(insertEntity); + await this.marketplaceOrdersService.storeOrder(insertEntity); - await this.dataService.updateSignatureRequestStatus( + await this.signatureRequestsService.updateSignatureRequestStatus( this.safeAddress, this.messageHash, "executed", diff --git a/src/commands/SafeApiCommand.ts b/src/commands/SafeApiCommand.ts index 7edc8680..cbc4107c 100644 --- a/src/commands/SafeApiCommand.ts +++ b/src/commands/SafeApiCommand.ts @@ -1,21 +1,18 @@ import SafeApiKit from "@safe-global/api-kit"; import { SafeApiStrategyFactory } from 
"../lib/safe/SafeApiKitStrategy.js"; -import { SupabaseDataService } from "../services/SupabaseDataService.js"; import { ISafeApiCommand } from "../types/safe-signatures.js"; export abstract class SafeApiCommand implements ISafeApiCommand { - protected readonly safeAddress: string; - protected readonly messageHash: string; - protected readonly chainId: number; - protected readonly dataService: SupabaseDataService; - protected readonly safeApiKit: SafeApiKit.default; + protected safeAddress: string; + protected messageHash: string; + protected chainId: number; + protected safeApiKit: SafeApiKit.default; constructor(safeAddress: string, messageHash: string, chainId: number) { this.safeAddress = safeAddress; this.messageHash = messageHash; this.chainId = chainId; - this.dataService = new SupabaseDataService(); this.safeApiKit = SafeApiStrategyFactory.getStrategy(chainId).createInstance(); } diff --git a/src/commands/UserUpsertCommand.ts b/src/commands/UserUpsertCommand.ts index 248a255f..4b95810b 100644 --- a/src/commands/UserUpsertCommand.ts +++ b/src/commands/UserUpsertCommand.ts @@ -1,24 +1,40 @@ import { getAddress } from "viem"; +import UserUpsertSignatureVerifier from "../lib/safe/signature-verification/UserUpsertSignatureVerifier.js"; import { MultisigUserUpdateMessage, USER_UPDATE_MESSAGE_SCHEMA, } from "../lib/users/schemas.js"; import { isTypedMessage } from "../utils/signatures.js"; -import UserUpsertSignatureVerifier from "../lib/safe/signature-verification/UserUpsertSignatureVerifier.js"; -import { Database } from "../types/supabaseData.js"; +import { Insertable } from "kysely"; +import { inject, injectable } from "tsyringe"; +import { SignatureRequestsService } from "../services/database/entities/SignatureRequestsEntityService.js"; +import { UsersService } from "../services/database/entities/UsersEntityService.js"; +import { SignatureRequest } from "./CommandFactory.js"; import { SafeApiCommand } from "./SafeApiCommand.js"; -type SignatureRequest = - 
Database["public"]["Tables"]["signature_requests"]["Row"]; - +@injectable() export class UserUpsertCommand extends SafeApiCommand { + constructor( + safeAddress: string, + messageHash: string, + chainId: number, + @inject(SignatureRequestsService) + private signatureRequestsService: SignatureRequestsService, + @inject(UsersService) + private usersService: UsersService, + ) { + super(safeAddress, messageHash, chainId); + } async execute(): Promise { - const signatureRequest = await this.dataService.getSignatureRequest( - this.safeAddress, - this.messageHash, - ); + const signatureRequest = + await this.signatureRequestsService.getSignatureRequest({ + where: { + safe_address: { eq: this.safeAddress }, + message_hash: { eq: this.messageHash }, + }, + }); if (!signatureRequest || signatureRequest.status !== "pending") { return; @@ -44,7 +60,9 @@ export class UserUpsertCommand extends SafeApiCommand { message.data, ); - if (!(await verifier.verify(safeMessage.preparedSignature))) { + if ( + !(await verifier.verify(safeMessage.preparedSignature as `0x${string}`)) + ) { console.error(`Signature verification failed: ${this.getId()}`); return; } @@ -54,10 +72,10 @@ export class UserUpsertCommand extends SafeApiCommand { } async updateDatabase( - signatureRequest: Exclude, + signatureRequest: Insertable, message: MultisigUserUpdateMessage, ): Promise { - const users = await this.dataService.upsertUsers([ + const users = await this.usersService.upsertUsers([ { address: this.safeAddress, chain_id: signatureRequest.chain_id, @@ -68,10 +86,21 @@ export class UserUpsertCommand extends SafeApiCommand { if (!users.length) { throw new Error("Error adding or updating user"); } - await this.dataService.updateSignatureRequestStatus( + await this.signatureRequestsService.updateSignatureRequestStatus( this.safeAddress, this.messageHash, "executed", ); } + + public initialize( + safeAddress: string, + messageHash: string, + chainId: number, + ): this { + this.safeAddress = safeAddress; + 
this.messageHash = messageHash; + this.chainId = chainId; + return this; + } } diff --git a/src/controllers/AllowListController.ts b/src/controllers/AllowListController.ts index 5810d375..c852aa9b 100644 --- a/src/controllers/AllowListController.ts +++ b/src/controllers/AllowListController.ts @@ -1,4 +1,3 @@ -import { jsonToBlob } from "../utils/jsonToBlob.js"; import { Body, Controller, @@ -8,16 +7,17 @@ import { SuccessResponse, Tags, } from "tsoa"; -import { StorageService } from "../services/StorageService.js"; import { parseAndValidateMerkleTree } from "../lib/allowlists/parseAndValidateMerkleTreeDump.js"; +import { StorageService } from "../services/StorageService.js"; import type { StorageResponse, StoreAllowListRequest, ValidateAllowListRequest, ValidationResponse, } from "../types/api.js"; +import { jsonToBlob } from "../utils/jsonToBlob.js"; -@Route("v1/allowlists") +@Route("v2/allowlists") @Tags("Allowlists") export class AllowListController extends Controller { /** diff --git a/src/controllers/BlueprintController.ts b/src/controllers/BlueprintController.ts index 7e91e4ec..13c9aff5 100644 --- a/src/controllers/BlueprintController.ts +++ b/src/controllers/BlueprintController.ts @@ -9,10 +9,11 @@ import { SuccessResponse, Tags, } from "tsoa"; +import { inject, injectable } from "tsyringe"; import { isAddress } from "viem"; import { z } from "zod"; import { EvmClientFactory } from "../client/evmClient.js"; -import { SupabaseDataService } from "../services/SupabaseDataService.js"; +import { BlueprintsService } from "../services/database/entities/BlueprintsEntityService.js"; import type { BaseResponse, BlueprintCreateRequest, @@ -24,9 +25,16 @@ import { Json } from "../types/supabaseData.js"; import { verifyAuthSignedData } from "../utils/verifyAuthSignedData.js"; import { waitForTxThenMintBlueprint } from "../utils/waitForTxThenMintBlueprint.js"; -@Route("v1/blueprints") +@injectable() +@Route("v2/blueprints") @Tags("Blueprints") export class 
BlueprintController extends Controller { + constructor( + @inject(BlueprintsService) private blueprintsService: BlueprintsService, + ) { + super(); + } + @Post() @SuccessResponse(201, "Blueprint created successfully") @Response(422, "Unprocessable content", { @@ -160,11 +168,9 @@ export class BlueprintController extends Controller { }; } - const dataService = new SupabaseDataService(); - let blueprintId: number; try { - const blueprint = await dataService.upsertBlueprints([ + const blueprint = await this.blueprintsService.upsertBlueprints([ { form_values: form_values as unknown as Json, minter_address, @@ -190,7 +196,7 @@ export class BlueprintController extends Controller { } try { - await dataService.addAdminToBlueprint( + await this.blueprintsService.addAdminToBlueprint( blueprintId, admin_address, chain_id, @@ -243,8 +249,11 @@ export class BlueprintController extends Controller { const { signature, admin_address, chain_id } = parsedBody.data; - const dataService = new SupabaseDataService(); - const blueprint = await dataService.getBlueprintById(blueprintId); + const blueprint = await this.blueprintsService.getBlueprint({ + where: { + id: { eq: blueprintId }, + }, + }); if (!blueprint) { this.setStatus(404); @@ -255,7 +264,9 @@ export class BlueprintController extends Controller { }; } - const isAdmin = blueprint.admins.some( + const admins = await this.blueprintsService.getBlueprintAdmins(blueprintId); + + const isAdmin = admins.some( (admin) => admin.address === admin_address && admin.chain_id === chain_id, ); @@ -291,7 +302,7 @@ export class BlueprintController extends Controller { } try { - await dataService.deleteBlueprint(blueprintId); + await this.blueprintsService.deleteBlueprint(blueprintId); } catch (error) { this.setStatus(500); return { diff --git a/src/controllers/HyperboardController.ts b/src/controllers/HyperboardController.ts index 2d51f237..b2ab22d4 100644 --- a/src/controllers/HyperboardController.ts +++ 
b/src/controllers/HyperboardController.ts @@ -1,37 +1,48 @@ +import { CONSTANTS, parseClaimOrFractionId } from "@hypercerts-org/sdk"; +import { User } from "@sentry/node"; +import { Selectable } from "kysely"; +import _ from "lodash"; import { Body, Controller, Delete, + Patch, Path, Post, + Query, Response, Route, - Query, SuccessResponse, Tags, - Patch, } from "tsoa"; +import { inject, injectable } from "tsyringe"; +import { z } from "zod"; +import { CollectionService } from "../services/database/entities/CollectionEntityService.js"; +import { HyperboardService } from "../services/database/entities/HyperboardEntityService.js"; import type { BaseResponse, HyperboardCreateRequest, HyperboardResponse, HyperboardUpdateRequest, } from "../types/api.js"; -import { z } from "zod"; import { isValidHypercertId } from "../utils/hypercertIds.js"; -import { parseClaimOrFractionId } from "@hypercerts-org/sdk"; -import { SupabaseDataService } from "../services/SupabaseDataService.js"; -import { CONSTANTS } from "@hypercerts-org/sdk"; -import _ from "lodash"; import { verifyAuthSignedData } from "../utils/verifyAuthSignedData.js"; const allChains = Object.keys(CONSTANTS.DEPLOYMENTS).map((chain) => parseInt(chain), ); -@Route("v1/hyperboards") +@injectable() +@Route("v2/hyperboards") @Tags("Hyperboards") export class HyperboardController extends Controller { + constructor( + @inject(HyperboardService) private hyperboardsService: HyperboardService, + @inject(CollectionService) private collectionService: CollectionService, + ) { + super(); + } + /** * Create a new hyperboard. Creates the collections passed to it automatically. 
*/ @@ -230,10 +241,9 @@ export class HyperboardController extends Controller { }; } - const dataService = new SupabaseDataService(); let hyperboardId: string; try { - const hyperboards = await dataService.upsertHyperboards([ + const hyperboards = await this.hyperboardsService.upsertHyperboard([ { background_image: parsedBody.data.backgroundImg, tile_border_color: parsedBody.data.borderColor, @@ -246,10 +256,12 @@ export class HyperboardController extends Controller { throw new Error("Hyperboard must have an id to add collections."); } hyperboardId = hyperboards[0]?.id; - const admin = await dataService.addAdminToHyperboard( + const admin = await this.hyperboardsService.addAdminToHyperboard( hyperboardId, - adminAddress, - chainId, + { + address: adminAddress, + chain_id: chainId, + }, ); if (!admin) { throw new Error("Admin must be added to hyperboard."); @@ -271,30 +283,34 @@ export class HyperboardController extends Controller { if (!collection.id) { continue; } - const currentCollection = await dataService.getCollectionById( - collection.id, - ); + const currentCollection = await this.collectionService.getCollection({ + where: { + id: { eq: collection.id }, + }, + }); if (!currentCollection) { throw new Error(`Collection with id ${collection.id} not found`); } // Add the collection to the hyperboard - await dataService.addCollectionToHyperboard( + await this.hyperboardsService.addCollectionToHyperboard( hyperboardId, collection.id, ); - const currentUserIsAdminForCollection = - currentCollection.collection_admins - .flatMap((x) => x.admins) - .find( - (admin) => - admin.chain_id === chainId && admin.address === adminAddress, - ); + const admins = await this.collectionService.getCollectionAdmins( + collection.id, + ); + + const currentUserIsAdminForCollection = admins.some( + (admin: Selectable) => + admin.chain_id === chainId && + admin.address.toLowerCase() === adminAddress.toLowerCase(), + ); if (currentUserIsAdminForCollection) { // Update collection if you 
are an admin of the collection - await dataService.upsertCollections([ + await this.collectionService.upsertCollections([ { id: collection.id, name: collection.title, @@ -304,11 +320,13 @@ export class HyperboardController extends Controller { ]); // Remove all hypercerts from the collection - await dataService.deleteAllHypercertsFromCollection(collection.id); + await this.collectionService.deleteAllHypercertsFromCollection( + collection.id, + ); if (collection.hypercerts?.length) { // Update hypercerts in the collection if you are an admin of the collection - await dataService.upsertHypercerts( + await this.collectionService.upsertHypercertCollections( collection.hypercerts.map((hc) => ({ hypercert_id: hc.hypercertId, collection_id: currentCollection.id, @@ -316,7 +334,7 @@ export class HyperboardController extends Controller { ); // Update metadata anyway because they are not collection specific - await dataService.upsertHyperboardHypercertMetadata( + await this.hyperboardsService.upsertHyperboardHypercertMetadata( collection.hypercerts.map((hc) => ({ hypercert_id: hc.hypercertId, hyperboard_id: hyperboardId, @@ -326,17 +344,19 @@ export class HyperboardController extends Controller { ); } - await dataService.deleteAllBlueprintsFromCollection(collection.id); + await this.collectionService.deleteAllBlueprintsFromCollection( + collection.id, + ); if (collection.blueprints?.length) { - await dataService.addBlueprintsToCollection( + await this.collectionService.addBlueprintsToCollection( collection.blueprints.map((bp) => ({ blueprint_id: bp.blueprintId, collection_id: currentCollection.id, })), ); - await dataService.upsertHyperboardBlueprintMetadata( + await this.hyperboardsService.upsertHyperboardBlueprintMetadata( collection.blueprints.map((bp) => ({ blueprint_id: bp.blueprintId, hyperboard_id: hyperboardId, @@ -361,24 +381,27 @@ export class HyperboardController extends Controller { ); for (const collection of collectionsToCreate) { try { - const 
collectionCreateResponse = await dataService.upsertCollections([ - { - name: collection.title, - description: collection.description, - chain_ids: [chainId], - }, - ]); + const collectionCreateResponse = + await this.collectionService.upsertCollections([ + { + name: collection.title, + description: collection.description, + chain_ids: [chainId], + }, + ]); - const collectionId = collectionCreateResponse[0]?.id; + const collectionId = collectionCreateResponse[0].id; if (!collectionId) { throw new Error("Collection must have an id to add claims."); } // Add current user as admin to the collection because they are creating it - const admin = await dataService.addAdminToCollection( - collectionId, - adminAddress, - chainId, + const admin = await this.collectionService.addAdminToCollection( + collectionId.toString(), + { + address: adminAddress, + chain_id: chainId, + }, ); if (!admin) { @@ -388,38 +411,41 @@ export class HyperboardController extends Controller { if (collection.hypercerts?.length) { const hypercerts = collection.hypercerts.map((hc) => ({ hypercert_id: hc.hypercertId, - collection_id: collectionId, + collection_id: collectionId.toString(), })); - await dataService.upsertHypercerts(hypercerts); - await dataService.upsertHyperboardHypercertMetadata( + await this.collectionService.upsertHypercertCollections(hypercerts); + await this.hyperboardsService.upsertHyperboardHypercertMetadata( collection.hypercerts.map((hc) => ({ hypercert_id: hc.hypercertId, hyperboard_id: hyperboardId, - collection_id: collectionId, + collection_id: collectionId.toString(), display_size: hc.factor, })), ); } if (collection.blueprints?.length) { - await dataService.addBlueprintsToCollection( + await this.collectionService.addBlueprintsToCollection( collection.blueprints.map((bp) => ({ blueprint_id: bp.blueprintId, - collection_id: collectionId, + collection_id: collectionId.toString(), })), ); - await dataService.upsertHyperboardBlueprintMetadata( + await 
this.hyperboardsService.upsertHyperboardBlueprintMetadata( collection.blueprints.map((bp) => ({ blueprint_id: bp.blueprintId, hyperboard_id: hyperboardId, - collection_id: collectionId, + collection_id: collectionId.toString(), display_size: bp.factor, })), ); } - await dataService.addCollectionToHyperboard(hyperboardId, collectionId); + await this.hyperboardsService.addCollectionToHyperboard( + hyperboardId, + collectionId.toString(), + ); } catch (e) { console.error(e); this.setStatus(400); @@ -589,8 +615,11 @@ export class HyperboardController extends Controller { }; } - const dataService = new SupabaseDataService(); - const hyperboard = await dataService.getHyperboardById(hyperboardId); + const hyperboard = await this.hyperboardsService.getHyperboard({ + where: { + id: { eq: hyperboardId }, + }, + }); if (!hyperboard) { this.setStatus(404); @@ -601,7 +630,14 @@ export class HyperboardController extends Controller { } const { signature, adminAddress } = parsedBody.data; - const chainId = hyperboard.chain_ids[0]; + const chainId = hyperboard.chain_ids?.[0]; + if (!chainId) { + this.setStatus(400); + return { + success: false, + message: "Hyperboard must have a chain id", + }; + } const success = await verifyAuthSignedData({ address: adminAddress as `0x${string}`, signature: signature as `0x${string}`, @@ -653,12 +689,17 @@ export class HyperboardController extends Controller { }; } + const { data: admins } = + await this.hyperboardsService.getHyperboardAdmins(hyperboardId); + // Check if the admin is authorized to update the hyperboard - const adminUser = hyperboard.admins.find( - (admin) => admin.address === adminAddress && admin.chain_id === chainId, + const isAdmin = admins.some( + (admin) => + admin.address.toLowerCase() === adminAddress.toLowerCase() && + admin.chain_id === chainId, ); - if (!adminUser) { + if (!isAdmin) { this.setStatus(401); return { success: false, @@ -666,8 +707,16 @@ export class HyperboardController extends Controller { }; } + if 
(!hyperboard.chain_ids) { + this.setStatus(400); + return { + success: false, + message: "Hyperboard must have a chain id", + }; + } + try { - await dataService.upsertHyperboards([ + await this.hyperboardsService.upsertHyperboard([ { id: hyperboardId, background_image: parsedBody.data.backgroundImg || null, @@ -689,41 +738,49 @@ export class HyperboardController extends Controller { const collectionsToUpdate = parsedBody.data.collections.filter( (collection) => !!collection.id, ); + + const { data: hyperboardCollections } = + await this.hyperboardsService.getHyperboardCollections(hyperboardId); for (const collection of collectionsToUpdate) { try { if (!collection.id) { continue; } - const currentCollection = await dataService.getCollectionById( - collection.id, - ); + const currentCollection = await this.collectionService.getCollection({ + where: { + id: { eq: collection.id }, + }, + }); + if (!currentCollection) { throw new Error(`Collection with id ${collection.id} not found`); } // Add the collection to the hyperboard if it hasn't been added already - const isCollectionInHyperboard = !!hyperboard.collections.find( + const isCollectionInHyperboard = !!hyperboardCollections.find( (c) => c.id === collection.id, ); if (!isCollectionInHyperboard) { - await dataService.addCollectionToHyperboard( + await this.hyperboardsService.addCollectionToHyperboard( hyperboardId, collection.id, ); } + const admins = await this.collectionService.getCollectionAdmins( + collection.id, + ); + // Update metadata anyway because they are not collection specific - const currentUserIsAdminForCollection = - currentCollection.collection_admins - .flatMap((x) => x.admins) - .find( - (admin) => - admin.chain_id === chainId && admin.address === adminAddress, - ); + const currentUserIsAdminForCollection = admins.some( + (admin: Selectable) => + admin.chain_id === chainId && + admin.address.toLowerCase() === adminAddress.toLowerCase(), + ); if (currentUserIsAdminForCollection) { // Update 
collection if you are an admin of the collection - await dataService.upsertCollections([ + await this.collectionService.upsertCollections([ { id: collection.id, name: collection.title, @@ -733,11 +790,13 @@ export class HyperboardController extends Controller { ]); // Start with removing all hypercerts from the collection - await dataService.deleteAllHypercertsFromCollection(collection.id); + await this.collectionService.deleteAllHypercertsFromCollection( + collection.id, + ); if (collection.hypercerts?.length) { // Update hypercerts in the collection if you are an admin of the collection - await dataService.upsertHypercerts( + await this.collectionService.upsertHypercertCollections( collection.hypercerts.map((hc) => ({ hypercert_id: hc.hypercertId, collection_id: currentCollection.id, @@ -745,7 +804,7 @@ export class HyperboardController extends Controller { ); // Add metadata for all newly added hypercerts - await dataService.upsertHyperboardHypercertMetadata( + await this.hyperboardsService.upsertHyperboardHypercertMetadata( collection.hypercerts.map((hc) => ({ hypercert_id: hc.hypercertId, hyperboard_id: hyperboardId, @@ -756,11 +815,13 @@ export class HyperboardController extends Controller { } // Delete all blueprints from teh collection for a fresh start - await dataService.deleteAllBlueprintsFromCollection(collection.id); + await this.collectionService.deleteAllBlueprintsFromCollection( + collection.id, + ); if (collection.blueprints?.length) { // Add blueprints to the collection - await dataService.addBlueprintsToCollection( + await this.collectionService.addBlueprintsToCollection( collection.blueprints.map((bp) => ({ blueprint_id: bp.blueprintId, collection_id: currentCollection.id, @@ -768,7 +829,7 @@ export class HyperboardController extends Controller { ); // Add metadata for all newly added blueprints - await dataService.upsertHyperboardBlueprintMetadata( + await this.hyperboardsService.upsertHyperboardBlueprintMetadata( collection.blueprints.map((bp) 
=> ({ blueprint_id: bp.blueprintId, hyperboard_id: hyperboardId, @@ -793,24 +854,27 @@ export class HyperboardController extends Controller { ); for (const collection of collectionsToCreate) { try { - const collectionCreateResponse = await dataService.upsertCollections([ - { - name: collection.title, - description: collection.description, - chain_ids: [chainId], - }, - ]); + const collectionCreateResponse = + await this.collectionService.upsertCollections([ + { + name: collection.title, + description: collection.description, + chain_ids: [chainId], + }, + ]); - const collectionId = collectionCreateResponse[0]?.id; + const collectionId = collectionCreateResponse[0].id; if (!collectionId) { throw new Error("Collection must have an id to add claims."); } // Add current user as admin to the collection because they are creating it - const admin = await dataService.addAdminToCollection( - collectionId, - adminAddress, - chainId, + const admin = await this.collectionService.addAdminToCollection( + collectionId.toString(), + { + address: adminAddress, + chain_id: chainId, + }, ); if (!admin) { @@ -819,37 +883,40 @@ export class HyperboardController extends Controller { const hypercerts = collection.hypercerts.map((hc) => ({ hypercert_id: hc.hypercertId, - collection_id: collectionId, + collection_id: collectionId.toString(), })); - await dataService.upsertHypercerts(hypercerts); - await dataService.upsertHyperboardHypercertMetadata( + await this.collectionService.upsertHypercertCollections(hypercerts); + await this.hyperboardsService.upsertHyperboardHypercertMetadata( collection.hypercerts.map((hc) => ({ hypercert_id: hc.hypercertId, hyperboard_id: hyperboardId, - collection_id: collectionId, + collection_id: collectionId.toString(), display_size: hc.factor, })), ); if (collection.blueprints?.length) { - await dataService.addBlueprintsToCollection( + await this.collectionService.addBlueprintsToCollection( collection.blueprints.map((bp) => ({ blueprint_id: bp.blueprintId, - 
collection_id: collectionId, + collection_id: collectionId.toString(), })), ); - await dataService.upsertHyperboardBlueprintMetadata( + await this.hyperboardsService.upsertHyperboardBlueprintMetadata( collection.blueprints.map((bp) => ({ blueprint_id: bp.blueprintId, hyperboard_id: hyperboardId, - collection_id: collectionId, + collection_id: collectionId.toString(), display_size: bp.factor, })), ); } - await dataService.addCollectionToHyperboard(hyperboardId, collectionId); + await this.hyperboardsService.addCollectionToHyperboard( + hyperboardId, + collectionId.toString(), + ); } catch (e) { console.error(e); this.setStatus(400); @@ -898,8 +965,11 @@ export class HyperboardController extends Controller { }; } - const dataService = new SupabaseDataService(); - const hyperboard = await dataService.getHyperboardById(hyperboardId); + const hyperboard = await this.hyperboardsService.getHyperboard({ + where: { + id: { eq: hyperboardId }, + }, + }); if (!hyperboard) { this.setStatus(404); @@ -909,8 +979,18 @@ export class HyperboardController extends Controller { }; } - const { admins, chain_ids } = hyperboard; - const chain_id = chain_ids[0]; + const { data: admins } = + await this.hyperboardsService.getHyperboardAdmins(hyperboardId); + + const chain_id = hyperboard.chain_ids?.[0]; + if (!chain_id) { + this.setStatus(400); + return { + success: false, + message: "Hyperboard must have a chain id", + }; + } + if ( !admins.find( (admin) => @@ -949,7 +1029,7 @@ export class HyperboardController extends Controller { } try { - await dataService.deleteHyperboard(hyperboardId); + await this.hyperboardsService.deleteHyperboard(hyperboardId); this.setStatus(202); return { success: true, diff --git a/src/controllers/MarketplaceController.ts b/src/controllers/MarketplaceController.ts index 9d4d36f8..7397d472 100644 --- a/src/controllers/MarketplaceController.ts +++ b/src/controllers/MarketplaceController.ts @@ -8,23 +8,37 @@ import { SuccessResponse, Tags, } from "tsoa"; -import { 
z } from "zod"; import { isAddress, verifyMessage } from "viem"; +import { z } from "zod"; -import { SupabaseDataService } from "../services/SupabaseDataService.js"; +import { isControllerError } from "../lib/errors/controller.js"; +import { createMarketplaceStrategy } from "../lib/marketplace/MarketplaceStrategyFactory.js"; +import { parseCreateOrderRequest } from "../lib/marketplace/request-parser.js"; import type { BaseResponse, CreateOrderRequest, UpdateOrderNonceRequest, ValidateOrderRequest, } from "../types/api.js"; -import { parseCreateOrderRequest } from "../lib/marketplace/request-parser.js"; -import { isControllerError } from "../lib/errors/controller.js"; -import { createMarketplaceStrategy } from "../lib/marketplace/MarketplaceStrategyFactory.js"; -@Route("v1/marketplace") +import { inject, injectable } from "tsyringe"; +import { FractionService } from "../services/database/entities/FractionEntityService.js"; +import { MarketplaceOrdersService } from "../services/database/entities/MarketplaceOrdersEntityService.js"; +import { InvalidOrder } from "../lib/marketplace/errors.js"; + +@injectable() +@Route("v2/marketplace") @Tags("Marketplace") export class MarketplaceController extends Controller { + constructor( + @inject(MarketplaceOrdersService) + private ordersService: MarketplaceOrdersService, + @inject(FractionService) + private fractionService: FractionService, + ) { + super(); + } + /** * Submits a new order for validation and storage on the database. * @@ -101,6 +115,7 @@ export class MarketplaceController extends Controller { success: false, message: "Error processing order", error: error instanceof Error ? error.message : String(error), + ...(error instanceof InvalidOrder ? 
{ result: error.errors } : {}), }; } } @@ -143,32 +158,17 @@ export class MarketplaceController extends Controller { const { address, chainId } = parsedQuery.data; const lowerCaseAddress = address.toLowerCase(); - const supabase = new SupabaseDataService(); - const { data: currentNonce, error: currentNonceError } = - await supabase.getNonce(lowerCaseAddress, chainId); + const nonce = await this.ordersService.getNonce({ + address: lowerCaseAddress, + chain_id: chainId, + }); - if (currentNonceError) { - this.setStatus(500); - return { - success: false, - message: currentNonceError.message, - data: null, - }; - } + if (!nonce) { + const newNonce = await this.ordersService.createNonce({ + address: lowerCaseAddress, + chain_id: chainId, + }); - if (!currentNonce) { - const { data: newNonce, error } = await supabase.createNonce( - lowerCaseAddress, - chainId, - ); - if (error) { - this.setStatus(500); - return { - success: false, - message: error.message, - data: null, - }; - } this.setStatus(200); return { success: true, @@ -177,21 +177,11 @@ export class MarketplaceController extends Controller { }; } - const { data: updatedNonce, error: updatedNonceError } = - await supabase.updateNonce( - lowerCaseAddress, - chainId, - currentNonce.nonce_counter + 1, - ); - - if (updatedNonceError) { - this.setStatus(500); - return { - success: false, - message: updatedNonceError.message, - data: null, - }; - } + const updatedNonce = await this.ordersService.updateNonce({ + address: lowerCaseAddress, + chain_id: chainId, + nonce_counter: nonce.nonce_counter + 1, + }); this.setStatus(200); return { @@ -226,13 +216,12 @@ export class MarketplaceController extends Controller { } const { tokenIds, chainId } = parsedQuery.data; - const supabase = new SupabaseDataService(); try { - const ordersToUpdate = await supabase.validateOrdersByTokenIds({ + const ordersToUpdate = await this.ordersService.validateOrdersByTokenIds( tokenIds, chainId, - }); + ); this.setStatus(200); return { success: 
true, @@ -280,15 +269,13 @@ export class MarketplaceController extends Controller { const { orderId, signature } = parsedQuery.data; - const supabase = new SupabaseDataService(); - const { data } = supabase.getOrders({ + const order = await this.ordersService.getOrder({ where: { id: { eq: orderId, }, }, }); - const order = await data.executeTakeFirst(); if (!order) { this.setStatus(404); @@ -317,7 +304,7 @@ export class MarketplaceController extends Controller { } try { - await supabase.deleteOrder(orderId); + await this.ordersService.deleteOrder(orderId); this.setStatus(200); return { success: true, diff --git a/src/controllers/MetadataController.ts b/src/controllers/MetadataController.ts index fd208d90..b59a23e4 100644 --- a/src/controllers/MetadataController.ts +++ b/src/controllers/MetadataController.ts @@ -1,4 +1,3 @@ -import { jsonToBlob } from "../utils/jsonToBlob.js"; import { Body, Controller, @@ -8,6 +7,7 @@ import { SuccessResponse, Tags, } from "tsoa"; +import { parseAndValidateMerkleTree } from "../lib/allowlists/parseAndValidateMerkleTreeDump.js"; import { StorageService } from "../services/StorageService.js"; import type { BaseResponse, @@ -17,11 +17,11 @@ import type { ValidateMetadataRequest, ValidationResponse, } from "../types/api.js"; +import { jsonToBlob } from "../utils/jsonToBlob.js"; import { validateMetadataAndClaimdata } from "../utils/validateMetadataAndClaimdata.js"; import { validateRemoteAllowList } from "../utils/validateRemoteAllowList.js"; -import { parseAndValidateMerkleTree } from "../lib/allowlists/parseAndValidateMerkleTreeDump.js"; -@Route("v1/metadata") +@Route("v2/metadata") @Tags("Metadata") export class MetadataController extends Controller { /** @@ -46,12 +46,12 @@ export class MetadataController extends Controller { try { const metadataValidationResult = validateMetadataAndClaimdata(metadata); - if (!metadataValidationResult.valid) { + if (!metadataValidationResult.valid || !metadataValidationResult.data) { 
this.setStatus(422); return { success: false, valid: false, - message: "Errors while validating metadata", + message: "Metadata validation failed", errors: metadataValidationResult.errors, }; } @@ -66,7 +66,7 @@ export class MetadataController extends Controller { return { success: false, valid: false, - message: "Errors while validating allow list", + message: "Allowlist validation failed", errors: allowListValidationResult.errors, }; } @@ -126,7 +126,7 @@ export class MetadataController extends Controller { this.setStatus(422); return { success: false, - message: "Validation failed", + message: "Metadata validation failed", errors: metadataValidationResult.errors, }; } @@ -149,7 +149,7 @@ export class MetadataController extends Controller { this.setStatus(422); return { success: false, - message: "Validation failed", + message: "Allowlist validation failed", errors: allowlistValidationResult.errors, }; } @@ -205,7 +205,7 @@ export class MetadataController extends Controller { return { success: true, valid: false, - message: "Errors while validating metadata", + message: "Metadata validation failed", errors: metadataValidationResult.errors, }; } @@ -220,8 +220,7 @@ export class MetadataController extends Controller { return { success: true, valid: false, - message: - "Errors while validating allow list referenced in metadata", + message: "Allowlist validation failed", errors: allowListValidationResult.errors, }; } @@ -238,7 +237,7 @@ export class MetadataController extends Controller { return { success: false, valid: false, - message: "Error while validating metadata", + message: "Validation failed", errors: { metadata: (e as Error).message }, }; } @@ -270,7 +269,7 @@ export class MetadataController extends Controller { return { success: true, valid: false, - message: "Validation failed", + message: "Metadata validation failed", errors: metadataValidationResult.errors, }; } @@ -285,7 +284,7 @@ export class MetadataController extends Controller { return { success: 
true, valid: false, - message: "Validation failed", + message: "Allowlist validation failed", errors: allowlistValidationResult.errors, }; } @@ -301,7 +300,7 @@ export class MetadataController extends Controller { return { success: false, valid: false, - message: "Error while validating metadata", + message: "Validation failed", errors: { metadata: (e as Error).message }, }; } diff --git a/src/controllers/MonitoringController.ts b/src/controllers/MonitoringController.ts index b6a68e28..879bd267 100644 --- a/src/controllers/MonitoringController.ts +++ b/src/controllers/MonitoringController.ts @@ -1,6 +1,6 @@ import { Route, Get, Response } from "tsoa"; -@Route("v1/monitoring") +@Route("/v2/monitoring") export class MonitoringController { @Get("/health") @Response(200, "OK") diff --git a/src/controllers/SignatureRequestController.ts b/src/controllers/SignatureRequestController.ts index 42d70ebc..a7265e1b 100644 --- a/src/controllers/SignatureRequestController.ts +++ b/src/controllers/SignatureRequestController.ts @@ -1,17 +1,18 @@ import { Body, Controller, - Post, Path, + Post, Response, Route, SuccessResponse, Tags, } from "tsoa"; -import { SupabaseDataService } from "../services/SupabaseDataService.js"; -import { verifyAuthSignedData } from "../utils/verifyAuthSignedData.js"; +import { inject, injectable } from "tsyringe"; +import { SignatureRequestsService } from "../services/database/entities/SignatureRequestsEntityService.js"; import SignatureRequestProcessor from "../services/SignatureRequestProcessor.js"; +import { verifyAuthSignedData } from "../utils/verifyAuthSignedData.js"; interface CancelSignatureRequest { signature: string; @@ -19,14 +20,15 @@ interface CancelSignatureRequest { chain_id: number; } -@Route("v1/signature-requests") +@injectable() +@Route("v2/signature-requests") @Tags("SignatureRequests") export class SignatureRequestController extends Controller { - private readonly dataService: SupabaseDataService; - - constructor() { + constructor( + 
@inject(SignatureRequestsService) + private signatureRequestsService: SignatureRequestsService, + ) { super(); - this.dataService = new SupabaseDataService(); } @Post("{safe_address}-{message_hash}/cancel") @@ -44,10 +46,13 @@ export class SignatureRequestController extends Controller { return this.errorResponse("Unauthorized", 401); } - const signatureRequest = await this.dataService.getSignatureRequest( - safe_address, - message_hash, - ); + const signatureRequest = + await this.signatureRequestsService.getSignatureRequest({ + where: { + safe_address: { eq: safe_address }, + message_hash: { eq: message_hash }, + }, + }); if (!signatureRequest) { return this.errorResponse("Signature request not found", 404); } @@ -62,7 +67,7 @@ export class SignatureRequestController extends Controller { return this.successResponse("Signature request canceled successfully"); case "pending": - await this.dataService.updateSignatureRequestStatus( + await this.signatureRequestsService.updateSignatureRequestStatus( safe_address, message_hash, "canceled", diff --git a/src/controllers/UploadController.ts b/src/controllers/UploadController.ts index c502b12c..9e82bd62 100644 --- a/src/controllers/UploadController.ts +++ b/src/controllers/UploadController.ts @@ -7,15 +7,15 @@ import { Tags, UploadedFiles, } from "tsoa"; -import { StorageService } from "../services/StorageService.js"; -import type { UploadResponse } from "../types/api.js"; import { FileUploadError, NoFilesUploadedError, PartialUploadError, - UploadFailedError, SingleUploadFailedError, + UploadFailedError, } from "../lib/uploads/errors.js"; +import { StorageService } from "../services/StorageService.js"; +import type { UploadResponse } from "../types/api.js"; // Type definitions and guards at module scope type UploadResult = { @@ -39,7 +39,7 @@ function isFailedUpload( * Controller handling file uploads to IPFS storage * @class UploadController */ -@Route("v1/upload") +@Route("v2/upload") @Tags("Upload") export class 
UploadController extends Controller { /** @@ -53,7 +53,7 @@ export class UploadController extends Controller { * @example * Using curl: * ```bash - * curl -X POST http://api.example.com/v1/upload \ + * curl -X POST http://api.example.com/v2/upload \ * -F "files=@/path/to/file1.txt" \ * -F "files=@/path/to/file2.txt" \ * -F "jsonData={\"key\":\"value\"}" @@ -61,7 +61,7 @@ export class UploadController extends Controller { * * Using HTML Form: * ```html - *
+ * * * * @@ -75,7 +75,7 @@ export class UploadController extends Controller { * formData.append('files', fileInput.files[1]); * formData.append('jsonData', JSON.stringify({key: 'value'})); * - * fetch('/v1/upload', { + * fetch('/v2/upload', { * method: 'POST', * body: formData * }); diff --git a/src/controllers/UserController.ts b/src/controllers/UserController.ts index 2d1bac1c..aecaf2b3 100644 --- a/src/controllers/UserController.ts +++ b/src/controllers/UserController.ts @@ -1,4 +1,3 @@ -import { z } from "zod"; import { Body, Controller, @@ -9,18 +8,19 @@ import { SuccessResponse, Tags, } from "tsoa"; +import { z } from "zod"; +import { ParseError } from "../lib/errors/request-parsing.js"; +import { isUserUpsertError } from "../lib/users/errors.js"; +import { USER_UPDATE_REQUEST_SCHEMA } from "../lib/users/schemas.js"; +import { createStrategy } from "../lib/users/UserUpsertStrategy.js"; import type { AddOrUpdateUserRequest, BaseResponse, UserResponse, } from "../types/api.js"; -import { isUserUpsertError } from "../lib/users/errors.js"; -import { USER_UPDATE_REQUEST_SCHEMA } from "../lib/users/schemas.js"; -import { createStrategy } from "../lib/users/UserUpsertStrategy.js"; -import { ParseError } from "../lib/errors/request-parsing.js"; -@Route("v1/users") +@Route("v2/users") @Tags("Users") export class UserController extends Controller { /** diff --git a/src/cron/OrderInvalidation.ts b/src/cron/OrderInvalidation.ts index 53fa3c04..64d557e8 100644 --- a/src/cron/OrderInvalidation.ts +++ b/src/cron/OrderInvalidation.ts @@ -1,9 +1,10 @@ -import cron from "node-cron"; import { OrderValidatorCode } from "@hypercerts-org/marketplace-sdk"; -import { SupabaseDataService } from "../services/SupabaseDataService.js"; +import { sql } from "kysely"; import _ from "lodash"; +import cron from "node-cron"; +import { inject, singleton } from "tsyringe"; import { kyselyData } from "../client/kysely.js"; -import { sql } from "kysely"; +import { MarketplaceOrdersService } from 
"../services/database/entities/MarketplaceOrdersEntityService.js"; /** * These error codes are considered temporary and should be @@ -14,18 +15,23 @@ export const TEMPORARILY_INVALID_ERROR_CODES = [ OrderValidatorCode.TOO_EARLY_TO_EXECUTE_ORDER, ]; +@singleton() export default class OrderInvalidationCronjob { - private static instance: OrderInvalidationCronjob; - private dataService: SupabaseDataService; + private cronJob: cron.ScheduledTask | null = null; - private constructor() { - this.dataService = new SupabaseDataService(); - this.setupCronJob(); - } + constructor( + @inject(MarketplaceOrdersService) + private marketplaceOrdersService: MarketplaceOrdersService, + ) {} + + public start(): void { + if (this.cronJob) { + // Already started + return; + } - private setupCronJob() { - // Run every 30 seconds - cron.schedule("*/30 * * * * *", async () => { + // Schedule the cron job + this.cronJob = cron.schedule("*/30 * * * * *", async () => { try { await this.invalidateOrders(); } catch (error) { @@ -34,9 +40,11 @@ export default class OrderInvalidationCronjob { }); } - public static start(): void { - if (!OrderInvalidationCronjob.instance) { - OrderInvalidationCronjob.instance = new OrderInvalidationCronjob(); + // Stop method is useful for testing or graceful shutdown + public stop(): void { + if (this.cronJob) { + this.cronJob.stop(); + this.cronJob = null; } } @@ -64,10 +72,10 @@ export default class OrderInvalidationCronjob { for (const chainId in ordersByChain) { const ordersForChain = ordersByChain[chainId]; const tokenIds = _.uniq(ordersForChain.map((order) => order.itemIds[0])); - await this.dataService.validateOrdersByTokenIds({ + await this.marketplaceOrdersService.validateOrdersByTokenIds( tokenIds, - chainId: parseInt(chainId, 10), - }); + parseInt(chainId, 10), + ); } } } diff --git a/src/graphql/schemas/args/allowlistRecordArgs.ts b/src/graphql/schemas/args/allowlistRecordArgs.ts index b5f0f4b3..affd9a13 100644 --- 
a/src/graphql/schemas/args/allowlistRecordArgs.ts +++ b/src/graphql/schemas/args/allowlistRecordArgs.ts @@ -1,30 +1,27 @@ -import { ArgsType, Field, InputType } from "type-graphql"; -import { BasicAllowlistRecordWhereInput } from "../inputs/allowlistRecordsInput.js"; -import { withPagination } from "./baseArgs.js"; -import type { OrderOptions } from "../inputs/orderOptions.js"; -import { AllowlistRecord } from "../typeDefs/allowlistRecordTypeDefs.js"; -import { AllowlistRecordSortOptions } from "../inputs/sortOptions.js"; +import { ArgsType } from "type-graphql"; +import { BaseQueryArgs } from "../../../lib/graphql/BaseQueryArgs.js"; +import { createEntityArgs } from "../../../lib/graphql/createEntityArgs.js"; +import { EntityTypeDefs } from "../typeDefs/typeDefs.js"; +import { WhereFieldDefinitions } from "../../../lib/graphql/whereFieldDefinitions.js"; -@InputType() -class AllowlistRecordWhereInput extends BasicAllowlistRecordWhereInput {} - -@InputType() -export class AllowlistRecordFetchInput - implements OrderOptions -{ - @Field(() => AllowlistRecordSortOptions, { nullable: true }) - by?: AllowlistRecordSortOptions; -} - -@ArgsType() -export class AllowlistRecordsArgs { - @Field(() => AllowlistRecordWhereInput, { nullable: true }) - where?: AllowlistRecordWhereInput; - @Field(() => AllowlistRecordFetchInput, { nullable: true }) - sort?: AllowlistRecordFetchInput; -} +const { + WhereInput: AllowlistRecordWhereInput, + SortOptions: AllowlistRecordSortOptions, +} = createEntityArgs("AllowlistRecord", { + ...WhereFieldDefinitions.AllowlistRecord.fields, + hypercert: { + type: "id", + references: { + entity: EntityTypeDefs.Hypercert, + fields: WhereFieldDefinitions.Hypercert.fields, + }, + }, +}); @ArgsType() -export class GetAllowlistRecordsArgs extends withPagination( - AllowlistRecordsArgs, +export class GetAllowlistRecordsArgs extends BaseQueryArgs( + AllowlistRecordWhereInput, + AllowlistRecordSortOptions, ) {} + +export { AllowlistRecordSortOptions, 
AllowlistRecordWhereInput }; diff --git a/src/graphql/schemas/args/attestationArgs.ts b/src/graphql/schemas/args/attestationArgs.ts index a7c45884..001a2ec0 100644 --- a/src/graphql/schemas/args/attestationArgs.ts +++ b/src/graphql/schemas/args/attestationArgs.ts @@ -1,36 +1,34 @@ -import { ArgsType, Field, InputType } from "type-graphql"; -import { BasicAttestationWhereInput } from "../inputs/attestationInput.js"; -import { BasicMetadataWhereInput } from "../inputs/metadataInput.js"; -import { withPagination } from "./baseArgs.js"; -import { BasicHypercertWhereArgs } from "../inputs/hypercertsInput.js"; -import type { OrderOptions } from "../inputs/orderOptions.js"; -import type { Attestation } from "../typeDefs/attestationTypeDefs.js"; -import { AttestationSortOptions } from "../inputs/sortOptions.js"; -import { BasicAttestationSchemaWhereInput } from "../inputs/attestationSchemaInput.js"; +import { EntityTypeDefs } from "../typeDefs/typeDefs.js"; +import { createEntityArgs } from "../../../lib/graphql/createEntityArgs.js"; +import { BaseQueryArgs } from "../../../lib/graphql/BaseQueryArgs.js"; +import { WhereFieldDefinitions } from "../../../lib/graphql/whereFieldDefinitions.js"; +import { ArgsType } from "type-graphql"; -@InputType() -class AttestationWhereInput extends BasicAttestationWhereInput { - @Field(() => BasicHypercertWhereArgs, { nullable: true }) - hypercerts?: BasicHypercertWhereArgs; - @Field(() => BasicMetadataWhereInput, { nullable: true }) - metadata?: BasicMetadataWhereInput; - @Field(() => BasicAttestationSchemaWhereInput, { nullable: true }) - eas_schema?: BasicAttestationSchemaWhereInput; -} - -@InputType() -class AttestationFetchInput implements OrderOptions { - @Field(() => AttestationSortOptions, { nullable: true }) - by?: AttestationSortOptions; -} +const { + WhereInput: AttestationWhereInput, + SortOptions: AttestationSortOptions, +} = createEntityArgs("Attestation", { + ...WhereFieldDefinitions.Attestation.fields, + hypercert: { + 
type: "id", + references: { + entity: EntityTypeDefs.Hypercert, + fields: WhereFieldDefinitions.Hypercert.fields, + }, + }, + eas_schema: { + type: "id", + references: { + entity: EntityTypeDefs.AttestationSchema, + fields: WhereFieldDefinitions.AttestationSchema.fields, + }, + }, +}); @ArgsType() -class AttestationArgs { - @Field(() => AttestationWhereInput, { nullable: true }) - where?: AttestationWhereInput; - @Field(() => AttestationFetchInput, { nullable: true }) - sort?: AttestationFetchInput; -} +export class GetAttestationsArgs extends BaseQueryArgs( + AttestationWhereInput, + AttestationSortOptions, +) {} -@ArgsType() -export class GetAttestationsArgs extends withPagination(AttestationArgs) {} +export { AttestationSortOptions, AttestationWhereInput }; diff --git a/src/graphql/schemas/args/attestationSchemaArgs.ts b/src/graphql/schemas/args/attestationSchemaArgs.ts index 6fe3c526..43261bea 100644 --- a/src/graphql/schemas/args/attestationSchemaArgs.ts +++ b/src/graphql/schemas/args/attestationSchemaArgs.ts @@ -1,34 +1,27 @@ -import { ArgsType, Field, InputType } from "type-graphql"; -import { BasicAttestationSchemaWhereInput } from "../inputs/attestationSchemaInput.js"; -import { withPagination } from "./baseArgs.js"; -import type { OrderOptions } from "../inputs/orderOptions.js"; -import type { AttestationSchema } from "../typeDefs/attestationSchemaTypeDefs.js"; -import { AttestationSchemaSortOptions } from "../inputs/sortOptions.js"; -import { BasicAttestationWhereInput } from "../inputs/attestationInput.js"; +import { EntityTypeDefs } from "../typeDefs/typeDefs.js"; +import { createEntityArgs } from "../../../lib/graphql/createEntityArgs.js"; +import { BaseQueryArgs } from "../../../lib/graphql/BaseQueryArgs.js"; +import { WhereFieldDefinitions } from "../../../lib/graphql/whereFieldDefinitions.js"; +import { ArgsType } from "type-graphql"; -@InputType() -export class AttestationSchemaWhereInput extends BasicAttestationSchemaWhereInput { - @Field(() => 
BasicAttestationWhereInput, { nullable: true }) - attestations?: BasicAttestationWhereInput; -} - -@InputType() -export class AttestationSchemaFetchInput - implements OrderOptions -{ - @Field(() => AttestationSchemaSortOptions, { nullable: true }) - by?: AttestationSchemaSortOptions; -} - -@InputType() -export class AttestationSchemaArgs { - @Field(() => AttestationSchemaWhereInput, { nullable: true }) - where?: AttestationSchemaWhereInput; - @Field(() => AttestationSchemaFetchInput, { nullable: true }) - sort?: AttestationSchemaFetchInput; -} +const { + WhereInput: AttestationSchemaWhereInput, + SortOptions: AttestationSchemaSortOptions, +} = createEntityArgs("AttestationSchema", { + ...WhereFieldDefinitions.AttestationSchema.fields, + attestations: { + type: "id", + references: { + entity: EntityTypeDefs.Attestation, + fields: WhereFieldDefinitions.Attestation.fields, + }, + }, +}); @ArgsType() -export class GetAttestationSchemasArgs extends withPagination( - AttestationSchemaArgs, +export class GetAttestationSchemasArgs extends BaseQueryArgs( + AttestationSchemaWhereInput, + AttestationSchemaSortOptions, ) {} + +export { AttestationSchemaSortOptions, AttestationSchemaWhereInput }; diff --git a/src/graphql/schemas/args/baseArgs.ts b/src/graphql/schemas/args/baseArgs.ts deleted file mode 100644 index 1208f50f..00000000 --- a/src/graphql/schemas/args/baseArgs.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { Field, ArgsType, ClassType, Int } from "type-graphql"; -import { WhereOptions } from "../inputs/whereOptions.js"; -import { OrderOptions } from "../inputs/orderOptions.js"; - -export type BaseArgs = { - where?: WhereOptions; - sort?: OrderOptions; -}; - -export function withPagination(TItemClass: TItem) { - @ArgsType() - class withPaginationClass extends TItemClass { - @Field(() => Int, { nullable: true }) - first?: number; - - @Field(() => Int, { nullable: true }) - offset?: number; - } - - return withPaginationClass; -} diff --git 
a/src/graphql/schemas/args/blueprintArgs.ts b/src/graphql/schemas/args/blueprintArgs.ts index 42e91e65..b551b0ce 100644 --- a/src/graphql/schemas/args/blueprintArgs.ts +++ b/src/graphql/schemas/args/blueprintArgs.ts @@ -1,26 +1,25 @@ -import { ArgsType, Field, InputType } from "type-graphql"; -import { withPagination } from "./baseArgs.js"; -import type { OrderOptions } from "../inputs/orderOptions.js"; -import { BasicBlueprintWhereInput } from "../inputs/blueprintInput.js"; -import { Blueprint } from "../typeDefs/blueprintTypeDefs.js"; -import { BlueprintSortOptions } from "../inputs/sortOptions.js"; +import { EntityTypeDefs } from "../typeDefs/typeDefs.js"; +import { createEntityArgs } from "../../../lib/graphql/createEntityArgs.js"; +import { BaseQueryArgs } from "../../../lib/graphql/BaseQueryArgs.js"; +import { WhereFieldDefinitions } from "../../../lib/graphql/whereFieldDefinitions.js"; +import { ArgsType } from "type-graphql"; -@InputType() -export class BlueprintWhereInput extends BasicBlueprintWhereInput {} - -@InputType() -export class BlueprintFetchInput implements OrderOptions { - @Field(() => BlueprintSortOptions, { nullable: true }) - by?: BlueprintSortOptions; -} +const { WhereInput: BlueprintWhereInput, SortOptions: BlueprintSortOptions } = + createEntityArgs("Blueprint", { + ...WhereFieldDefinitions.Blueprint.fields, + admins: { + type: "id", + references: { + entity: EntityTypeDefs.User, + fields: WhereFieldDefinitions.User.fields, + }, + }, + }); @ArgsType() -export class BlueprintArgs { - @Field(() => BlueprintWhereInput, { nullable: true }) - where?: BlueprintWhereInput; - @Field(() => BlueprintFetchInput, { nullable: true }) - sort?: BlueprintFetchInput; -} +export class GetBlueprintsArgs extends BaseQueryArgs( + BlueprintWhereInput, + BlueprintSortOptions, +) {} -@ArgsType() -export class GetBlueprintArgs extends withPagination(BlueprintArgs) {} +export { BlueprintSortOptions, BlueprintWhereInput }; diff --git 
a/src/graphql/schemas/args/collectionArgs.ts b/src/graphql/schemas/args/collectionArgs.ts index d150cbe7..bc00e88d 100644 --- a/src/graphql/schemas/args/collectionArgs.ts +++ b/src/graphql/schemas/args/collectionArgs.ts @@ -1,28 +1,39 @@ -import { ArgsType, Field, InputType } from "type-graphql"; +import { EntityTypeDefs } from "../typeDefs/typeDefs.js"; +import { createEntityArgs } from "../../../lib/graphql/createEntityArgs.js"; +import { BaseQueryArgs } from "../../../lib/graphql/BaseQueryArgs.js"; +import { WhereFieldDefinitions } from "../../../lib/graphql/whereFieldDefinitions.js"; +import { ArgsType } from "type-graphql"; -import { BasicCollectionWhereInput } from "../inputs/collectionInput.js"; -import type { OrderOptions } from "../inputs/orderOptions.js"; -import { Collection } from "../typeDefs/collectionTypeDefs.js"; -import { CollectionSortOptions } from "../inputs/sortOptions.js"; - -import { withPagination } from "./baseArgs.js"; - -@InputType() -export class CollectionWhereInput extends BasicCollectionWhereInput {} - -@InputType() -export class CollectionFetchInput implements OrderOptions { - @Field(() => CollectionSortOptions, { nullable: true }) - by?: CollectionSortOptions; -} +const { WhereInput: CollectionWhereInput, SortOptions: CollectionSortOptions } = + createEntityArgs("Collection", { + ...WhereFieldDefinitions.Collection.fields, + admins: { + type: "id", + references: { + entity: EntityTypeDefs.User, + fields: WhereFieldDefinitions.User.fields, + }, + }, + hypercerts: { + type: "id", + references: { + entity: EntityTypeDefs.Hypercert, + fields: WhereFieldDefinitions.Hypercert.fields, + }, + }, + blueprints: { + type: "id", + references: { + entity: EntityTypeDefs.Blueprint, + fields: WhereFieldDefinitions.Blueprint.fields, + }, + }, + }); @ArgsType() -export class CollectionArgs { - @Field(() => CollectionWhereInput, { nullable: true }) - where?: CollectionWhereInput; - @Field(() => CollectionFetchInput, { nullable: true }) - sort?: 
CollectionFetchInput; -} +export class GetCollectionsArgs extends BaseQueryArgs( + CollectionWhereInput, + CollectionSortOptions, +) {} -@ArgsType() -export class GetCollectionsArgs extends withPagination(CollectionArgs) {} +export { CollectionSortOptions, CollectionWhereInput }; diff --git a/src/graphql/schemas/args/contractArgs.ts b/src/graphql/schemas/args/contractArgs.ts index d7c57d1d..98bf5180 100644 --- a/src/graphql/schemas/args/contractArgs.ts +++ b/src/graphql/schemas/args/contractArgs.ts @@ -1,26 +1,16 @@ -import { ArgsType, InputType, Field } from "type-graphql"; -import { BasicContractWhereInput } from "../inputs/contractInput.js"; -import { withPagination } from "./baseArgs.js"; -import { ContractSortOptions } from "../inputs/sortOptions.js"; -import { OrderOptions } from "../inputs/orderOptions.js"; -import { Contract } from "../typeDefs/contractTypeDefs.js"; - -@InputType() -export class ContractWhereInput extends BasicContractWhereInput {} - -@InputType() -export class ContractFetchInput implements OrderOptions { - @Field(() => ContractSortOptions, { nullable: true }) - by?: ContractSortOptions; -} +import { ArgsType } from "type-graphql"; +import { BaseQueryArgs } from "../../../lib/graphql/BaseQueryArgs.js"; +import { createEntityArgs } from "../../../lib/graphql/createEntityArgs.js"; +import { WhereFieldDefinitions } from "../../../lib/graphql/whereFieldDefinitions.js"; +const { WhereInput: ContractWhereInput, SortOptions: ContractSortOptions } = + createEntityArgs("Contract", { + ...WhereFieldDefinitions.Contract.fields, + }); @ArgsType() -export class ContractArgs { - @Field(() => ContractWhereInput, { nullable: true }) - where?: ContractWhereInput; - @Field(() => ContractFetchInput, { nullable: true }) - sort?: ContractFetchInput; -} +export class GetContractsArgs extends BaseQueryArgs( + ContractWhereInput, + ContractSortOptions, +) {} -@ArgsType() -export class GetContractsArgs extends withPagination(ContractArgs) {} +export { 
ContractSortOptions, ContractWhereInput }; diff --git a/src/graphql/schemas/args/fractionArgs.ts b/src/graphql/schemas/args/fractionArgs.ts index a4de55b9..13ecd5ee 100644 --- a/src/graphql/schemas/args/fractionArgs.ts +++ b/src/graphql/schemas/args/fractionArgs.ts @@ -1,30 +1,25 @@ -import { ArgsType, InputType, Field } from "type-graphql"; -import { BasicFractionWhereInput } from "../inputs/fractionInput.js"; -import { withPagination } from "./baseArgs.js"; -import { BasicHypercertWhereArgs } from "../inputs/hypercertsInput.js"; -import type { OrderOptions } from "../inputs/orderOptions.js"; -import { Fraction } from "../typeDefs/fractionTypeDefs.js"; -import { FractionSortOptions } from "../inputs/sortOptions.js"; +import { EntityTypeDefs } from "../typeDefs/typeDefs.js"; +import { createEntityArgs } from "../../../lib/graphql/createEntityArgs.js"; +import { BaseQueryArgs } from "../../../lib/graphql/BaseQueryArgs.js"; +import { WhereFieldDefinitions } from "../../../lib/graphql/whereFieldDefinitions.js"; +import { ArgsType } from "type-graphql"; -@InputType() -export class FractionWhereInput extends BasicFractionWhereInput { - @Field(() => BasicHypercertWhereArgs, { nullable: true }) - hypercerts?: BasicHypercertWhereArgs; -} - -@InputType() -export class FractionFetchInput implements OrderOptions { - @Field(() => FractionSortOptions, { nullable: true }) - by?: FractionSortOptions; -} +const { WhereInput: FractionWhereInput, SortOptions: FractionSortOptions } = + createEntityArgs("Fraction", { + ...WhereFieldDefinitions.Fraction.fields, + metadata: { + type: "id", + references: { + entity: EntityTypeDefs.Metadata, + fields: WhereFieldDefinitions.Metadata.fields, + }, + }, + }); @ArgsType() -export class FractionArgs { - @Field(() => FractionWhereInput, { nullable: true }) - where?: FractionWhereInput; - @Field(() => FractionFetchInput, { nullable: true }) - sort?: FractionFetchInput; -} +export class GetFractionsArgs extends BaseQueryArgs( + FractionWhereInput, 
+ FractionSortOptions, +) {} -@ArgsType() -export class GetFractionsArgs extends withPagination(FractionArgs) {} +export { FractionSortOptions, FractionWhereInput }; diff --git a/src/graphql/schemas/args/hyperboardArgs.ts b/src/graphql/schemas/args/hyperboardArgs.ts index 7a7f88f5..47b32cb4 100644 --- a/src/graphql/schemas/args/hyperboardArgs.ts +++ b/src/graphql/schemas/args/hyperboardArgs.ts @@ -1,26 +1,32 @@ -import { ArgsType, InputType, Field } from "type-graphql"; -import { BasicHyperboardWhereInput } from "../inputs/hyperboardInput.js"; -import { withPagination } from "./baseArgs.js"; -import { OrderOptions } from "../inputs/orderOptions.js"; -import { Hyperboard } from "../typeDefs/hyperboardTypeDefs.js"; -import { HyperboardSortOptions } from "../inputs/sortOptions.js"; +import { EntityTypeDefs } from "../typeDefs/typeDefs.js"; +import { createEntityArgs } from "../../../lib/graphql/createEntityArgs.js"; +import { BaseQueryArgs } from "../../../lib/graphql/BaseQueryArgs.js"; +import { WhereFieldDefinitions } from "../../../lib/graphql/whereFieldDefinitions.js"; +import { ArgsType } from "type-graphql"; -@InputType() -class HyperboardWhereInput extends BasicHyperboardWhereInput {} - -@InputType() -class HyperboardFetchInput implements OrderOptions { - @Field(() => HyperboardSortOptions, { nullable: true }) - by?: HyperboardSortOptions; -} +const { WhereInput: HyperboardWhereInput, SortOptions: HyperboardSortOptions } = + createEntityArgs("Hyperboard", { + ...WhereFieldDefinitions.Hyperboard.fields, + collections: { + type: "id", + references: { + entity: EntityTypeDefs.Collection, + fields: WhereFieldDefinitions.Collection.fields, + }, + }, + admins: { + type: "id", + references: { + entity: EntityTypeDefs.User, + fields: WhereFieldDefinitions.User.fields, + }, + }, + }); @ArgsType() -export class HyperboardArgs { - @Field(() => HyperboardWhereInput, { nullable: true }) - where?: HyperboardWhereInput; - @Field(() => HyperboardFetchInput, { nullable: true }) 
- sort?: HyperboardFetchInput; -} +export class GetHyperboardsArgs extends BaseQueryArgs( + HyperboardWhereInput, + HyperboardSortOptions, +) {} -@ArgsType() -export class GetHyperboardsArgs extends withPagination(HyperboardArgs) {} +export { HyperboardSortOptions, HyperboardWhereInput }; diff --git a/src/graphql/schemas/args/hypercertsArgs.ts b/src/graphql/schemas/args/hypercertsArgs.ts index 7c2bee7e..ecdd79bf 100644 --- a/src/graphql/schemas/args/hypercertsArgs.ts +++ b/src/graphql/schemas/args/hypercertsArgs.ts @@ -1,41 +1,46 @@ -import { ArgsType, InputType, Field } from "type-graphql"; -import { BasicContractWhereInput } from "../inputs/contractInput.js"; -import { BasicMetadataWhereInput } from "../inputs/metadataInput.js"; -import { BasicAttestationWhereInput } from "../inputs/attestationInput.js"; -import { BasicFractionWhereInput } from "../inputs/fractionInput.js"; -import { withPagination } from "./baseArgs.js"; -import { Hypercert } from "../typeDefs/hypercertTypeDefs.js"; -import type { OrderOptions } from "../inputs/orderOptions.js"; -import { HypercertSortOptions } from "../inputs/sortOptions.js"; -import { BasicHypercertWhereArgs } from "../inputs/hypercertsInput.js"; +import { EntityTypeDefs } from "../typeDefs/typeDefs.js"; +import { createEntityArgs } from "../../../lib/graphql/createEntityArgs.js"; +import { BaseQueryArgs } from "../../../lib/graphql/BaseQueryArgs.js"; +import { WhereFieldDefinitions } from "../../../lib/graphql/whereFieldDefinitions.js"; +import { ArgsType } from "type-graphql"; -@InputType({ - description: "Arguments for filtering hypercerts", -}) -export class HypercertsWhereArgs extends BasicHypercertWhereArgs { - @Field(() => BasicContractWhereInput, { nullable: true }) - contract?: BasicContractWhereInput; - @Field(() => BasicMetadataWhereInput, { nullable: true }) - metadata?: BasicMetadataWhereInput; - @Field(() => BasicAttestationWhereInput, { nullable: true }) - attestations?: BasicAttestationWhereInput; - @Field(() 
=> BasicFractionWhereInput, { nullable: true }) - fractions?: BasicFractionWhereInput; -} - -@InputType() -export class HypercertFetchInput implements OrderOptions { - @Field(() => HypercertSortOptions, { nullable: true }) - by?: HypercertSortOptions; -} +const { SortOptions: HypercertSortOptions, WhereInput: HypercertWhereInput } = + createEntityArgs("Hypercert", { + ...WhereFieldDefinitions.Hypercert.fields, + contract: { + type: "id", + references: { + entity: EntityTypeDefs.Contract, + fields: WhereFieldDefinitions.Contract.fields, + }, + }, + metadata: { + type: "id", + references: { + entity: EntityTypeDefs.Metadata, + fields: WhereFieldDefinitions.Metadata.fields, + }, + }, + attestations: { + type: "id", + references: { + entity: EntityTypeDefs.Attestation, + fields: WhereFieldDefinitions.Attestation.fields, + }, + }, + fractions: { + type: "id", + references: { + entity: EntityTypeDefs.Fraction, + fields: WhereFieldDefinitions.Fraction.fields, + }, + }, + }); @ArgsType() -class HypercertArgs { - @Field(() => HypercertsWhereArgs, { nullable: true }) - where?: HypercertsWhereArgs; - @Field(() => HypercertFetchInput, { nullable: true }) - sort?: HypercertFetchInput; -} +export class GetHypercertsArgs extends BaseQueryArgs( + HypercertWhereInput, + HypercertSortOptions, +) {} -@ArgsType() -export class GetHypercertsArgs extends withPagination(HypercertArgs) {} +export { HypercertSortOptions, HypercertWhereInput }; diff --git a/src/graphql/schemas/args/metadataArgs.ts b/src/graphql/schemas/args/metadataArgs.ts index 7887e93d..2f5f26bf 100644 --- a/src/graphql/schemas/args/metadataArgs.ts +++ b/src/graphql/schemas/args/metadataArgs.ts @@ -1,30 +1,25 @@ -import { ArgsType, Field, InputType } from "type-graphql"; -import { BasicMetadataWhereInput } from "../inputs/metadataInput.js"; -import { withPagination } from "./baseArgs.js"; -import { BasicHypercertWhereArgs } from "../inputs/hypercertsInput.js"; -import type { OrderOptions } from 
"../inputs/orderOptions.js"; -import { Metadata } from "../typeDefs/metadataTypeDefs.js"; -import { MetadataSortOptions } from "../inputs/sortOptions.js"; +import { ArgsType } from "type-graphql"; +import { BaseQueryArgs } from "../../../lib/graphql/BaseQueryArgs.js"; +import { createEntityArgs } from "../../../lib/graphql/createEntityArgs.js"; +import { WhereFieldDefinitions } from "../../../lib/graphql/whereFieldDefinitions.js"; +import { EntityTypeDefs } from "../typeDefs/typeDefs.js"; -@InputType() -export class MetadataWhereInput extends BasicMetadataWhereInput { - @Field(() => BasicHypercertWhereArgs, { nullable: true }) - hypercerts?: BasicHypercertWhereArgs; -} - -@InputType() -export class MetadataFetchInput implements OrderOptions { - @Field(() => MetadataSortOptions, { nullable: true }) - by?: MetadataSortOptions; -} +const { WhereInput: MetadataWhereInput, SortOptions: MetadataSortOptions } = + createEntityArgs("Metadata", { + ...WhereFieldDefinitions.Metadata.fields, + hypercert: { + type: "id", + references: { + entity: EntityTypeDefs.Hypercert, + fields: WhereFieldDefinitions.Hypercert.fields, + }, + }, + }); @ArgsType() -export class MetadataArgs { - @Field(() => MetadataWhereInput, { nullable: true }) - where?: MetadataWhereInput; - @Field(() => MetadataFetchInput, { nullable: true }) - sort?: MetadataFetchInput; -} +export class GetMetadataArgs extends BaseQueryArgs( + MetadataWhereInput, + MetadataSortOptions, +) {} -@ArgsType() -export class GetMetadataArgs extends withPagination(MetadataArgs) {} +export { MetadataSortOptions, MetadataWhereInput }; diff --git a/src/graphql/schemas/args/orderArgs.ts b/src/graphql/schemas/args/orderArgs.ts index 41681523..6f85b6af 100644 --- a/src/graphql/schemas/args/orderArgs.ts +++ b/src/graphql/schemas/args/orderArgs.ts @@ -1,26 +1,25 @@ -import { ArgsType, Field, InputType } from "type-graphql"; -import { BasicOrderWhereInput } from "../inputs/orderInput.js"; -import { withPagination } from "./baseArgs.js"; 
-import { OrderSortOptions } from "../inputs/sortOptions.js"; -import type { OrderOptions } from "../inputs/orderOptions.js"; -import { Order } from "../typeDefs/orderTypeDefs.js"; +import { ArgsType } from "type-graphql"; +import { BaseQueryArgs } from "../../../lib/graphql/BaseQueryArgs.js"; +import { createEntityArgs } from "../../../lib/graphql/createEntityArgs.js"; +import { WhereFieldDefinitions } from "../../../lib/graphql/whereFieldDefinitions.js"; +import { EntityTypeDefs } from "../typeDefs/typeDefs.js"; -@InputType() -export class OrderWhereInput extends BasicOrderWhereInput {} - -@InputType() -export class OrderFetchInput implements OrderOptions { - @Field(() => OrderSortOptions, { nullable: true }) - by?: OrderSortOptions; -} +const { WhereInput: OrderWhereInput, SortOptions: OrderSortOptions } = + createEntityArgs("Order", { + ...WhereFieldDefinitions.Order.fields, + hypercert: { + type: "id", + references: { + entity: EntityTypeDefs.Hypercert, + fields: WhereFieldDefinitions.Hypercert.fields, + }, + }, + }); @ArgsType() -class OrderArgs { - @Field(() => OrderWhereInput, { nullable: true }) - where?: OrderWhereInput; - @Field(() => OrderFetchInput, { nullable: true }) - sort?: OrderFetchInput; -} +export class GetOrdersArgs extends BaseQueryArgs( + OrderWhereInput, + OrderSortOptions, +) {} -@ArgsType() -export class GetOrdersArgs extends withPagination(OrderArgs) {} +export { OrderSortOptions, OrderWhereInput }; diff --git a/src/graphql/schemas/args/salesArgs.ts b/src/graphql/schemas/args/salesArgs.ts index 4a8c5186..f5c875e0 100644 --- a/src/graphql/schemas/args/salesArgs.ts +++ b/src/graphql/schemas/args/salesArgs.ts @@ -1,26 +1,25 @@ -import { ArgsType, InputType, Field } from "type-graphql"; -import { BasicSaleWhereInput } from "../inputs/salesInput.js"; -import { withPagination } from "./baseArgs.js"; -import { SaleSortOptions } from "../inputs/sortOptions.js"; -import { Sale } from "../typeDefs/salesTypeDefs.js"; -import { OrderOptions } from 
"../inputs/orderOptions.js"; +import { ArgsType } from "type-graphql"; +import { BaseQueryArgs } from "../../../lib/graphql/BaseQueryArgs.js"; +import { createEntityArgs } from "../../../lib/graphql/createEntityArgs.js"; +import { WhereFieldDefinitions } from "../../../lib/graphql/whereFieldDefinitions.js"; +import { EntityTypeDefs } from "../typeDefs/typeDefs.js"; -@InputType() -export class SaleWhereInput extends BasicSaleWhereInput {} - -@InputType() -export class SaleFetchInput implements OrderOptions { - @Field(() => SaleSortOptions, { nullable: true }) - by?: SaleSortOptions; -} +const { WhereInput: SalesWhereInput, SortOptions: SalesSortOptions } = + createEntityArgs("Sale", { + ...WhereFieldDefinitions.Sale.fields, + hypercert: { + type: "id", + references: { + entity: EntityTypeDefs.Hypercert, + fields: WhereFieldDefinitions.Hypercert.fields, + }, + }, + }); @ArgsType() -class SalesArgs { - @Field(() => SaleWhereInput, { nullable: true }) - where?: SaleWhereInput; - @Field(() => SaleFetchInput, { nullable: true }) - sort?: SaleFetchInput; -} +export class GetSalesArgs extends BaseQueryArgs( + SalesWhereInput, + SalesSortOptions, +) {} -@ArgsType() -export class GetSalesArgs extends withPagination(SalesArgs) {} +export { SalesSortOptions, SalesWhereInput }; diff --git a/src/graphql/schemas/args/signatureRequestArgs.ts b/src/graphql/schemas/args/signatureRequestArgs.ts index ca4bff61..ef32c1e3 100644 --- a/src/graphql/schemas/args/signatureRequestArgs.ts +++ b/src/graphql/schemas/args/signatureRequestArgs.ts @@ -1,32 +1,22 @@ -import { ArgsType, Field, InputType } from "type-graphql"; +import { ArgsType } from "type-graphql"; +import { BaseQueryArgs } from "../../../lib/graphql/BaseQueryArgs.js"; +import { createEntityArgs } from "../../../lib/graphql/createEntityArgs.js"; -import { BasicSignatureRequestWhereInput } from "../inputs/signatureRequestInput.js"; -import type { OrderOptions } from "../inputs/orderOptions.js"; -import { SignatureRequest } from 
"../typeDefs/signatureRequestTypeDefs.js"; -import { SignatureRequestSortOptions } from "../inputs/sortOptions.js"; - -import { withPagination } from "./baseArgs.js"; - -@InputType() -export class SignatureRequestWhereInput extends BasicSignatureRequestWhereInput {} - -@InputType() -export class SignatureRequestFetchInput - implements OrderOptions -{ - @Field(() => SignatureRequestSortOptions, { nullable: true }) - by?: SignatureRequestSortOptions; -} +const { + WhereInput: SignatureRequestWhereInput, + SortOptions: SignatureRequestSortOptions, +} = createEntityArgs("SignatureRequest", { + safe_address: "string", + message_hash: "string", + timestamp: "bigint", + chain_id: "bigint", + status: "enum", +}); @ArgsType() -class SignatureRequestArgs { - @Field(() => SignatureRequestWhereInput, { nullable: true }) - where?: SignatureRequestWhereInput; - @Field(() => SignatureRequestFetchInput, { nullable: true }) - sort?: SignatureRequestFetchInput; -} - -@ArgsType() -export class GetSignatureRequestArgs extends withPagination( - SignatureRequestArgs, +export class GetSignatureRequestsArgs extends BaseQueryArgs( + SignatureRequestWhereInput, + SignatureRequestSortOptions, ) {} + +export { SignatureRequestSortOptions, SignatureRequestWhereInput }; diff --git a/src/graphql/schemas/args/userArgs.ts b/src/graphql/schemas/args/userArgs.ts index e7207ac9..c14ab405 100644 --- a/src/graphql/schemas/args/userArgs.ts +++ b/src/graphql/schemas/args/userArgs.ts @@ -1,15 +1,17 @@ -import { ArgsType, InputType, Field } from "type-graphql"; -import { withPagination } from "./baseArgs.js"; -import { BasicUserWhereInput } from "../inputs/userInput.js"; +import { ArgsType } from "type-graphql"; +import { BaseQueryArgs } from "../../../lib/graphql/BaseQueryArgs.js"; +import { createEntityArgs } from "../../../lib/graphql/createEntityArgs.js"; +import { WhereFieldDefinitions } from "../../../lib/graphql/whereFieldDefinitions.js"; -@InputType() -export class UserWhereInput extends 
BasicUserWhereInput {} +const { WhereInput: UserWhereInput, SortOptions: UserSortOptions } = + createEntityArgs("User", { + ...WhereFieldDefinitions.User.fields, + }); @ArgsType() -class UserArgs { - @Field(() => UserWhereInput, { nullable: true }) - where?: UserWhereInput; -} +export class GetUsersArgs extends BaseQueryArgs( + UserWhereInput, + UserSortOptions, +) {} -@ArgsType() -export class GetUserArgs extends withPagination(UserArgs) {} +export { UserSortOptions, UserWhereInput }; diff --git a/src/graphql/schemas/inputs/allowlistRecordsInput.ts b/src/graphql/schemas/inputs/allowlistRecordsInput.ts deleted file mode 100644 index d44244da..00000000 --- a/src/graphql/schemas/inputs/allowlistRecordsInput.ts +++ /dev/null @@ -1,35 +0,0 @@ -import { Field, InputType } from "type-graphql"; -import type { WhereOptions } from "./whereOptions.js"; -import { - BooleanSearchOptions, - BigIntSearchOptions, - StringArraySearchOptions, - StringSearchOptions, -} from "./searchOptions.js"; -import { AllowlistRecord } from "../typeDefs/allowlistRecordTypeDefs.js"; - -@InputType() -export class BasicAllowlistRecordWhereInput - implements WhereOptions -{ - @Field(() => StringSearchOptions, { nullable: true }) - hypercert_id?: StringSearchOptions; - @Field(() => BigIntSearchOptions, { nullable: true }) - token_id?: BigIntSearchOptions; - @Field(() => StringSearchOptions, { nullable: true }) - leaf?: StringSearchOptions; - @Field(() => BigIntSearchOptions, { nullable: true }) - entry?: BigIntSearchOptions; - @Field(() => StringSearchOptions, { nullable: true }) - user_address?: StringSearchOptions; - @Field(() => BooleanSearchOptions, { nullable: true }) - claimed?: BooleanSearchOptions; - @Field(() => StringArraySearchOptions, { nullable: true }) - proof?: StringArraySearchOptions; - @Field(() => BigIntSearchOptions, { nullable: true }) - units?: BigIntSearchOptions; - @Field(() => BigIntSearchOptions, { nullable: true }) - total_units?: BigIntSearchOptions; - @Field(() => 
StringSearchOptions, { nullable: true }) - root?: StringSearchOptions; -} diff --git a/src/graphql/schemas/inputs/attestationInput.ts b/src/graphql/schemas/inputs/attestationInput.ts deleted file mode 100644 index 85657adb..00000000 --- a/src/graphql/schemas/inputs/attestationInput.ts +++ /dev/null @@ -1,32 +0,0 @@ -import { Field, InputType } from "type-graphql"; -import type { WhereOptions } from "./whereOptions.js"; -import { BigIntSearchOptions, StringSearchOptions } from "./searchOptions.js"; -import type { Attestation } from "../typeDefs/attestationTypeDefs.js"; - -@InputType() -export class BasicAttestationWhereInput implements WhereOptions { - @Field(() => StringSearchOptions, { nullable: true }) - uid?: StringSearchOptions; - @Field(() => BigIntSearchOptions, { nullable: true }) - creation_block_timestamp?: BigIntSearchOptions; - @Field(() => BigIntSearchOptions, { nullable: true }) - creation_block_number?: BigIntSearchOptions; - @Field(() => BigIntSearchOptions, { nullable: true }) - last_update_block_number?: BigIntSearchOptions; - @Field(() => BigIntSearchOptions, { nullable: true }) - last_update_block_timestamp?: BigIntSearchOptions; - @Field(() => StringSearchOptions, { nullable: true }) - attester?: StringSearchOptions; - @Field(() => StringSearchOptions, { nullable: true }) - recipient?: StringSearchOptions; - @Field(() => StringSearchOptions, { nullable: true }) - resolver?: StringSearchOptions; - @Field(() => StringSearchOptions, { nullable: true }) - attestation?: StringSearchOptions; - @Field(() => BigIntSearchOptions, { nullable: true }) - chain_id?: BigIntSearchOptions; - @Field(() => StringSearchOptions, { nullable: true }) - contract_address?: StringSearchOptions; - @Field(() => StringSearchOptions, { nullable: true }) - token_id?: StringSearchOptions; -} diff --git a/src/graphql/schemas/inputs/attestationSchemaInput.ts b/src/graphql/schemas/inputs/attestationSchemaInput.ts deleted file mode 100644 index 8ba866d1..00000000 --- 
a/src/graphql/schemas/inputs/attestationSchemaInput.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { Field, InputType } from "type-graphql"; -import type { AttestationSchema } from "../typeDefs/attestationSchemaTypeDefs.js"; -import { - BigIntSearchOptions, - BooleanSearchOptions, - StringSearchOptions, -} from "./searchOptions.js"; -import type { WhereOptions } from "./whereOptions.js"; - -@InputType() -export class BasicAttestationSchemaWhereInput - implements WhereOptions -{ - @Field(() => StringSearchOptions, { nullable: true }) - uid?: StringSearchOptions | null; - @Field(() => BigIntSearchOptions, { nullable: true }) - chain_id?: BigIntSearchOptions | null; - @Field(() => StringSearchOptions, { nullable: true }) - resolver?: BigIntSearchOptions | null; - @Field(() => StringSearchOptions, { nullable: true }) - schema?: StringSearchOptions | null; - @Field(() => BooleanSearchOptions, { nullable: true }) - revocable?: BooleanSearchOptions | null; -} diff --git a/src/graphql/schemas/inputs/blueprintInput.ts b/src/graphql/schemas/inputs/blueprintInput.ts deleted file mode 100644 index 432b87ed..00000000 --- a/src/graphql/schemas/inputs/blueprintInput.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { Field, InputType } from "type-graphql"; -import type { WhereOptions } from "./whereOptions.js"; -import { - StringSearchOptions, - BooleanSearchOptions, - NumberSearchOptions, -} from "./searchOptions.js"; -import { Blueprint } from "../typeDefs/blueprintTypeDefs.js"; - -@InputType() -export class BasicBlueprintWhereInput implements WhereOptions { - @Field(() => NumberSearchOptions, { - nullable: true, - }) - id?: NumberSearchOptions; - - @Field(() => StringSearchOptions, { nullable: true }) - minter_address?: StringSearchOptions | null; - - @Field(() => StringSearchOptions, { nullable: true }) - admin_address?: StringSearchOptions | null; - - @Field(() => BooleanSearchOptions, { nullable: true }) - minted?: BooleanSearchOptions | null; -} diff --git 
a/src/graphql/schemas/inputs/collectionInput.ts b/src/graphql/schemas/inputs/collectionInput.ts deleted file mode 100644 index 617ebafd..00000000 --- a/src/graphql/schemas/inputs/collectionInput.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { Field, InputType } from "type-graphql"; - -import { Collection } from "../typeDefs/collectionTypeDefs.js"; - -import { IdSearchOptions, StringSearchOptions } from "./searchOptions.js"; -import type { WhereOptions } from "./whereOptions.js"; - -@InputType() -export class BasicCollectionWhereInput implements WhereOptions { - @Field(() => IdSearchOptions, { nullable: true }) - id?: IdSearchOptions | null; - - @Field(() => StringSearchOptions, { nullable: true }) - name?: StringSearchOptions; - - @Field(() => StringSearchOptions, { nullable: true }) - description?: StringSearchOptions; -} diff --git a/src/graphql/schemas/inputs/contractInput.ts b/src/graphql/schemas/inputs/contractInput.ts deleted file mode 100644 index 57993882..00000000 --- a/src/graphql/schemas/inputs/contractInput.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { Field, InputType } from "type-graphql"; -import type { WhereOptions } from "./whereOptions.js"; -import { Contract } from "../typeDefs/contractTypeDefs.js"; -import { - IdSearchOptions, - BigIntSearchOptions, - StringSearchOptions, -} from "./searchOptions.js"; - -@InputType() -export class BasicContractWhereInput implements WhereOptions { - @Field(() => IdSearchOptions, { nullable: true }) - id?: IdSearchOptions; - @Field(() => StringSearchOptions, { nullable: true }) - contract_address?: StringSearchOptions; - @Field(() => BigIntSearchOptions, { nullable: true }) - chain_id?: BigIntSearchOptions; -} diff --git a/src/graphql/schemas/inputs/fractionInput.ts b/src/graphql/schemas/inputs/fractionInput.ts deleted file mode 100644 index 3b3f678b..00000000 --- a/src/graphql/schemas/inputs/fractionInput.ts +++ /dev/null @@ -1,32 +0,0 @@ -import { Field, InputType } from "type-graphql"; -import type { WhereOptions } 
from "./whereOptions.js"; -import { - IdSearchOptions, - BigIntSearchOptions, - StringSearchOptions, -} from "./searchOptions.js"; -import { Fraction } from "../typeDefs/fractionTypeDefs.js"; - -@InputType() -export class BasicFractionWhereInput implements WhereOptions { - @Field(() => IdSearchOptions, { nullable: true }) - id?: IdSearchOptions; - @Field(() => StringSearchOptions, { nullable: true }) - hypercert_id?: StringSearchOptions; - @Field(() => StringSearchOptions, { nullable: true }) - fraction_id?: StringSearchOptions; - @Field(() => BigIntSearchOptions, { nullable: true }) - creation_block_timestamp?: BigIntSearchOptions; - @Field(() => BigIntSearchOptions, { nullable: true }) - creation_block_number?: BigIntSearchOptions; - @Field(() => BigIntSearchOptions, { nullable: true }) - last_update_block_number?: BigIntSearchOptions; - @Field(() => BigIntSearchOptions, { nullable: true }) - last_update_block_timestamp?: BigIntSearchOptions; - @Field(() => BigIntSearchOptions, { nullable: true }) - token_id?: BigIntSearchOptions; - @Field(() => BigIntSearchOptions, { nullable: true }) - units?: BigIntSearchOptions; - @Field(() => StringSearchOptions, { nullable: true }) - owner_address?: StringSearchOptions; -} diff --git a/src/graphql/schemas/inputs/hyperboardInput.ts b/src/graphql/schemas/inputs/hyperboardInput.ts deleted file mode 100644 index 28893c7e..00000000 --- a/src/graphql/schemas/inputs/hyperboardInput.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { Field, InputType } from "type-graphql"; -import type { WhereOptions } from "./whereOptions.js"; -import { - IdSearchOptions, - BigIntSearchOptions, - StringSearchOptions, -} from "./searchOptions.js"; -import { Hyperboard } from "../typeDefs/hyperboardTypeDefs.js"; - -@InputType() -export class BasicHyperboardWhereInput implements WhereOptions { - @Field(() => IdSearchOptions, { nullable: true }) - id?: IdSearchOptions | null; - @Field(() => BigIntSearchOptions, { nullable: true }) - chain_id?: 
BigIntSearchOptions | null; - @Field(() => StringSearchOptions, { nullable: true }) - admin_id?: StringSearchOptions | null; -} diff --git a/src/graphql/schemas/inputs/hypercertsInput.ts b/src/graphql/schemas/inputs/hypercertsInput.ts deleted file mode 100644 index 58e442b3..00000000 --- a/src/graphql/schemas/inputs/hypercertsInput.ts +++ /dev/null @@ -1,38 +0,0 @@ -import { InputType, Field } from "type-graphql"; -import type { WhereOptions } from "./whereOptions.js"; -import { Hypercert } from "../typeDefs/hypercertTypeDefs.js"; -import { - IdSearchOptions, - BigIntSearchOptions, - StringSearchOptions, - NumberSearchOptions, -} from "./searchOptions.js"; - -@InputType() -export class BasicHypercertWhereArgs implements WhereOptions { - @Field(() => IdSearchOptions, { nullable: true }) - id?: IdSearchOptions; - @Field(() => BigIntSearchOptions, { nullable: true }) - creation_block_timestamp?: BigIntSearchOptions; - @Field(() => BigIntSearchOptions, { nullable: true }) - creation_block_number?: BigIntSearchOptions; - @Field(() => BigIntSearchOptions, { nullable: true }) - last_update_block_number?: BigIntSearchOptions; - @Field(() => BigIntSearchOptions, { nullable: true }) - last_update_block_timestamp?: BigIntSearchOptions; - @Field(() => BigIntSearchOptions, { nullable: true }) - token_id?: BigIntSearchOptions; - @Field(() => StringSearchOptions, { nullable: true }) - creator_address?: StringSearchOptions; - @Field(() => StringSearchOptions, { nullable: true }) - uri?: StringSearchOptions; - @Field(() => StringSearchOptions, { nullable: true }) - hypercert_id?: StringSearchOptions; - @Field(() => NumberSearchOptions, { - nullable: true, - description: "Count of attestations referencing this hypercert", - }) - attestations_count?: NumberSearchOptions; - @Field(() => NumberSearchOptions, { nullable: true }) - sales_count?: NumberSearchOptions; -} diff --git a/src/graphql/schemas/inputs/metadataInput.ts b/src/graphql/schemas/inputs/metadataInput.ts deleted file mode 
100644 index c9df7a77..00000000 --- a/src/graphql/schemas/inputs/metadataInput.ts +++ /dev/null @@ -1,41 +0,0 @@ -import { Field, InputType } from "type-graphql"; -import type { WhereOptions } from "./whereOptions.js"; -import { - IdSearchOptions, - BigIntSearchOptions, - StringArraySearchOptions, - StringSearchOptions, -} from "./searchOptions.js"; -import { Metadata } from "../typeDefs/metadataTypeDefs.js"; - -@InputType() -export class BasicMetadataWhereInput implements WhereOptions { - @Field(() => IdSearchOptions, { nullable: true }) - id?: IdSearchOptions; - @Field(() => StringSearchOptions, { nullable: true }) - name?: StringSearchOptions | null; - @Field(() => StringSearchOptions, { nullable: true }) - description?: StringSearchOptions | null; - @Field(() => StringSearchOptions, { nullable: true }) - uri?: StringSearchOptions | null; - @Field(() => StringArraySearchOptions, { nullable: true }) - contributors?: StringArraySearchOptions | null; - @Field(() => StringArraySearchOptions, { nullable: true }) - work_scope?: StringArraySearchOptions | null; - @Field(() => StringArraySearchOptions, { nullable: true }) - impact_scope?: StringArraySearchOptions | null; - @Field(() => StringArraySearchOptions, { nullable: true }) - rights?: StringArraySearchOptions | null; - @Field(() => BigIntSearchOptions, { nullable: true }) - creation_block_timestamp?: BigIntSearchOptions | null; - @Field(() => BigIntSearchOptions, { nullable: true }) - last_block_update_timestamp?: BigIntSearchOptions | null; - @Field(() => BigIntSearchOptions, { nullable: true }) - work_timeframe_from?: BigIntSearchOptions | null; - @Field(() => BigIntSearchOptions, { nullable: true }) - work_timeframe_to?: BigIntSearchOptions | null; - @Field(() => BigIntSearchOptions, { nullable: true }) - impact_timeframe_from?: BigIntSearchOptions | null; - @Field(() => BigIntSearchOptions, { nullable: true }) - impact_timeframe_to?: BigIntSearchOptions | null; -} diff --git 
a/src/graphql/schemas/inputs/orderInput.ts b/src/graphql/schemas/inputs/orderInput.ts deleted file mode 100644 index 2ff26d4c..00000000 --- a/src/graphql/schemas/inputs/orderInput.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { Field, InputType } from "type-graphql"; -import type { WhereOptions } from "./whereOptions.js"; -import { - IdSearchOptions, - BigIntSearchOptions, - StringSearchOptions, - BooleanSearchOptions, -} from "./searchOptions.js"; -import { Order } from "../typeDefs/orderTypeDefs.js"; - -@InputType() -export class BasicOrderWhereInput implements WhereOptions { - @Field(() => IdSearchOptions, { nullable: true }) - id?: IdSearchOptions | null; - @Field(() => BigIntSearchOptions, { nullable: true }) - chainId?: BigIntSearchOptions | null; - @Field(() => StringSearchOptions, { nullable: true }) - signer?: StringSearchOptions | null; - @Field(() => StringSearchOptions, { nullable: true }) - hypercert_id?: StringSearchOptions | null; - @Field(() => BooleanSearchOptions, { nullable: true }) - invalidated?: BooleanSearchOptions | null; - @Field(() => StringSearchOptions, { nullable: true }) - currency?: StringSearchOptions | null; -} diff --git a/src/graphql/schemas/inputs/orderOptions.ts b/src/graphql/schemas/inputs/orderOptions.ts deleted file mode 100644 index 32454075..00000000 --- a/src/graphql/schemas/inputs/orderOptions.ts +++ /dev/null @@ -1,5 +0,0 @@ -import { SortOptions } from "./sortOptions.js"; - -export type OrderOptions = { - by?: SortOptions; -}; diff --git a/src/graphql/schemas/inputs/salesInput.ts b/src/graphql/schemas/inputs/salesInput.ts deleted file mode 100644 index 6bd4ce5f..00000000 --- a/src/graphql/schemas/inputs/salesInput.ts +++ /dev/null @@ -1,46 +0,0 @@ -import { Field, InputType } from "type-graphql"; -import type { WhereOptions } from "./whereOptions.js"; -import { - IdSearchOptions, - NumberArraySearchOptions, - BigIntSearchOptions, - StringArraySearchOptions, - StringSearchOptions, -} from "./searchOptions.js"; -import { Sale 
} from "../typeDefs/salesTypeDefs.js"; - -@InputType() -export class BasicSaleWhereInput implements WhereOptions { - @Field(() => IdSearchOptions, { nullable: true }) - transaction_hash?: IdSearchOptions | null; - - @Field(() => StringSearchOptions, { nullable: true }) - hypercert_id?: StringSearchOptions | null; - - @Field(() => StringArraySearchOptions, { nullable: true }) - item_ids?: StringArraySearchOptions | null; - - @Field(() => StringSearchOptions, { nullable: true }) - currency?: StringSearchOptions | null; - - @Field(() => StringSearchOptions, { nullable: true }) - collection?: StringSearchOptions | null; - - @Field(() => StringSearchOptions, { nullable: true }) - buyer?: StringSearchOptions | null; - - @Field(() => StringSearchOptions, { nullable: true }) - seller?: StringSearchOptions | null; - - @Field(() => BigIntSearchOptions, { nullable: true }) - strategy_id?: BigIntSearchOptions | null; - - @Field(() => BigIntSearchOptions, { nullable: true }) - creation_block_number?: BigIntSearchOptions | null; - - @Field(() => BigIntSearchOptions, { nullable: true }) - creation_block_timestamp?: BigIntSearchOptions | null; - - @Field(() => NumberArraySearchOptions, { nullable: true }) - amounts?: NumberArraySearchOptions | null; -} diff --git a/src/graphql/schemas/inputs/searchOptions.ts b/src/graphql/schemas/inputs/searchOptions.ts index 83aaa46e..b1b112ac 100644 --- a/src/graphql/schemas/inputs/searchOptions.ts +++ b/src/graphql/schemas/inputs/searchOptions.ts @@ -79,11 +79,19 @@ export class NumberSearchOptions { @InputType() export class StringArraySearchOptions { - @Field(() => [String], { nullable: true }) - contains?: string[]; + @Field(() => [String], { + nullable: true, + description: "Array of strings", + name: "contains", + }) + arrayContains?: string[]; - @Field(() => [String], { nullable: true }) - overlaps?: string[]; + @Field(() => [String], { + nullable: true, + description: "Array of strings", + name: "overlaps", + }) + arrayOverlaps?: 
string[]; } @InputType() @@ -91,14 +99,16 @@ export class NumberArraySearchOptions { @Field(() => [GraphQLBigInt], { nullable: true, description: "Array of numbers", + name: "contains", }) - contains?: bigint[]; + arrayContains?: bigint[]; @Field(() => [GraphQLBigInt], { nullable: true, description: "Array of numbers", + name: "overlaps", }) - overlaps?: bigint[]; + arrayOverlaps?: bigint[]; } @InputType() diff --git a/src/graphql/schemas/inputs/signatureRequestInput.ts b/src/graphql/schemas/inputs/signatureRequestInput.ts deleted file mode 100644 index b2137f3b..00000000 --- a/src/graphql/schemas/inputs/signatureRequestInput.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { Field, InputType } from "type-graphql"; - -import { SignatureRequest } from "../typeDefs/signatureRequestTypeDefs.js"; - -import type { WhereOptions } from "./whereOptions.js"; -import { - BigIntSearchOptions, - SignatureRequestPurposeSearchOptions, - SignatureRequestStatusSearchOptions, - StringSearchOptions, -} from "./searchOptions.js"; - -@InputType() -export class BasicSignatureRequestWhereInput - implements WhereOptions -{ - @Field(() => StringSearchOptions, { nullable: true }) - safe_address?: StringSearchOptions; - - @Field(() => StringSearchOptions, { nullable: true }) - message_hash?: StringSearchOptions; - - @Field(() => BigIntSearchOptions, { nullable: true }) - timestamp?: BigIntSearchOptions; - - @Field(() => BigIntSearchOptions, { nullable: true }) - chain_id?: BigIntSearchOptions; - - @Field(() => SignatureRequestPurposeSearchOptions, { nullable: true }) - purpose?: SignatureRequestPurposeSearchOptions; - - @Field(() => SignatureRequestStatusSearchOptions, { nullable: true }) - status?: SignatureRequestStatusSearchOptions; -} diff --git a/src/graphql/schemas/inputs/sortOptions.ts b/src/graphql/schemas/inputs/sortOptions.ts deleted file mode 100644 index 9c467a61..00000000 --- a/src/graphql/schemas/inputs/sortOptions.ts +++ /dev/null @@ -1,251 +0,0 @@ -import { Field, InputType } from 
"type-graphql"; -import { SortOrder } from "../enums/sortEnums.js"; -import type { Hypercert } from "../typeDefs/hypercertTypeDefs.js"; -import type { Contract } from "../typeDefs/contractTypeDefs.js"; -import type { Metadata } from "../typeDefs/metadataTypeDefs.js"; -import type { Attestation } from "../typeDefs/attestationTypeDefs.js"; -import type { AttestationSchema } from "../typeDefs/attestationSchemaTypeDefs.js"; -import type { Fraction } from "../typeDefs/fractionTypeDefs.js"; -import { Order } from "../typeDefs/orderTypeDefs.js"; -import { Sale } from "../typeDefs/salesTypeDefs.js"; -import { Hyperboard } from "../typeDefs/hyperboardTypeDefs.js"; -import { Blueprint } from "../typeDefs/blueprintTypeDefs.js"; -import { SignatureRequest } from "../typeDefs/signatureRequestTypeDefs.js"; -import { Collection } from "../typeDefs/collectionTypeDefs.js"; - -export type SortOptions = { - [P in keyof T]: SortOrder | null; -}; - -@InputType() -export class HypercertSortOptions implements SortOptions { - @Field(() => SortOrder, { nullable: true }) - hypercert_id?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - creation_block_timestamp?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - creation_block_number?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - last_update_block_number?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - last_update_block_timestamp?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - token_id?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - units?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - owner_address?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - last_block_update_timestamp?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - uri?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - attestations_count?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - sales_count?: SortOrder; -} - -@InputType() -export class 
ContractSortOptions implements SortOptions { - @Field(() => SortOrder, { nullable: true }) - contract_id?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - contract_address?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - chain_id?: SortOrder; -} - -@InputType() -export class MetadataSortOptions implements SortOptions { - @Field(() => SortOrder, { nullable: true }) - description?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - external_url?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - metadata_id?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - name?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - uri?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - allow_list_uri?: SortOrder; -} - -@InputType() -export class BlueprintSortOptions implements SortOptions { - @Field(() => SortOrder, { nullable: true }) - created_at?: SortOrder; -} - -@InputType() -export class AttestationSortOptions implements SortOptions { - @Field(() => SortOrder, { nullable: true }) - attestation_uid?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - creation_block_timestamp?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - creation_block_number?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - last_update_block_number?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - last_update_block_timestamp?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - attester_address?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - recipient_address?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - schema?: SortOrder; -} - -@InputType() -export class AttestationSchemaSortOptions - implements SortOptions -{ - @Field(() => SortOrder, { nullable: true }) - eas_schema_id?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - chain_id?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - resolver?: SortOrder; - @Field(() => SortOrder, { nullable: true }) 
- revocable?: SortOrder; -} - -@InputType() -export class FractionSortOptions implements SortOptions { - @Field(() => SortOrder, { nullable: true }) - creation_block_timestamp?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - creation_block_number?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - last_update_block_number?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - last_update_block_timestamp?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - token_id?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - units?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - owner_address?: SortOrder; -} - -@InputType() -export class AllowlistRecordSortOptions implements SortOptions { - @Field(() => SortOrder, { nullable: true }) - hypercert_id?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - token_id?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - leaf?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - entry?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - user_address?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - claimed?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - proof?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - units?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - total_units?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - root?: SortOrder; -} - -@InputType() -export class OrderSortOptions implements SortOptions { - @Field(() => SortOrder, { nullable: true }) - amounts?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - chainId?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - collection?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - collectionType?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - createdAt?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - currency?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - endTime?: 
SortOrder; - @Field(() => SortOrder, { nullable: true }) - globalNonce?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - hypercert_id?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - invalidated?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - orderNonce?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - price?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - quoteType?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - signer?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - startTime?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - strategyId?: SortOrder; -} - -@InputType() -export class SaleSortOptions implements SortOptions { - @Field(() => SortOrder, { nullable: true }) - amounts?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - buyer?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - collection?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - creationBlockNumber?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - creationBlockTimestamp?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - currency?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - hypercertId?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - seller?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - strategyId?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - transactionHash?: SortOrder; -} - -@InputType() -export class HyperboardSortOptions implements SortOptions { - @Field(() => SortOrder, { nullable: true }) - name?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - admin_id?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - chainId?: SortOrder; -} - -@InputType() -export class SignatureRequestSortOptions - implements SortOptions -{ - @Field(() => SortOrder, { nullable: true }) - safe_address?: SortOrder; - - @Field(() => SortOrder, { nullable: true }) - message_hash?: SortOrder; - - @Field(() => 
SortOrder, { nullable: true }) - timestamp?: SortOrder; - - @Field(() => SortOrder, { nullable: true }) - purpose?: SortOrder; -} - -@InputType() -export class CollectionSortOptions implements SortOptions { - @Field(() => SortOrder, { nullable: true }) - name?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - created_at?: SortOrder; - @Field(() => SortOrder, { nullable: true }) - description?: SortOrder; -} diff --git a/src/graphql/schemas/inputs/userInput.ts b/src/graphql/schemas/inputs/userInput.ts deleted file mode 100644 index ec0d53fb..00000000 --- a/src/graphql/schemas/inputs/userInput.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { Field, InputType } from "type-graphql"; -import type { WhereOptions } from "./whereOptions.js"; -import { BigIntSearchOptions, StringSearchOptions } from "./searchOptions.js"; -import { User } from "../typeDefs/userTypeDefs.js"; - -@InputType() -export class BasicUserWhereInput implements WhereOptions { - @Field(() => StringSearchOptions, { nullable: true }) - address?: StringSearchOptions | null; - - @Field(() => BigIntSearchOptions, { nullable: true }) - chain_id?: BigIntSearchOptions | null; -} diff --git a/src/graphql/schemas/inputs/whereOptions.ts b/src/graphql/schemas/inputs/whereOptions.ts deleted file mode 100644 index 40d7b8ac..00000000 --- a/src/graphql/schemas/inputs/whereOptions.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { - BooleanSearchOptions, - IdSearchOptions, - NumberArraySearchOptions, - BigIntSearchOptions, - StringArraySearchOptions, - StringSearchOptions, -} from "./searchOptions.js"; -import type { BasicContractWhereInput } from "./contractInput.js"; -import type { BasicFractionWhereInput } from "./fractionInput.js"; -import type { BasicMetadataWhereInput } from "./metadataInput.js"; -import type { BasicHypercertWhereArgs } from "./hypercertsInput.js"; -import type { BasicSignatureRequestWhereInput } from "./signatureRequestInput.js"; - -export type WhereOptions = { - [P in keyof T]: - | IdSearchOptions 
- | BooleanSearchOptions - | StringSearchOptions - | BigIntSearchOptions - | StringArraySearchOptions - | NumberArraySearchOptions - | BasicMetadataWhereInput - | BasicHypercertWhereArgs - | BasicContractWhereInput - | BasicFractionWhereInput - | BasicSignatureRequestWhereInput - | null; -}; diff --git a/src/graphql/schemas/resolvers/allowlistRecordResolver.ts b/src/graphql/schemas/resolvers/allowlistRecordResolver.ts deleted file mode 100644 index ce398fd5..00000000 --- a/src/graphql/schemas/resolvers/allowlistRecordResolver.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { Args, ObjectType, Query, Resolver } from "type-graphql"; -import { AllowlistRecord } from "../typeDefs/allowlistRecordTypeDefs.js"; -import { GetAllowlistRecordsArgs } from "../args/allowlistRecordArgs.js"; -import { createBaseResolver, DataResponse } from "./baseTypes.js"; - -@ObjectType() -class GetAllowlistRecordResponse extends DataResponse(AllowlistRecord) {} - -const AllowlistRecordBaseResolver = createBaseResolver("allowlistRecord"); - -@Resolver(() => AllowlistRecord) -class AllowlistRecordResolver extends AllowlistRecordBaseResolver { - @Query(() => GetAllowlistRecordResponse) - async allowlistRecords(@Args() args: GetAllowlistRecordsArgs) { - return await this.getAllowlistRecords(args); - } -} - -export { AllowlistRecordResolver }; diff --git a/src/graphql/schemas/resolvers/attestationResolver.ts b/src/graphql/schemas/resolvers/attestationResolver.ts deleted file mode 100644 index 0a31f838..00000000 --- a/src/graphql/schemas/resolvers/attestationResolver.ts +++ /dev/null @@ -1,73 +0,0 @@ -import { - Args, - FieldResolver, - ObjectType, - Query, - Resolver, - Root, -} from "type-graphql"; -import { GetAttestationsArgs } from "../args/attestationArgs.js"; -import { Attestation } from "../typeDefs/attestationTypeDefs.js"; -import { z } from "zod"; -import { getAddress, isAddress } from "viem"; -import { createBaseResolver, DataResponse } from "./baseTypes.js"; - -const HypercertPointer = 
z.object({ - chain_id: z.coerce.bigint(), - contract_address: z - .string() - .refine(isAddress, { message: "Invalid contract address" }), - token_id: z.coerce.bigint(), -}); - -@ObjectType() -export default class GetAttestationsResponse extends DataResponse( - Attestation, -) {} - -const AttestationBaseResolver = createBaseResolver("attestations"); - -@Resolver(() => Attestation) -class AttestationResolver extends AttestationBaseResolver { - @Query(() => GetAttestationsResponse) - async attestations(@Args() args: GetAttestationsArgs) { - return await this.getAttestations(args); - } - - @FieldResolver() - async hypercert(@Root() attestation: Attestation) { - if (!attestation.data) return null; - - const { success, data } = HypercertPointer.safeParse(attestation.data); - - if (!success) return null; - - const { chain_id, contract_address, token_id } = data; - const hypercertId = `${chain_id}-${getAddress(contract_address)}-${token_id.toString()}`; - - return await this.getHypercerts( - { - where: { - hypercert_id: { eq: hypercertId }, - }, - }, - true, - ); - } - - @FieldResolver() - async eas_schema(@Root() attestation: Attestation) { - if (!attestation.schema_uid) return; - - return await this.getAttestationSchemas( - { - where: { - uid: { eq: attestation.schema_uid }, - }, - }, - true, - ); - } -} - -export { AttestationResolver }; diff --git a/src/graphql/schemas/resolvers/attestationSchemaResolver.ts b/src/graphql/schemas/resolvers/attestationSchemaResolver.ts deleted file mode 100644 index 8872894c..00000000 --- a/src/graphql/schemas/resolvers/attestationSchemaResolver.ts +++ /dev/null @@ -1,35 +0,0 @@ -import { - Args, - FieldResolver, - ObjectType, - Query, - Resolver, - Root, -} from "type-graphql"; -import { AttestationSchema } from "../typeDefs/attestationSchemaTypeDefs.js"; -import { GetAttestationSchemasArgs } from "../args/attestationSchemaArgs.js"; -import { createBaseResolver, DataResponse } from "./baseTypes.js"; - -@ObjectType() -export default 
class GetAttestationsSchemaResponse extends DataResponse( - AttestationSchema, -) {} - -const AttestationSchemaBaseResolver = createBaseResolver("attestationSchema"); - -@Resolver(() => AttestationSchema) -class AttestationSchemaResolver extends AttestationSchemaBaseResolver { - @Query(() => GetAttestationsSchemaResponse) - async attestationSchemas(@Args() args: GetAttestationSchemasArgs) { - return await this.getAttestationSchemas(args); - } - - @FieldResolver({ nullable: true }) - async records(@Root() schema: Partial) { - return await this.getAttestations({ - where: { supported_schemas_id: { eq: schema.id } }, - }); - } -} - -export { AttestationSchemaResolver }; diff --git a/src/graphql/schemas/resolvers/baseTypes.ts b/src/graphql/schemas/resolvers/baseTypes.ts deleted file mode 100644 index 450bb169..00000000 --- a/src/graphql/schemas/resolvers/baseTypes.ts +++ /dev/null @@ -1,442 +0,0 @@ -import { container } from "tsyringe"; -import { type ClassType, Field, Int, ObjectType, Resolver } from "type-graphql"; -import { SupabaseCachingService } from "../../../services/SupabaseCachingService.js"; -import { SupabaseDataService } from "../../../services/SupabaseDataService.js"; -import { GetAllowlistRecordsArgs } from "../args/allowlistRecordArgs.js"; -import { GetAttestationsArgs } from "../args/attestationArgs.js"; -import { GetAttestationSchemasArgs } from "../args/attestationSchemaArgs.js"; -import { GetBlueprintArgs } from "../args/blueprintArgs.js"; -import { GetContractsArgs } from "../args/contractArgs.js"; -import { GetFractionsArgs } from "../args/fractionArgs.js"; -import { GetHypercertsArgs } from "../args/hypercertsArgs.js"; -import { GetMetadataArgs } from "../args/metadataArgs.js"; -import { GetOrdersArgs } from "../args/orderArgs.js"; -import { GetSalesArgs } from "../args/salesArgs.js"; -import { GetSignatureRequestArgs } from "../args/signatureRequestArgs.js"; -import { GetUserArgs } from "../args/userArgs.js"; -import { GetCollectionsArgs } from 
"../args/collectionArgs.js"; - -export function DataResponse( - TItemClass: ClassType, -) { - @ObjectType() - abstract class DataResponseClass { - @Field(() => [TItemClass], { nullable: true }) - data?: TItem[]; - - @Field(() => Int, { nullable: true }) - count?: number; - } - - return DataResponseClass; -} - -export function createBaseResolver( - entityFieldName: string, -) { - @Resolver() - class BaseResolver { - readonly supabaseCachingService = container.resolve(SupabaseCachingService); - readonly supabaseDataService = container.resolve(SupabaseDataService); - - getMetadataWithoutImage(args: GetMetadataArgs, single: boolean = false) { - console.debug( - `[${entityFieldName}Resolver::getMetadata] Fetching metadata`, - ); - - try { - const queries = - this.supabaseCachingService.getMetadataWithoutImage(args); - if (single) { - return queries.data.executeTakeFirst(); - } - - return this.supabaseCachingService.db - .transaction() - .execute(async (transaction) => { - const dataRes = await transaction.executeQuery(queries.data); - const countRes = await transaction.executeQuery(queries.count); - return { - data: dataRes.rows, - count: countRes.rows[0].count, - }; - }); - } catch (e) { - const error = e as Error; - throw new Error( - `[${entityFieldName}Resolver::getMetadata] Error fetching metadata: ${error.message}`, - ); - } - } - - getBlueprints(args: GetBlueprintArgs, single: boolean = false) { - console.debug( - `[${entityFieldName}Resolver::getBlueprints] Fetching blueprints`, - ); - - try { - const queries = this.supabaseDataService.getBlueprints(args); - if (single) { - return queries.data.executeTakeFirst(); - } - - return this.supabaseDataService.db - .transaction() - .execute(async (transaction) => { - const dataRes = await transaction.executeQuery(queries.data); - const countRes = await transaction.executeQuery(queries.count); - return { - data: dataRes.rows, - count: countRes.rows[0].count, - }; - }); - } catch (e) { - const error = e as Error; - throw 
new Error( - `[${entityFieldName}Resolver::getBlueprints] Error fetching blueprints: ${error.message}`, - ); - } - } - - getContracts(args: GetContractsArgs, single: boolean = false) { - console.debug( - `[${entityFieldName}Resolver::getContract] Fetching contracts`, - ); - - try { - const queries = this.supabaseCachingService.getContracts(args); - if (single) { - return queries.data.executeTakeFirst(); - } - - return this.supabaseCachingService.db - .transaction() - .execute(async (transaction) => { - const dataRes = await transaction.executeQuery(queries.data); - const countRes = await transaction.executeQuery(queries.count); - return { - data: dataRes.rows, - count: countRes.rows[0].count, - }; - }); - } catch (e) { - const error = e as Error; - throw new Error( - `[${entityFieldName}Resolver::getContract] Error fetching contracts: ${error.message}`, - ); - } - } - - getHypercerts(args: GetHypercertsArgs, single: boolean = false) { - console.debug( - `[${entityFieldName}Resolver::getHypercerts] Fetching hypercerts`, - ); - - try { - const queries = this.supabaseCachingService.getHypercerts(args); - if (single) { - return queries.data.executeTakeFirst(); - } - - return this.supabaseCachingService.db - .transaction() - .execute(async (transaction) => { - const dataRes = await transaction.executeQuery(queries.data); - - const countRes = await transaction.executeQuery(queries.count); - - return { - data: dataRes.rows, - count: countRes.rows[0].count, - }; - }); - } catch (e) { - const error = e as Error; - throw new Error( - `[${entityFieldName}Resolver::getHypercerts] Error fetching hypercerts: ${error.message}`, - ); - } - } - - getFractions(args: GetFractionsArgs, single: boolean = false) { - console.debug( - `[${entityFieldName}Resolver::getFractions] Fetching fractions`, - ); - - try { - const queries = this.supabaseCachingService.getFractions(args); - if (single) { - return queries.data.executeTakeFirst(); - } - - return this.supabaseCachingService.db - 
.transaction() - .execute(async (transaction) => { - const dataRes = await transaction.executeQuery(queries.data); - const countRes = await transaction.executeQuery(queries.count); - - return { - data: dataRes.rows, - count: countRes.rows[0].count, - }; - }); - } catch (e) { - const error = e as Error; - throw new Error( - `[${entityFieldName}Resolver::getFractions] Error fetching fractions: ${error.message}`, - ); - } - } - - getAllowlistRecords( - args: GetAllowlistRecordsArgs, - single: boolean = false, - ) { - console.debug( - `[${entityFieldName}Resolver::getAllowlistRecords] Fetching allowlist records`, - ); - - try { - const queries = this.supabaseCachingService.getAllowlistRecords(args); - if (single) { - return queries.data.executeTakeFirst(); - } - - return this.supabaseCachingService.db - .transaction() - .execute(async (transaction) => { - const dataRes = await transaction.executeQuery(queries.data); - const countRes = await transaction.executeQuery(queries.count); - return { - data: dataRes.rows, - count: countRes.rows[0].count, - }; - }); - } catch (e) { - const error = e as Error; - throw new Error( - `[${entityFieldName}Resolver::getAllowlistRecords] Error fetching allowlist records: ${error.message}`, - ); - } - } - - getAttestationSchemas( - args: GetAttestationSchemasArgs, - single: boolean = false, - ) { - console.debug( - `[${entityFieldName}Resolver::getAttestationSchemas] Fetching attestation schemas`, - ); - - try { - const queries = this.supabaseCachingService.getAttestationSchemas(args); - if (single) { - return queries.data.executeTakeFirst(); - } - - return this.supabaseCachingService.db - .transaction() - .execute(async (transaction) => { - const dataRes = await transaction.executeQuery(queries.data); - const countRes = await transaction.executeQuery(queries.count); - return { - data: dataRes.rows, - count: countRes.rows[0].count, - }; - }); - } catch (e) { - const error = e as Error; - throw new Error( - 
`[${entityFieldName}Resolver::getAttestationSchemas] Error fetching attestation schemas: ${error.message}`, - ); - } - } - - async getAttestations(args: GetAttestationsArgs, single: boolean = false) { - console.debug( - `[${entityFieldName}Resolver::getAttestations] Fetching attestations`, - ); - - try { - const queries = this.supabaseCachingService.getAttestations(args); - - if (single) { - const res = await queries.data.executeTakeFirst(); - return res ? this.parseAttestation(res) : null; - } - - return this.supabaseCachingService.db - .transaction() - .execute(async (transaction) => { - const dataRes = await transaction.executeQuery(queries.data); - const countRes = await transaction.executeQuery(queries.count); - return { - data: dataRes ? dataRes.rows?.map(this.parseAttestation) : [], - count: countRes.rows[0].count, - }; - }); - } catch (e) { - const error = e as Error; - throw new Error( - `[${entityFieldName}Resolver::getAttestations] Error fetching attestations: ${error.message}`, - ); - } - } - - getSales(args: GetSalesArgs, single: boolean = false) { - console.debug(`[${entityFieldName}Resolver::getSales] Fetching sales`); - - try { - const queries = this.supabaseCachingService.getSales(args); - if (single) { - return queries.data.executeTakeFirst(); - } - - return this.supabaseCachingService.db - .transaction() - .execute(async (transaction) => { - const dataRes = await transaction.executeQuery(queries.data); - const countRes = await transaction.executeQuery(queries.count); - return { - data: dataRes.rows, - count: countRes.rows[0].count, - }; - }); - } catch (e) { - const error = e as Error; - throw new Error( - `[${entityFieldName}Resolver::getSales] Error fetching sales: ${error.message}`, - ); - } - } - - getUsers(args: GetUserArgs, single: boolean = false) { - console.debug(`[${entityFieldName}Resolver::getUsers] Fetching users`); - - try { - const queries = this.supabaseDataService.getUsers(args); - if (single) { - return 
queries.data.executeTakeFirst(); - } - - return this.supabaseDataService.db - .transaction() - .execute(async (transaction) => { - const dataRes = await transaction.executeQuery(queries.data); - const countRes = await transaction.executeQuery(queries.count); - return { - data: dataRes.rows, - count: countRes.rows[0].count, - }; - }); - } catch (e) { - const error = e as Error; - throw new Error( - `[${entityFieldName}Resolver::getUsers] Error fetching users: ${error.message}`, - ); - } - } - - getOrders(args: GetOrdersArgs, single: boolean = false) { - console.debug(`[${entityFieldName}Resolver::getOrders] Fetching orders`); - - try { - const queries = this.supabaseDataService.getOrders(args); - if (single) { - return queries.data.executeTakeFirst(); - } - - return this.supabaseDataService.db - .transaction() - .execute(async (transaction) => { - const dataRes = await transaction.executeQuery(queries.data); - const countRes = await transaction.executeQuery(queries.count); - return { - data: dataRes.rows, - count: countRes.rows[0].count, - }; - }); - } catch (e) { - const error = e as Error; - throw new Error( - `[${entityFieldName}Resolver::getOrders] Error fetching orders: ${error.message}`, - ); - } - } - - parseAttestation(item: { [K in keyof T]: T[K] }) { - const decodedData = item?.data; - // TODO cleaner handling of bigints in created attestations - if (decodedData?.token_id) { - decodedData.token_id = BigInt(decodedData.token_id).toString(); - } - return { - ...item, - attestation: decodedData, - }; - } - - getSignatureRequests( - args: GetSignatureRequestArgs, - single: boolean = false, - ) { - console.debug( - `[${entityFieldName}Resolver::getSignatureRequests] Fetching signature requests`, - ); - - try { - const queries = this.supabaseDataService.getSignatureRequests(args); - if (single) { - return queries.data.executeTakeFirst(); - } - - return this.supabaseDataService.db - .transaction() - .execute(async (transaction) => { - const dataRes = await 
transaction.executeQuery(queries.data); - const countRes = await transaction.executeQuery(queries.count); - return { - data: dataRes.rows, - count: countRes.rows[0].count, - }; - }); - } catch (e) { - const error = e as Error; - throw new Error( - `[${entityFieldName}Resolver::getSignatureRequests] Error fetching signature requests: ${error.message}`, - ); - } - } - - getCollections(args: GetCollectionsArgs, single: boolean = false) { - console.debug( - `[${entityFieldName}Resolver::getCollections] Fetching collections`, - ); - - try { - const queries = this.supabaseDataService.getCollections(args); - if (single) { - return queries.data.executeTakeFirst(); - } - - return this.supabaseDataService.db - .transaction() - .execute(async (transaction) => { - const dataRes = await transaction.executeQuery(queries.data); - const countRes = await transaction.executeQuery(queries.count); - return { - data: dataRes.rows, - count: countRes.rows[0].count, - }; - }); - } catch (e) { - const error = e as Error; - throw new Error( - `[${entityFieldName}Resolver::getCollections] Error fetching collections: ${error.message}`, - ); - } - } - } - - return BaseResolver; -} diff --git a/src/graphql/schemas/resolvers/blueprintResolver.ts b/src/graphql/schemas/resolvers/blueprintResolver.ts deleted file mode 100644 index 5f2e8ff7..00000000 --- a/src/graphql/schemas/resolvers/blueprintResolver.ts +++ /dev/null @@ -1,70 +0,0 @@ -import { - Args, - FieldResolver, - ObjectType, - Query, - Resolver, - Root, -} from "type-graphql"; -import { createBaseResolver, DataResponse } from "./baseTypes.js"; -import { Blueprint } from "../typeDefs/blueprintTypeDefs.js"; -import { GetBlueprintArgs } from "../args/blueprintArgs.js"; -import _ from "lodash"; -import { DataDatabase } from "../../../types/kyselySupabaseData.js"; - -@ObjectType() -export class GetBlueprintResponse extends DataResponse(Blueprint) {} - -const BlueprintBaseResolver = createBaseResolver("blueprint"); - -@Resolver(() => Blueprint) 
-class BlueprintResolver extends BlueprintBaseResolver { - @Query(() => GetBlueprintResponse) - async blueprints(@Args() args: GetBlueprintArgs) { - const { data, count } = await this.getBlueprints(args); - - // Deduplicate by blueprint id - const formattedData = _.chain( - data as DataDatabase["blueprints_with_admins"][], - ) - .groupBy("id") - .map((blueprints) => { - const admins = blueprints.map( - ({ - admin_address, - admin_chain_id, - avatar, - display_name, - hypercert_ids, - }) => ({ - address: admin_address, - chain_id: admin_chain_id, - avatar, - display_name, - hypercert_ids, - }), - ); - return { - ...blueprints[0], - admins, - }; - }); - - return { - data: formattedData, - count, - }; - } - - @FieldResolver() - async hypercerts(@Root() blueprint: Blueprint) { - const hypercertIds = blueprint.hypercert_ids; - const { data: hypercerts, count } = await this.getHypercerts({ - where: { hypercert_id: { in: hypercertIds } }, - }); - - return { data: hypercerts, count }; - } -} - -export { BlueprintResolver }; diff --git a/src/graphql/schemas/resolvers/collectionResolver.ts b/src/graphql/schemas/resolvers/collectionResolver.ts deleted file mode 100644 index 75dc4d54..00000000 --- a/src/graphql/schemas/resolvers/collectionResolver.ts +++ /dev/null @@ -1,96 +0,0 @@ -import { - Args, - FieldResolver, - ObjectType, - Query, - Resolver, - Root, -} from "type-graphql"; - -import { GetCollectionsArgs } from "../args/collectionArgs.js"; -import { Collection } from "../typeDefs/collectionTypeDefs.js"; -import { Blueprint } from "../typeDefs/blueprintTypeDefs.js"; -import { User } from "../typeDefs/userTypeDefs.js"; - -import { createBaseResolver, DataResponse } from "./baseTypes.js"; -import GetHypercertsResponse from "./hypercertResolver.js"; - -@ObjectType() -class GetCollectionsResponse extends DataResponse(Collection) {} - -const CollectionBaseResolver = createBaseResolver("collection"); - -@Resolver(() => Collection) -class CollectionResolver extends 
CollectionBaseResolver { - @Query(() => GetCollectionsResponse) - async collections(@Args() args: GetCollectionsArgs) { - try { - return this.getCollections(args); - } catch (e) { - console.error("[CollectionResolver::collections] Error:", e); - throw new Error(`Error fetching collections: ${(e as Error).message}`); - } - } - - @FieldResolver(() => GetHypercertsResponse) - async hypercerts(@Root() collection: Collection) { - if (!collection.id) { - console.error( - "[CollectionResolver::hypercerts] Collection ID is undefined", - ); - return []; - } - - const hypercerts = await this.supabaseDataService.getCollectionHypercerts( - collection.id, - ); - - if (!hypercerts?.length) { - return []; - } - - const hypercertIds = hypercerts - .map((h) => h.hypercert_id) - .filter((id): id is string => id !== undefined); - - if (hypercertIds.length === 0) { - return []; - } - - const hypercertsData = await this.getHypercerts({ - where: { hypercert_id: { in: hypercertIds } }, - }); - - return hypercertsData.data || []; - } - - @FieldResolver(() => [User]) - async admins(@Root() collection: Collection) { - if (!collection.id) { - console.error("[CollectionResolver::admins] Collection ID is undefined"); - return []; - } - - const admins = await this.supabaseDataService.getCollectionAdmins( - collection.id, - ); - return admins || []; - } - - @FieldResolver(() => [Blueprint]) - async blueprints(@Root() collection: Collection) { - if (!collection.id) { - console.error( - "[CollectionResolver::blueprints] Collection ID is undefined", - ); - return []; - } - - const blueprints = await this.supabaseDataService.getCollectionBlueprints( - collection.id, - ); - return blueprints || []; - } -} - -export { CollectionResolver }; diff --git a/src/graphql/schemas/resolvers/contractResolver.ts b/src/graphql/schemas/resolvers/contractResolver.ts deleted file mode 100644 index 6f81022e..00000000 --- a/src/graphql/schemas/resolvers/contractResolver.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { 
Args, ObjectType, Query, Resolver } from "type-graphql"; -import { Contract } from "../typeDefs/contractTypeDefs.js"; -import { GetContractsArgs } from "../args/contractArgs.js"; -import { createBaseResolver, DataResponse } from "./baseTypes.js"; - -@ObjectType({ description: "Pointer to a contract deployed on a chain" }) -export default class GetContractsResponse extends DataResponse(Contract) {} - -const ContractBaseResolver = createBaseResolver("contract"); - -@Resolver(() => Contract) -class ContractResolver extends ContractBaseResolver { - @Query(() => GetContractsResponse) - async contracts(@Args() args: GetContractsArgs) { - return await this.getContracts(args, false); - } -} - -export { ContractResolver }; diff --git a/src/graphql/schemas/resolvers/fractionResolver.ts b/src/graphql/schemas/resolvers/fractionResolver.ts deleted file mode 100644 index b2c1cf02..00000000 --- a/src/graphql/schemas/resolvers/fractionResolver.ts +++ /dev/null @@ -1,133 +0,0 @@ -import { - Args, - FieldResolver, - ObjectType, - Query, - Resolver, - Root, -} from "type-graphql"; -import { Fraction } from "../typeDefs/fractionTypeDefs.js"; -import { GetFractionsArgs } from "../args/fractionArgs.js"; -import { parseClaimOrFractionId } from "@hypercerts-org/sdk"; -import { createBaseResolver, DataResponse } from "./baseTypes.js"; - -@ObjectType({ description: "Fraction of an hypercert" }) -export default class GetFractionsResponse extends DataResponse(Fraction) {} - -const FractionBaseResolver = createBaseResolver("fraction"); - -@Resolver(() => Fraction) -class FractionResolver extends FractionBaseResolver { - @Query(() => GetFractionsResponse) - async fractions(@Args() args: GetFractionsArgs) { - return await this.getFractions(args); - } - - @FieldResolver() - async metadata(@Root() fraction: Fraction) { - if (!fraction.claims_id) { - return; - } - - return await this.getMetadataWithoutImage( - { - where: { hypercerts: { id: { eq: fraction.claims_id } } }, - }, - true, - ); - } - - 
@FieldResolver() - async orders(@Root() fraction: Fraction) { - if (!fraction.fraction_id) { - return null; - } - - const { id } = parseClaimOrFractionId(fraction.fraction_id); - - if (!id) { - console.warn( - `[FractionResolver::orders] Error parsing hypercert_id for fraction ${fraction.id}`, - ); - return null; - } - - try { - const res = await this.supabaseDataService.getOrdersForFraction( - id.toString(), - ); - - if (!res) { - console.warn( - `[FractionResolver::orders] Error fetching orders for fraction ${fraction.id}: `, - res, - ); - return { data: [] }; - } - - const { data, error, count } = res; - - if (error) { - console.warn( - `[FractionResolver::orders] Error fetching orders for fraction ${fraction.id}: `, - error, - ); - return { data: [] }; - } - - return { data: data || [], count: count || 0 }; - } catch (e) { - const error = e as Error; - throw new Error( - `[FractionResolver::orders] Error fetching orders for fraction ${fraction.id}: ${error.message}`, - ); - } - } - - @FieldResolver() - async sales(@Root() fraction: Fraction) { - if (!fraction.fraction_id) { - return null; - } - - const { id } = parseClaimOrFractionId(fraction.fraction_id); - - if (!id) { - console.warn( - `[FractionResolver::sales] Error parsing hypercert_id for fraction ${fraction.id}`, - ); - return null; - } - - try { - const res = await this.supabaseCachingService.getSalesForTokenIds([id]); - - if (!res) { - console.warn( - `[FractionResolver::sales] Error fetching sales for fraction ${fraction.id}: `, - res, - ); - return { data: [] }; - } - - const { data, error, count } = res; - - if (error) { - console.warn( - `[FractionResolver::sales] Error fetching sales for fraction ${fraction.id}: `, - error, - ); - return { data: [] }; - } - - return { data: data || [], count: count || 0 }; - } catch (e) { - const error = e as Error; - throw new Error( - `[FractionResolver::sales] Error fetching sales for fraction ${fraction.id}: ${error.message}`, - ); - } - } -} - -export { 
FractionResolver }; diff --git a/src/graphql/schemas/resolvers/hyperboardResolver.ts b/src/graphql/schemas/resolvers/hyperboardResolver.ts deleted file mode 100644 index b29dc858..00000000 --- a/src/graphql/schemas/resolvers/hyperboardResolver.ts +++ /dev/null @@ -1,145 +0,0 @@ -import { Args, ObjectType, Query, Resolver } from "type-graphql"; -import { Hyperboard } from "../typeDefs/hyperboardTypeDefs.js"; -import { GetHyperboardsArgs } from "../args/hyperboardArgs.js"; -import { createBaseResolver, DataResponse } from "./baseTypes.js"; -import _ from "lodash"; -import { processCollectionToSection } from "../../../utils/processCollectionToSection.js"; -import { processSectionsToHyperboardOwnership } from "../../../utils/processSectionsToHyperboardOwnership.js"; - -@ObjectType() -class GetHyperboardsResponse extends DataResponse(Hyperboard) {} - -const HyperboardBaseResolver = createBaseResolver("hyperboard"); - -@Resolver(() => Hyperboard) -class HyperboardResolver extends HyperboardBaseResolver { - @Query(() => GetHyperboardsResponse) - async hyperboards(@Args() args: GetHyperboardsArgs) { - try { - const res = await this.supabaseDataService.getHyperboards(args); - - const hypercertIds = - res.data - ?.map((hyperboard) => - hyperboard.collections.map((collection) => - collection.hypercerts.map((hypercert) => hypercert.hypercert_id), - ), - ) - .flat(2) || []; - - const [fractions, allowlistEntries, hypercerts] = await Promise.all([ - this.getFractions({ - where: { hypercert_id: { in: hypercertIds } }, - }).then((res) => res.data), - this.getAllowlistRecords({ - where: { - hypercert_id: { in: hypercertIds }, - claimed: { eq: false }, - }, - }).then((res) => res.data), - this.getHypercerts({ - where: { hypercert_id: { in: hypercertIds } }, - }).then((res) => res.data), - ]); - - const metadata = await this.getMetadataWithoutImage({ - where: { hypercerts: { hypercert_id: { in: hypercertIds } } }, - }) - .then((res) => res.data) - .then((res) => - res.map((metadata) 
=> { - const hypercert = hypercerts.find( - (hypercert) => hypercert.uri === metadata.uri, - ); - return { - ...(metadata || {}), - hypercert_id: hypercert?.hypercert_id, - }; - }), - ) - .then((res) => res.map((metadata) => _.omit(metadata, "image"))); - - // Get a deduplicated list of all owners - const ownerAddresses = _.uniq([ - ...fractions.map((x) => x?.owner_address), - ...allowlistEntries.flatMap((x) => x?.user_address), - ...(res.data?.flatMap( - (hyperboard) => - hyperboard?.collections?.flatMap((collection) => - collection.blueprints.flatMap( - (blueprint) => blueprint.minter_address, - ), - ) || [], - ) || []), - ]).filter((x) => !!x) as string[]; - - const users = await this.getUsers({ - where: { address: { in: ownerAddresses } }, - }).then((res) => res.data); - - const metadataByUri = _.keyBy(metadata, "uri"); - const { error, data, count } = res; - - if (error) { - console.warn( - `[HyperboardResolver::hyperboards] Error fetching hyperboards: `, - error, - ); - return { data }; - } - - const hyperboardWithSections = - res.data?.map((hyperboard) => { - const sections = hyperboard.collections.map((collection) => - processCollectionToSection({ - collection, - hypercert_metadata: hyperboard.hypercert_metadata, - blueprints: collection.blueprints, - fractions: fractions - .filter((x) => !!x) - .filter((fraction) => - collection.hypercerts - .map((x) => x.hypercert_id) - .includes(fraction.hypercert_id), - ), - blueprintMetadata: collection.blueprint_metadata, - allowlistEntries: allowlistEntries - .filter((entry) => !!entry) - .filter((entry) => - collection.hypercerts - .map((x) => x.hypercert_id) - .includes(entry.hypercert_id), - ), - hypercerts: hypercerts - .filter((x) => !!x) - .map((hypercert) => ({ - ...hypercert, - name: metadataByUri[hypercert.uri]?.name, - })), - users: users.filter((x) => !!x), - }), - ); - const owners = processSectionsToHyperboardOwnership(sections); - return { - ...hyperboard, - owners, - sections: { - data: sections, - 
count: sections.length, - }, - }; - }) || []; - - return { - data: hyperboardWithSections, - count: count ? count : data?.length, - }; - } catch (e) { - throw new Error( - `[HyperboardResolver::hyperboards] Error fetching hyperboards: ${(e as Error).message}`, - ); - } - } -} - -export { HyperboardResolver }; diff --git a/src/graphql/schemas/resolvers/hypercertResolver.ts b/src/graphql/schemas/resolvers/hypercertResolver.ts deleted file mode 100644 index a97ed4c3..00000000 --- a/src/graphql/schemas/resolvers/hypercertResolver.ts +++ /dev/null @@ -1,196 +0,0 @@ -import { - Args, - FieldResolver, - ObjectType, - Query, - Resolver, - Root, -} from "type-graphql"; -import { Hypercert } from "../typeDefs/hypercertTypeDefs.js"; -import { GetHypercertsArgs } from "../args/hypercertsArgs.js"; -import { parseClaimOrFractionId } from "@hypercerts-org/sdk"; -import _ from "lodash"; -import { getCheapestOrder } from "../../../utils/getCheapestOrder.js"; -import { getMaxUnitsForSaleInOrders } from "../../../utils/getMaxUnitsForSaleInOrders.js"; -import { addPriceInUsdToOrder } from "../../../utils/addPriceInUSDToOrder.js"; -import { Database } from "../../../types/supabaseData.js"; -import { createBaseResolver, DataResponse } from "./baseTypes.js"; -import "reflect-metadata"; - -@ObjectType({ - description: - "Hypercert with metadata, contract, orders, sales and fraction information", -}) -export default class GetHypercertsResponse extends DataResponse(Hypercert) {} - -const HypercertBaseResolver = createBaseResolver("hypercert"); - -@Resolver(() => Hypercert) -class HypercertResolver extends HypercertBaseResolver { - @Query(() => GetHypercertsResponse) - async hypercerts(@Args() args: GetHypercertsArgs) { - return await this.getHypercerts(args); - } - - @FieldResolver({ nullable: true }) - async metadata(@Root() hypercert: Hypercert) { - if (!hypercert.uri) { - return; - } - - return await this.getMetadataWithoutImage( - { where: { uri: { eq: hypercert.uri } } }, - true, - ); 
- } - - @FieldResolver() - async contract(@Root() hypercert: Hypercert) { - if (!hypercert.contracts_id) { - return; - } - - return await this.getContracts( - { where: { id: { eq: hypercert.contracts_id } } }, - true, - ); - } - - @FieldResolver() - async attestations(@Root() hypercert: Hypercert) { - if (!hypercert.id) { - return; - } - - return await this.getAttestations({ - where: { hypercerts: { id: { eq: hypercert.id } } }, - }); - } - - @FieldResolver() - async fractions(@Root() hypercert: Hypercert) { - if (!hypercert.hypercert_id) { - return; - } - - return await this.getFractions({ - where: { hypercert_id: { eq: hypercert.hypercert_id } }, - }); - } - - @FieldResolver() - async orders(@Root() hypercert: Hypercert) { - if (!hypercert.id || !hypercert.hypercert_id) { - return; - } - - const defaultValue = { - data: [], - count: 0, - totalUnitsForSale: BigInt(0), - }; - - try { - const { data: fractionsRes } = await this.getFractions({ - where: { hypercert_id: { eq: hypercert.hypercert_id } }, - }); - - if (!fractionsRes) { - console.warn( - `[HypercertResolver::orders] Error fetching fractions for ${hypercert.hypercert_id}`, - fractionsRes, - ); - return defaultValue; - } - - const orders = await this.getOrders({ - where: { hypercert_id: { eq: hypercert.hypercert_id } }, - }); - - if (!orders) { - console.warn( - `[HypercertResolver::orders] Error fetching orders for ${hypercert.hypercert_id}`, - orders, - ); - return defaultValue; - } - - const { data: ordersData, count: ordersCount } = orders; - - const ordersByFraction = _.groupBy(ordersData, (order) => - order.itemIds[0].toString(), - ); - - const { chainId, contractAddress } = parseClaimOrFractionId( - hypercert.hypercert_id, - ); - - const ordersWithPrices: (Database["public"]["Tables"]["marketplace_orders"]["Row"] & { - priceInUSD: string; - pricePerPercentInUSD: string; - })[] = []; - - const activeOrders = ordersData.filter((order) => !order.invalidated); - const activeOrdersByFraction = 
_.groupBy(activeOrders, (order) => - order.itemIds[0].toString(), - ); - // For each fraction, find all orders and find the max units for sale for that fraction - const totalUnitsForSale = ( - await Promise.all( - Object.keys(activeOrdersByFraction).map(async (tokenId) => { - const fractionId = `${chainId}-${contractAddress}-${tokenId}`; - const fraction = fractionsRes.find( - (fraction) => fraction.fraction_id === fractionId, - ); - - if (!fraction) { - console.error( - `[HypercertResolver::orders] Fraction not found for ${fractionId}`, - ); - return BigInt(0); - } - - const ordersPerFraction = ordersByFraction[tokenId]; - const ordersWithPricesForChain = await Promise.all( - ordersPerFraction.map(async (order) => { - return addPriceInUsdToOrder(order, hypercert.units as bigint); - }), - ); - ordersWithPrices.push(...ordersWithPricesForChain); - return getMaxUnitsForSaleInOrders( - ordersPerFraction, - BigInt(fraction.units), - ); - }), - ) - ).reduce((acc, val) => acc + val, BigInt(0)); - - const cheapestOrder = getCheapestOrder(ordersWithPrices); - - return { - totalUnitsForSale, - cheapestOrder, - data: ordersWithPrices || [], - count: ordersCount || 0, - }; - } catch (e) { - console.error( - `[HypercertResolver::orders] Error fetching orders for ${hypercert.hypercert_id}: ${(e as Error).toString()}`, - ); - return defaultValue; - } - } - - @FieldResolver() - async sales(@Root() hypercert: Hypercert) { - if (!hypercert.hypercert_id) { - return null; - } - - return await this.getSales({ - where: { hypercert_id: { eq: hypercert.hypercert_id } }, - }); - } -} - -export { HypercertResolver }; diff --git a/src/graphql/schemas/resolvers/metadataResolver.ts b/src/graphql/schemas/resolvers/metadataResolver.ts deleted file mode 100644 index 1f56ded1..00000000 --- a/src/graphql/schemas/resolvers/metadataResolver.ts +++ /dev/null @@ -1,44 +0,0 @@ -import { - Args, - FieldResolver, - ObjectType, - Query, - Resolver, - Root, -} from "type-graphql"; -import { inject, 
singleton } from "tsyringe"; -import { Metadata } from "../typeDefs/metadataTypeDefs.js"; -import { GetMetadataArgs } from "../args/metadataArgs.js"; -import { createBaseResolver, DataResponse } from "./baseTypes.js"; -import { MetadataImageService } from "../../../services/MetadataImageService.js"; - -@ObjectType() -export class GetMetadataResponse extends DataResponse(Metadata) {} - -const MetadataBaseResolver = createBaseResolver("metadata"); - -@singleton() -@Resolver(() => Metadata) -class MetadataResolver extends MetadataBaseResolver { - constructor( - @inject(MetadataImageService) private imageService: MetadataImageService, - ) { - super(); - } - - @Query(() => GetMetadataResponse) - async metadata(@Args() args: GetMetadataArgs) { - return await this.getMetadataWithoutImage(args); - } - - @FieldResolver(() => String, { - nullable: true, - description: "Base64 encoded representation of the image of the hypercert", - }) - async image(@Root() metadata: Metadata) { - if (!metadata.uri) return null; - return await this.imageService.getImageByUri(metadata.uri); - } -} - -export { MetadataResolver }; diff --git a/src/graphql/schemas/resolvers/orderResolver.ts b/src/graphql/schemas/resolvers/orderResolver.ts deleted file mode 100644 index 0e7947b4..00000000 --- a/src/graphql/schemas/resolvers/orderResolver.ts +++ /dev/null @@ -1,130 +0,0 @@ -import { - Args, - FieldResolver, - ObjectType, - Query, - Resolver, - Root, -} from "type-graphql"; -import { Order } from "../typeDefs/orderTypeDefs.js"; -import { GetOrdersArgs } from "../args/orderArgs.js"; -import { getHypercertTokenId } from "../../../utils/tokenIds.js"; -import { getAddress } from "viem"; -import { addPriceInUsdToOrder } from "../../../utils/addPriceInUSDToOrder.js"; -import _ from "lodash"; -import { createBaseResolver, DataResponse } from "./baseTypes.js"; - -@ObjectType() -export default class GetOrdersResponse extends DataResponse(Order) {} - -const OrderBaseResolver = createBaseResolver("order"); - 
-@Resolver(() => Order) -class OrderResolver extends OrderBaseResolver { - @Query(() => GetOrdersResponse) - async orders(@Args() args: GetOrdersArgs, single: boolean = false) { - try { - const ordersRes = await this.getOrders(args, single); - - if (!ordersRes) { - return { - data: [], - count: 0, - }; - } - - const { data, count } = ordersRes; - - const allHypercertIds = _.uniq(data.map((order) => order.hypercert_id)); - // TODO: Update this once array filters are available - const allHypercerts = await Promise.all( - allHypercertIds.map(async (hypercertId) => { - return await this.getHypercerts( - { - where: { - hypercert_id: { - eq: hypercertId, - }, - }, - }, - true, - ); - }), - ).then((res) => - _.keyBy( - res.filter((hypercert) => !!hypercert), - (hypercert) => hypercert?.hypercert_id?.toLowerCase(), - ), - ); - - const ordersWithPrices = await Promise.all( - data.map(async (order) => { - const hypercert = allHypercerts[order.hypercert_id.toLowerCase()]; - if (!hypercert?.units) { - console.warn( - `[OrderResolver::orders] No hypercert found for hypercert_id: ${order.hypercert_id}`, - ); - return order; - } - return addPriceInUsdToOrder(order, hypercert.units as bigint); - }), - ); - - return { - data: ordersWithPrices, - count: count ? 
count : ordersWithPrices?.length, - }; - } catch (e) { - throw new Error( - `[OrderResolver::orders] Error fetching orders: ${(e as Error).message}`, - ); - } - } - - @FieldResolver({ nullable: true }) - async hypercert(@Root() order: Order) { - const tokenId = order.itemIds?.[0]; - const collectionId = order.collection; - const chainId = order.chainId; - - if (!tokenId || !collectionId || !chainId) { - console.warn( - `[OrderResolver::hypercert] Missing tokenId or collectionId`, - ); - return null; - } - - const hypercertId = getHypercertTokenId(BigInt(tokenId)); - const formattedHypercertId = `${chainId}-${getAddress(collectionId)}-${hypercertId.toString()}`; - const hypercert = await this.getHypercerts( - { - where: { - hypercert_id: { - eq: formattedHypercertId, - }, - }, - }, - true, - ); - - const metadata = await this.getMetadataWithoutImage( - { - where: { - hypercerts: { - hypercert_id: { - eq: formattedHypercertId, - }, - }, - }, - }, - true, - ); - - return { - ...hypercert, - metadata: metadata || null, - }; - } -} - -export { OrderResolver }; diff --git a/src/graphql/schemas/resolvers/salesResolver.ts b/src/graphql/schemas/resolvers/salesResolver.ts deleted file mode 100644 index 760079c5..00000000 --- a/src/graphql/schemas/resolvers/salesResolver.ts +++ /dev/null @@ -1,78 +0,0 @@ -import { - Args, - FieldResolver, - ObjectType, - Query, - Resolver, - Root, -} from "type-graphql"; -import { Sale } from "../typeDefs/salesTypeDefs.js"; -import { GetSalesArgs } from "../args/salesArgs.js"; -import { createBaseResolver, DataResponse } from "./baseTypes.js"; - -@ObjectType() -export default class GetSalesResponse extends DataResponse(Sale) {} - -const SalesBaseResolver = createBaseResolver("sales"); - -@Resolver(() => Sale) -class SalesResolver extends SalesBaseResolver { - @Query(() => GetSalesResponse) - async sales(@Args() args: GetSalesArgs) { - return await this.getSales(args); - } - - @FieldResolver({ nullable: true }) - async hypercert(@Root() sale: 
Sale) { - if (!sale.hypercert_id) { - console.warn(`[SalesResolver::hypercert_id] Missing hypercert_id`); - return null; - } - - const hypercertId = sale.hypercert_id; - const hypercert = await this.getHypercerts( - { - where: { - hypercert_id: { - eq: hypercertId, - }, - }, - }, - true, - ); - - if (!hypercert) { - console.warn( - `[SalesResolver::hypercert] No hypercert found for hypercertId: ${hypercertId}`, - ); - return null; - } - - const metadata = await this.getMetadataWithoutImage( - { - where: { - hypercerts: { - hypercert_id: { - eq: hypercertId, - }, - }, - }, - }, - true, - ); - - if (!metadata) { - console.warn( - `[SalesResolver::hypercert] No metadata found for hypercert: ${hypercertId}`, - ); - return null; - } - - return { - ...hypercert, - metadata: metadata || null, - }; - } -} - -export { SalesResolver }; diff --git a/src/graphql/schemas/resolvers/signatureRequestResolver.ts b/src/graphql/schemas/resolvers/signatureRequestResolver.ts deleted file mode 100644 index e3d8b6ec..00000000 --- a/src/graphql/schemas/resolvers/signatureRequestResolver.ts +++ /dev/null @@ -1,35 +0,0 @@ -import { - Args, - ObjectType, - Query, - Resolver, - FieldResolver, - Root, -} from "type-graphql"; - -import { SignatureRequest } from "../typeDefs/signatureRequestTypeDefs.js"; -import { GetSignatureRequestArgs } from "../args/signatureRequestArgs.js"; - -import { createBaseResolver, DataResponse } from "./baseTypes.js"; - -@ObjectType() -class GetSignatureRequestResponse extends DataResponse(SignatureRequest) {} - -const SignatureRequestBaseResolver = createBaseResolver("signatureRequest"); - -@Resolver(() => SignatureRequest) -class SignatureRequestResolver extends SignatureRequestBaseResolver { - @Query(() => GetSignatureRequestResponse) - async signatureRequests(@Args() args: GetSignatureRequestArgs) { - return await this.getSignatureRequests(args); - } - - @FieldResolver(() => String) - message(@Root() signatureRequest: SignatureRequest): string { - return typeof 
signatureRequest.message === "object" - ? JSON.stringify(signatureRequest.message) - : signatureRequest.message || "could not parse message"; - } -} - -export { SignatureRequestResolver }; diff --git a/src/graphql/schemas/resolvers/userResolver.ts b/src/graphql/schemas/resolvers/userResolver.ts deleted file mode 100644 index 3810052a..00000000 --- a/src/graphql/schemas/resolvers/userResolver.ts +++ /dev/null @@ -1,50 +0,0 @@ -import { - Args, - ObjectType, - Query, - Resolver, - FieldResolver, - Root, -} from "type-graphql"; - -import { User } from "../typeDefs/userTypeDefs.js"; -import { GetUserArgs } from "../args/userArgs.js"; -import { SignatureRequest } from "../typeDefs/signatureRequestTypeDefs.js"; - -import { createBaseResolver, DataResponse } from "./baseTypes.js"; - -@ObjectType() -export default class GetUsersResponse extends DataResponse(User) {} - -const UserBaseResolver = createBaseResolver("user"); - -@Resolver(() => User) -class UserResolver extends UserBaseResolver { - @Query(() => GetUsersResponse) - async users(@Args() args: GetUserArgs) { - return this.getUsers(args); - } - - @FieldResolver(() => [SignatureRequest]) - async signature_requests(@Root() user: User) { - if (!user.address) { - return []; - } - - try { - const queryResult = await this.getSignatureRequests({ - where: { - safe_address: { - eq: user.address, - }, - }, - }); - return queryResult.data || []; - } catch (error) { - console.error("Error fetching signature requests:", error); - return []; - } - } -} - -export { UserResolver }; diff --git a/src/graphql/schemas/typeDefs/allowlistRecordTypeDefs.ts b/src/graphql/schemas/typeDefs/allowlistRecordTypeDefs.ts index 6869ee9b..8d0ae9a4 100644 --- a/src/graphql/schemas/typeDefs/allowlistRecordTypeDefs.ts +++ b/src/graphql/schemas/typeDefs/allowlistRecordTypeDefs.ts @@ -1,12 +1,18 @@ import { Field, ObjectType } from "type-graphql"; -import { BasicTypeDef } from "./baseTypes/basicTypeDef.js"; +import { DataResponse } from 
"../../../lib/graphql/DataResponse.js"; import { EthBigInt } from "../../scalars/ethBigInt.js"; +import { Hypercert } from "./hypercertTypeDefs.js"; @ObjectType({ description: "Records of allow list entries for claimable fractions", simpleResolvers: true, }) -class AllowlistRecord extends BasicTypeDef { +export class AllowlistRecord { + @Field({ + nullable: true, + description: "The ID of the allow list record", + }) + id?: string; @Field({ nullable: true, description: "The hypercert ID the claimable fraction belongs to", @@ -59,6 +65,13 @@ class AllowlistRecord extends BasicTypeDef { description: "The root of the allow list Merkle tree", }) root?: string; + + @Field(() => Hypercert, { + nullable: true, + description: "The hypercert that the allow list record belongs to", + }) + hypercert?: Hypercert; } -export { AllowlistRecord }; +@ObjectType() +export class GetAllowlistRecordResponse extends DataResponse(AllowlistRecord) {} diff --git a/src/graphql/schemas/typeDefs/attestationSchemaTypeDefs.ts b/src/graphql/schemas/typeDefs/attestationSchemaTypeDefs.ts index 9a8a0df3..ca84bd1c 100644 --- a/src/graphql/schemas/typeDefs/attestationSchemaTypeDefs.ts +++ b/src/graphql/schemas/typeDefs/attestationSchemaTypeDefs.ts @@ -1,15 +1,41 @@ import { Field, ObjectType } from "type-graphql"; -import { AttestationBaseType } from "./baseTypes/attestationBaseType.js"; +import { DataResponse } from "../../../lib/graphql/DataResponse.js"; +import { GetAttestationsResponse } from "./attestationTypeDefs.js"; import { AttestationSchemaBaseType } from "./baseTypes/attestationSchemaBaseType.js"; +/** + * GraphQL object type representing an EAS (Ethereum Attestation Service) schema. + * Extends the base type with additional fields for related attestations. 
+ * + * This type provides: + * - All fields from AttestationSchemaBaseType (id, chain_id, schema, resolver, revocable, uid) + * - Additional field for accessing related attestations + * + * @extends {AttestationSchemaBaseType} + */ @ObjectType({ description: "Supported EAS attestation schemas and their related records", }) -class AttestationSchema extends AttestationSchemaBaseType { - @Field(() => [AttestationBaseType], { +export class AttestationSchema extends AttestationSchemaBaseType { + /** + * Collection of attestations that use this schema. + * Includes both the attestation records and a total count. + */ + @Field(() => GetAttestationsResponse, { description: "List of attestations related to the attestation schema", }) - records?: AttestationBaseType[] | null; + attestations?: GetAttestationsResponse | null; } -export { AttestationSchema }; +/** + * GraphQL response type for attestation schema queries. + * Wraps an array of AttestationSchema objects with pagination information. + * + * This type provides: + * - data: Array of attestation schemas + * - count: Total number of schemas matching the query + */ +@ObjectType() +export class GetAttestationsSchemaResponse extends DataResponse( + AttestationSchema, +) {} diff --git a/src/graphql/schemas/typeDefs/attestationTypeDefs.ts b/src/graphql/schemas/typeDefs/attestationTypeDefs.ts index 2e4ceba5..434e3dae 100644 --- a/src/graphql/schemas/typeDefs/attestationTypeDefs.ts +++ b/src/graphql/schemas/typeDefs/attestationTypeDefs.ts @@ -2,6 +2,8 @@ import { Field, ObjectType } from "type-graphql"; import { AttestationBaseType } from "./baseTypes/attestationBaseType.js"; import { HypercertBaseType } from "./baseTypes/hypercertBaseType.js"; import { AttestationSchemaBaseType } from "./baseTypes/attestationSchemaBaseType.js"; +import { Metadata } from "./metadataTypeDefs.js"; +import { DataResponse } from "../../../lib/graphql/DataResponse.js"; @ObjectType({ description: "Attestation on the Ethereum Attestation Service", 
@@ -17,6 +19,14 @@ class Attestation extends AttestationBaseType { description: "Schema related to the attestation", }) eas_schema?: AttestationSchemaBaseType; + + @Field(() => Metadata, { + description: "Metadata related to the attestation", + }) + metadata?: Metadata; } -export { Attestation }; +@ObjectType() +class GetAttestationsResponse extends DataResponse(Attestation) {} + +export { Attestation, GetAttestationsResponse }; diff --git a/src/graphql/schemas/typeDefs/baseTypes/attestationBaseType.ts b/src/graphql/schemas/typeDefs/baseTypes/attestationBaseType.ts index fec73c38..6a17ecb4 100644 --- a/src/graphql/schemas/typeDefs/baseTypes/attestationBaseType.ts +++ b/src/graphql/schemas/typeDefs/baseTypes/attestationBaseType.ts @@ -4,6 +4,7 @@ import { EthBigInt } from "../../../scalars/ethBigInt.js"; import type { Json } from "../../../../types/supabaseCaching.js"; import { GraphQLJSON } from "graphql-scalars"; +// TODO: Add chain ID, contract address, token ID to the attestation @ObjectType() class AttestationBaseType extends BasicTypeDef { @Field(() => ID, { @@ -12,11 +13,12 @@ class AttestationBaseType extends BasicTypeDef { }) uid?: string; @Field({ + name: "schema_uid", nullable: true, description: "Unique identifier of the EAS schema used to create the attestation", }) - schema_uid?: string; + supported_schemas_id?: string; @Field(() => EthBigInt, { nullable: true, @@ -49,11 +51,6 @@ class AttestationBaseType extends BasicTypeDef { description: "Address of the recipient of the attestation", }) recipient?: string; - @Field({ - nullable: true, - description: "Address of the resolver contract for the attestation", - }) - resolver?: string; @Field(() => GraphQLJSON, { nullable: true, description: "Encoded data of the attestation", diff --git a/src/graphql/schemas/typeDefs/baseTypes/attestationSchemaBaseType.ts b/src/graphql/schemas/typeDefs/baseTypes/attestationSchemaBaseType.ts index b5187e03..c872ae4e 100644 --- 
a/src/graphql/schemas/typeDefs/baseTypes/attestationSchemaBaseType.ts +++ b/src/graphql/schemas/typeDefs/baseTypes/attestationSchemaBaseType.ts @@ -2,27 +2,63 @@ import { Field, ID, ObjectType } from "type-graphql"; import { EthBigInt } from "../../../scalars/ethBigInt.js"; import { BasicTypeDef } from "./basicTypeDef.js"; +/** + * Base GraphQL object type for EAS (Ethereum Attestation Service) schemas. + * Provides the core fields that define an attestation schema. + * + * This type provides: + * - Basic identification fields (id from BasicTypeDef) + * - Schema-specific fields (chain_id, uid, schema, resolver, revocable) + * + * Used as a base class for more specific schema types that may add additional fields. + * + * @extends {BasicTypeDef} + */ @ObjectType({ description: "Supported EAS attestation schemas and their related records", }) class AttestationSchemaBaseType extends BasicTypeDef { + /** + * Chain ID where this schema is supported. + * Can be represented as a bigint, number, or string. + */ @Field(() => EthBigInt, { description: "Chain ID of the chains where the attestation schema is supported", }) chain_id?: bigint | number | string; + + /** + * Unique identifier for the schema on EAS. + * This is different from the database id field. + */ @Field(() => ID, { description: "Unique identifier for the attestation schema", }) uid?: string; + + /** + * Address of the resolver contract for this schema. + * The resolver contract handles the validation and processing of attestations. + */ @Field({ description: "Address of the resolver contract for the attestation schema", }) resolver?: string; + + /** + * Whether attestations using this schema can be revoked. + * If true, attesters can revoke their attestations after creation. + */ @Field({ description: "Whether the attestation schema is revocable", }) revocable?: boolean; + + /** + * String representation of the schema definition. + * Defines the structure and types of data that can be attested. 
+ */ @Field({ description: "String representation of the attestation schema", }) diff --git a/src/graphql/schemas/typeDefs/baseTypes/basicTypeDef.ts b/src/graphql/schemas/typeDefs/baseTypes/basicTypeDef.ts index 6d98ce9d..54aaedca 100644 --- a/src/graphql/schemas/typeDefs/baseTypes/basicTypeDef.ts +++ b/src/graphql/schemas/typeDefs/baseTypes/basicTypeDef.ts @@ -2,7 +2,7 @@ import { Field, ID, ObjectType } from "type-graphql"; @ObjectType() class BasicTypeDef { - @Field(() => ID) + @Field(() => ID, { nullable: true }) id?: string; } diff --git a/src/graphql/schemas/typeDefs/baseTypes/hypercertBaseType.ts b/src/graphql/schemas/typeDefs/baseTypes/hypercertBaseType.ts index e56025e5..bc556adc 100644 --- a/src/graphql/schemas/typeDefs/baseTypes/hypercertBaseType.ts +++ b/src/graphql/schemas/typeDefs/baseTypes/hypercertBaseType.ts @@ -1,7 +1,6 @@ -import { Field, ID, ObjectType, Int } from "type-graphql"; -import { BasicTypeDef } from "./basicTypeDef.js"; +import { Field, ID, Int, ObjectType } from "type-graphql"; import { EthBigInt } from "../../../scalars/ethBigInt.js"; -import { Metadata } from "../metadataTypeDefs.js"; +import { BasicTypeDef } from "./basicTypeDef.js"; @ObjectType() class HypercertBaseType extends BasicTypeDef { @@ -37,12 +36,6 @@ class HypercertBaseType extends BasicTypeDef { }) uri?: string; - @Field(() => Metadata, { - nullable: true, - description: "The metadata for the hypercert as referenced by the uri", - }) - metadata?: Metadata; - @Field(() => EthBigInt, { nullable: true }) creation_block_number?: bigint | number | string; @Field(() => EthBigInt, { nullable: true }) @@ -63,6 +56,12 @@ class HypercertBaseType extends BasicTypeDef { description: "Count of sales of fractions that belong to this hypercert", }) sales_count?: number; + + @Field({ + nullable: true, + description: "Whether the hypercert has been burned", + }) + burned?: boolean; } export { HypercertBaseType }; diff --git 
a/src/graphql/schemas/typeDefs/baseTypes/hypercertBaseWithMetadata.ts b/src/graphql/schemas/typeDefs/baseTypes/hypercertBaseWithMetadata.ts new file mode 100644 index 00000000..578eed57 --- /dev/null +++ b/src/graphql/schemas/typeDefs/baseTypes/hypercertBaseWithMetadata.ts @@ -0,0 +1,19 @@ +import { ObjectType } from "type-graphql"; + +import { Field } from "type-graphql"; +import { Metadata } from "../metadataTypeDefs.js"; +import { HypercertBaseType } from "./hypercertBaseType.js"; + +@ObjectType({ + description: + "Hypercert with metadata, contract, orders, sales and fraction information", + simpleResolvers: true, +}) +export class HypercertWithMetadata extends HypercertBaseType { + // Resolved fields + @Field(() => Metadata, { + nullable: true, + description: "The metadata for the hypercert as referenced by the uri", + }) + metadata?: Metadata; +} diff --git a/src/graphql/schemas/typeDefs/blueprintTypeDefs.ts b/src/graphql/schemas/typeDefs/blueprintTypeDefs.ts index b2bfd021..d2433cee 100644 --- a/src/graphql/schemas/typeDefs/blueprintTypeDefs.ts +++ b/src/graphql/schemas/typeDefs/blueprintTypeDefs.ts @@ -1,10 +1,13 @@ -import { Field, ObjectType } from "type-graphql"; import { GraphQLJSON } from "graphql-scalars"; +import { Field, ObjectType } from "type-graphql"; +import { DataResponse } from "../../../lib/graphql/DataResponse.js"; +import { HypercertsResponse } from "./hypercertTypeDefs.js"; import { User } from "./userTypeDefs.js"; -import GetHypercertsResponse from "../resolvers/hypercertResolver.js"; -@ObjectType() -class Blueprint { +@ObjectType({ + description: "Blueprint for hypercert creation", +}) +export class Blueprint { @Field() id?: number; @@ -23,11 +26,14 @@ class Blueprint { @Field(() => [User]) admins?: User[]; - @Field(() => GetHypercertsResponse) - hypercerts?: GetHypercertsResponse; + @Field(() => HypercertsResponse) + hypercerts?: HypercertsResponse; // Internal field, not queryable hypercert_ids?: string[]; } -export { Blueprint }; 
+@ObjectType({ + description: "Blueprints for hypercert creation", +}) +export class GetBlueprintsResponse extends DataResponse(Blueprint) {} diff --git a/src/graphql/schemas/typeDefs/collectionTypeDefs.ts b/src/graphql/schemas/typeDefs/collectionTypeDefs.ts index 019f4378..e74efa3c 100644 --- a/src/graphql/schemas/typeDefs/collectionTypeDefs.ts +++ b/src/graphql/schemas/typeDefs/collectionTypeDefs.ts @@ -4,13 +4,15 @@ import { EthBigInt } from "../../scalars/ethBigInt.js"; import { BasicTypeDef } from "./baseTypes/basicTypeDef.js"; import { User } from "./userTypeDefs.js"; -import { Hypercert } from "./hypercertTypeDefs.js"; +import { HypercertsResponse } from "./hypercertTypeDefs.js"; import { Blueprint } from "./blueprintTypeDefs.js"; +import { DataResponse } from "../../../lib/graphql/DataResponse.js"; @ObjectType({ description: "Collection of hypercerts for reference and display purposes", }) export class Collection extends BasicTypeDef { + //TODO convert to timestamp in seconds @Field({ description: "Creation timestamp of the collection" }) created_at?: string; @Field({ description: "Name of the collection" }) @@ -26,9 +28,14 @@ export class Collection extends BasicTypeDef { @Field(() => [User]) admins?: User[]; - @Field(() => [Hypercert], { nullable: true }) - hypercerts?: Hypercert[]; + @Field(() => HypercertsResponse, { nullable: true }) + hypercerts?: HypercertsResponse; @Field(() => [Blueprint], { nullable: true }) blueprints?: Blueprint[]; } + +@ObjectType({ + description: "Collection of hypercerts for reference and display purposes", +}) +export class GetCollectionsResponse extends DataResponse(Collection) {} diff --git a/src/graphql/schemas/typeDefs/contractTypeDefs.ts b/src/graphql/schemas/typeDefs/contractTypeDefs.ts index 7f9d8d80..8a04a5d0 100644 --- a/src/graphql/schemas/typeDefs/contractTypeDefs.ts +++ b/src/graphql/schemas/typeDefs/contractTypeDefs.ts @@ -1,9 +1,10 @@ import { Field, ObjectType } from "type-graphql"; import { BasicTypeDef } 
from "./baseTypes/basicTypeDef.js"; import { EthBigInt } from "../../scalars/ethBigInt.js"; +import { DataResponse } from "../../../lib/graphql/DataResponse.js"; @ObjectType({ description: "Pointer to a contract deployed on a chain" }) -class Contract extends BasicTypeDef { +export class Contract extends BasicTypeDef { @Field(() => EthBigInt, { nullable: true, description: "The ID of the chain on which the contract is deployed", @@ -18,4 +19,5 @@ class Contract extends BasicTypeDef { start_block?: bigint | number | null; } -export { Contract }; +@ObjectType() +export class GetContractsResponse extends DataResponse(Contract) {} diff --git a/src/graphql/schemas/typeDefs/fractionTypeDefs.ts b/src/graphql/schemas/typeDefs/fractionTypeDefs.ts index e657b62b..927c0dfe 100644 --- a/src/graphql/schemas/typeDefs/fractionTypeDefs.ts +++ b/src/graphql/schemas/typeDefs/fractionTypeDefs.ts @@ -1,17 +1,27 @@ import { Field, ID, ObjectType } from "type-graphql"; import { EthBigInt } from "../../scalars/ethBigInt.js"; import { BasicTypeDef } from "./baseTypes/basicTypeDef.js"; -import GetOrdersResponse from "../resolvers/orderResolver.js"; import { Metadata } from "./metadataTypeDefs.js"; -import GetSalesResponse from "../resolvers/salesResolver.js"; +import { DataResponse } from "../../../lib/graphql/DataResponse.js"; +import { GetOrdersResponse } from "./orderTypeDefs.js"; +import { GetSalesResponse } from "./salesTypeDefs.js"; @ObjectType({ description: "Fraction of an hypercert", simpleResolvers: true, }) -class Fraction extends BasicTypeDef { - claims_id?: string; +export class Fraction extends BasicTypeDef { + @Field(() => EthBigInt, { + nullable: true, + description: "The token ID of the fraction", + }) + token_id?: bigint; + @Field({ + nullable: true, + description: "The ID of the claims", + }) + claims_id?: string; @Field({ nullable: true, description: "Address of the owner of the fractions", @@ -38,6 +48,27 @@ class Fraction extends BasicTypeDef { }) fraction_id?: 
string; + @Field(() => EthBigInt, { + nullable: true, + description: "Block number of the creation of the fraction", + }) + creation_block_number?: bigint | number | string; + @Field(() => EthBigInt, { + nullable: true, + description: "Timestamp of the block of the creation of the fraction", + }) + creation_block_timestamp?: bigint | number | string; + @Field(() => EthBigInt, { + nullable: true, + description: "Block number of the last update of the fraction", + }) + last_update_block_number?: bigint | number | string; + @Field(() => EthBigInt, { + nullable: true, + description: "Timestamp of the block of the last update of the fraction", + }) + last_update_block_timestamp?: bigint | number | string; + // Resolved fields @Field(() => GetOrdersResponse, { nullable: true, @@ -57,26 +88,12 @@ class Fraction extends BasicTypeDef { }) sales?: GetSalesResponse; - @Field(() => EthBigInt, { - nullable: true, - description: "Block number of the creation of the fraction", - }) - creation_block_number?: bigint | number | string; - @Field(() => EthBigInt, { - nullable: true, - description: "Timestamp of the block of the creation of the fraction", - }) - creation_block_timestamp?: bigint | number | string; - @Field(() => EthBigInt, { - nullable: true, - description: "Block number of the last update of the fraction", - }) - last_update_block_number?: bigint | number | string; - @Field(() => EthBigInt, { + @Field({ nullable: true, - description: "Timestamp of the block of the last update of the fraction", + description: "Whether the fraction has been burned", }) - last_update_block_timestamp?: bigint | number | string; + burned?: boolean; } -export { Fraction }; +@ObjectType() +export class GetFractionsResponse extends DataResponse(Fraction) {} diff --git a/src/graphql/schemas/typeDefs/hyperboardTypeDefs.ts b/src/graphql/schemas/typeDefs/hyperboardTypeDefs.ts index 0b7a7f97..968a92e6 100644 --- a/src/graphql/schemas/typeDefs/hyperboardTypeDefs.ts +++ 
b/src/graphql/schemas/typeDefs/hyperboardTypeDefs.ts @@ -1,77 +1,54 @@ import { Field, ObjectType } from "type-graphql"; -import { BasicTypeDef } from "./baseTypes/basicTypeDef.js"; +import { DataResponse } from "../../../lib/graphql/DataResponse.js"; import { EthBigInt } from "../../scalars/ethBigInt.js"; -import { User } from "./userTypeDefs.js"; -import { GraphQLBigInt } from "graphql-scalars"; +import { BasicTypeDef } from "./baseTypes/basicTypeDef.js"; import { Collection } from "./collectionTypeDefs.js"; +import GetUsersResponse, { User } from "./userTypeDefs.js"; -@ObjectType({ - description: "Hyperboard of hypercerts for reference and display purposes", -}) -class Hyperboard extends BasicTypeDef { - @Field({ description: "Name of the hyperboard" }) - name?: string; - @Field(() => [EthBigInt], { - nullable: true, - description: "Chain ID of the hyperboard", - }) - chain_ids?: (bigint | number | string)[]; - @Field({ nullable: true, description: "Background image of the hyperboard" }) - background_image?: string; - @Field({ - nullable: true, - description: - "Whether the hyperboard should be rendered as a grayscale image", - }) - grayscale_images?: boolean; - @Field({ - nullable: true, - description: "Color of the borders of the hyperboard", - }) - tile_border_color?: string; - - @Field(() => [User]) - admins?: User[]; - - @Field(() => SectionResponseType) - sections?: SectionResponseType[]; - - @Field(() => [HyperboardOwner]) - owners?: HyperboardOwner[]; -} - -@ObjectType({}) -class SectionResponseType { - @Field(() => [Section]) - data?: Section[]; - +@ObjectType() +export class HyperboardOwner extends User { @Field() - count?: number; + percentage_owned?: number; } +@ObjectType() +export class GetHyperboardOwnersResponse extends DataResponse( + HyperboardOwner, +) {} + @ObjectType({ - description: "Section representing a collection within a hyperboard", + description: + "Section representing one or more collections within a hyperboard", }) export class 
Section { @Field() label?: string; - @Field(() => Collection) - collection?: Collection; + @Field(() => [Collection]) + collections?: Collection[]; @Field(() => [SectionEntry]) entries?: SectionEntry[]; - @Field(() => [HyperboardOwner]) - owners?: HyperboardOwner[]; + @Field(() => GetHyperboardOwnersResponse) + owners?: GetHyperboardOwnersResponse; } +@ObjectType({}) +export class GetSectionsResponse extends DataResponse(Section) {} @ObjectType() -export class HyperboardOwner extends User { +class SectionEntryOwner extends User { @Field() - percentage_owned?: number; + percentage?: number; + @Field(() => EthBigInt, { nullable: true }) + units?: bigint | number | string; } +@ObjectType() +export class GetSectionEntryOwnersResponse extends DataResponse( + SectionEntryOwner, +) {} + @ObjectType({ description: "Entry representing a hypercert or blueprint within a section", }) @@ -86,19 +63,47 @@ class SectionEntry { display_size?: number; @Field({ description: "Name of the hypercert or blueprint", nullable: true }) name?: string; - @Field(() => GraphQLBigInt, { nullable: true }) + @Field(() => EthBigInt, { nullable: true }) total_units?: bigint | number | string; - @Field(() => [SectionEntryOwner]) - owners?: SectionEntryOwner[]; + @Field(() => GetSectionEntryOwnersResponse) + owners?: GetSectionEntryOwnersResponse; } -@ObjectType() -class SectionEntryOwner extends User { - @Field() - percentage?: number; - @Field(() => GraphQLBigInt, { nullable: true }) - units?: bigint | number | string; +@ObjectType({ + description: "Hyperboard of hypercerts for reference and display purposes", +}) +export class Hyperboard extends BasicTypeDef { + @Field({ description: "Name of the hyperboard" }) + name?: string; + @Field(() => [EthBigInt], { + nullable: true, + description: "Chain ID of the hyperboard", + }) + chain_ids?: (bigint | number | string)[]; + @Field({ nullable: true, description: "Background image of the hyperboard" }) + background_image?: string; + @Field({ + nullable: 
true, + description: + "Whether the hyperboard should be rendered as a grayscale image", + }) + grayscale_images?: boolean; + @Field({ + nullable: true, + description: "Color of the borders of the hyperboard", + }) + tile_border_color?: string; + + @Field(() => GetUsersResponse) + admins?: GetUsersResponse; + + @Field(() => GetSectionsResponse) + sections?: GetSectionsResponse; + + @Field(() => GetHyperboardOwnersResponse) + owners?: GetHyperboardOwnersResponse; } -export { Hyperboard }; +@ObjectType() +export class GetHyperboardsResponse extends DataResponse(Hyperboard) {} diff --git a/src/graphql/schemas/typeDefs/hypercertTypeDefs.ts b/src/graphql/schemas/typeDefs/hypercertTypeDefs.ts index bd78b1da..c23662e7 100644 --- a/src/graphql/schemas/typeDefs/hypercertTypeDefs.ts +++ b/src/graphql/schemas/typeDefs/hypercertTypeDefs.ts @@ -1,15 +1,17 @@ +import { GraphQLBigInt } from "graphql-scalars"; import { Field, ObjectType } from "type-graphql"; -import GetAttestationsResponse from "../resolvers/attestationResolver.js"; -import GetFractionsResponse from "../resolvers/fractionResolver.js"; -import { Contract } from "./contractTypeDefs.js"; -import GetOrdersResponse from "../resolvers/orderResolver.js"; -import GetSalesResponse from "../resolvers/salesResolver.js"; +import { DataResponse } from "../../../lib/graphql/DataResponse.js"; +import { GetAttestationsResponse } from "./attestationTypeDefs.js"; import { HypercertBaseType } from "./baseTypes/hypercertBaseType.js"; +import { Contract } from "./contractTypeDefs.js"; import { Metadata } from "./metadataTypeDefs.js"; -import { Order } from "./orderTypeDefs.js"; -import { GraphQLBigInt } from "graphql-scalars"; - -@ObjectType() +import { GetOrdersResponse, Order } from "./orderTypeDefs.js"; +import { GetSalesResponse } from "./salesTypeDefs.js"; +import { GetFractionsResponse } from "./fractionTypeDefs.js"; +@ObjectType({ + description: + "Orders for a hypercert, including the cheapest order", +}) 
class GetOrdersForHypercertResponse extends GetOrdersResponse { @Field(() => Order, { nullable: true }) cheapestOrder?: Order; @@ -23,8 +25,14 @@ class GetOrdersForHypercertResponse extends GetOrdersResponse { "Hypercert with metadata, contract, orders, sales and fraction information", simpleResolvers: true, }) -class Hypercert extends HypercertBaseType { +export class Hypercert extends HypercertBaseType { // Resolved fields + @Field(() => Metadata, { + nullable: true, + description: "The metadata for the hypercert as referenced by the uri", + }) + metadata?: Metadata; + @Field(() => Contract, { nullable: true, description: "The contract that the hypercert is associated with", @@ -55,12 +63,16 @@ class Hypercert extends HypercertBaseType { description: "Sales related to this hypercert", }) sales?: GetSalesResponse; - - @Field(() => Metadata, { - nullable: true, - description: "The metadata for the hypercert as referenced by the uri", - }) - declare metadata?: Metadata; } -export { Hypercert }; +@ObjectType({ + description: + "Hypercert with metadata, contract, orders, sales and fraction information", +}) +export class GetHypercertsResponse extends DataResponse(Hypercert) {} + +@ObjectType({ + description: + "Hypercert without metadata, contract, orders, sales and fraction information", +}) +export class HypercertsResponse extends DataResponse(HypercertBaseType) {} diff --git a/src/graphql/schemas/typeDefs/metadataTypeDefs.ts b/src/graphql/schemas/typeDefs/metadataTypeDefs.ts index 99c325eb..129f40d0 100644 --- a/src/graphql/schemas/typeDefs/metadataTypeDefs.ts +++ b/src/graphql/schemas/typeDefs/metadataTypeDefs.ts @@ -1,15 +1,15 @@ +import { GraphQLJSON } from "graphql-scalars"; import { Field, ObjectType } from "type-graphql"; +import { DataResponse } from "../../../lib/graphql/DataResponse.js"; import type { Json } from "../../../types/supabaseData.js"; -import { GraphQLJSON } from "graphql-scalars"; -import { BasicTypeDef } from "./baseTypes/basicTypeDef.js"; 
import { EthBigInt } from "../../scalars/ethBigInt.js"; +import { BasicTypeDef } from "./baseTypes/basicTypeDef.js"; @ObjectType({ description: "Metadata related to the hypercert describing work, impact, timeframes and other relevant information", - simpleResolvers: true, }) -class Metadata extends BasicTypeDef { +export class Metadata extends BasicTypeDef { @Field({ nullable: true, description: "Name of the hypercert" }) name?: string; @Field({ nullable: true, description: "Description of the hypercert" }) @@ -73,4 +73,5 @@ class Metadata extends BasicTypeDef { work_timeframe_to?: bigint | number; } -export { Metadata }; +@ObjectType() +export class GetMetadataResponse extends DataResponse(Metadata) {} diff --git a/src/graphql/schemas/typeDefs/orderTypeDefs.ts b/src/graphql/schemas/typeDefs/orderTypeDefs.ts index bb49dd53..1c9d202b 100644 --- a/src/graphql/schemas/typeDefs/orderTypeDefs.ts +++ b/src/graphql/schemas/typeDefs/orderTypeDefs.ts @@ -1,14 +1,18 @@ -import { Field, ObjectType } from "type-graphql"; -import { BasicTypeDef } from "./baseTypes/basicTypeDef.js"; +import { Field, Int, ObjectType } from "type-graphql"; +import { DataResponse } from "../../../lib/graphql/DataResponse.js"; import { EthBigInt } from "../../scalars/ethBigInt.js"; +import { BasicTypeDef } from "./baseTypes/basicTypeDef.js"; import { HypercertBaseType } from "./baseTypes/hypercertBaseType.js"; +import { HypercertWithMetadata } from "./baseTypes/hypercertBaseWithMetadata.js"; -@ObjectType() -class Order extends BasicTypeDef { +@ObjectType({ + description: "Marketplace order for a hypercert", +}) +export class Order extends BasicTypeDef { @Field() hypercert_id?: string; @Field() - createdAt?: string; + createdAt?: number; @Field() quoteType?: number; @Field() @@ -45,19 +49,20 @@ class Order extends BasicTypeDef { amounts?: number[]; @Field() invalidated?: boolean; - @Field(() => [String], { nullable: true }) - validator_codes?: string[]; + @Field(() => [Int], { nullable: true }) + 
validator_codes?: number[]; @Field() pricePerPercentInUSD?: string; @Field() pricePerPercentInToken?: string; - @Field(() => HypercertBaseType, { + @Field(() => HypercertWithMetadata, { nullable: true, description: "The hypercert associated with this order", }) hypercert?: HypercertBaseType; } -export { Order }; +@ObjectType() +export class GetOrdersResponse extends DataResponse(Order) {} diff --git a/src/graphql/schemas/typeDefs/salesTypeDefs.ts b/src/graphql/schemas/typeDefs/salesTypeDefs.ts index c35df08e..bcf34b78 100644 --- a/src/graphql/schemas/typeDefs/salesTypeDefs.ts +++ b/src/graphql/schemas/typeDefs/salesTypeDefs.ts @@ -1,10 +1,12 @@ import { Field, ObjectType } from "type-graphql"; -import { BasicTypeDef } from "./baseTypes/basicTypeDef.js"; import { EthBigInt } from "../../scalars/ethBigInt.js"; +import { BasicTypeDef } from "./baseTypes/basicTypeDef.js"; +import { DataResponse } from "../../../lib/graphql/DataResponse.js"; import { HypercertBaseType } from "./baseTypes/hypercertBaseType.js"; +import { HypercertWithMetadata } from "./baseTypes/hypercertBaseWithMetadata.js"; @ObjectType() -class Sale extends BasicTypeDef { +export class Sale extends BasicTypeDef { @Field({ description: "The address of the buyer" }) buyer?: string; @Field({ description: "The address of the seller" }) @@ -50,7 +52,7 @@ class Sale extends BasicTypeDef { }) creation_block_timestamp?: bigint | number | string; - @Field(() => HypercertBaseType, { + @Field(() => HypercertWithMetadata, { nullable: true, description: "The hypercert associated with this order", }) @@ -60,4 +62,5 @@ class Sale extends BasicTypeDef { currency_amount?: bigint | number | string; } -export { Sale }; +@ObjectType() +export class GetSalesResponse extends DataResponse(Sale) {} diff --git a/src/graphql/schemas/typeDefs/signatureRequestTypeDefs.ts b/src/graphql/schemas/typeDefs/signatureRequestTypeDefs.ts index af854376..13ef7cb1 100644 --- a/src/graphql/schemas/typeDefs/signatureRequestTypeDefs.ts +++ 
b/src/graphql/schemas/typeDefs/signatureRequestTypeDefs.ts @@ -1,12 +1,13 @@ import { Field, ObjectType, registerEnumType } from "type-graphql"; import { EthBigInt } from "../../scalars/ethBigInt.js"; +import { DataResponse } from "../../../lib/graphql/DataResponse.js"; -enum SignatureRequestPurpose { +export enum SignatureRequestPurpose { UPDATE_USER_DATA = "update_user_data", } -enum SignatureRequestStatus { +export enum SignatureRequestStatus { PENDING = "pending", EXECUTED = "executed", CANCELED = "canceled", @@ -26,7 +27,7 @@ registerEnumType(SignatureRequestStatus, { description: "Pending signature request for a user", simpleResolvers: true, }) -class SignatureRequest { +export class SignatureRequest { @Field({ description: "The safe address of the user who needs to sign", }) @@ -63,4 +64,7 @@ class SignatureRequest { chain_id?: bigint | number | string; } -export { SignatureRequest, SignatureRequestPurpose, SignatureRequestStatus }; +@ObjectType() +export class GetSignatureRequestResponse extends DataResponse( + SignatureRequest, +) {} diff --git a/src/graphql/schemas/typeDefs/typeDefs.ts b/src/graphql/schemas/typeDefs/typeDefs.ts new file mode 100644 index 00000000..d16cbb51 --- /dev/null +++ b/src/graphql/schemas/typeDefs/typeDefs.ts @@ -0,0 +1,18 @@ +export const EntityTypeDefs = { + Metadata: "Metadata", + Hypercert: "Hypercert", + Fraction: "Fraction", + Contract: "Contract", + Attestation: "Attestation", + AttestationSchema: "AttestationSchema", + AllowlistRecord: "AllowlistRecord", + Blueprint: "Blueprint", + SignatureRequest: "SignatureRequest", + Collection: "Collection", + Order: "Order", + Sale: "Sale", + Hyperboard: "Hyperboard", + User: "User", +} as const; + +export type EntityTypeDefs = keyof typeof EntityTypeDefs; diff --git a/src/graphql/schemas/typeDefs/userTypeDefs.ts b/src/graphql/schemas/typeDefs/userTypeDefs.ts index 716b0948..d21cedc6 100644 --- a/src/graphql/schemas/typeDefs/userTypeDefs.ts +++ 
b/src/graphql/schemas/typeDefs/userTypeDefs.ts @@ -1,10 +1,11 @@ import { Field, ObjectType } from "type-graphql"; +import { DataResponse } from "../../../lib/graphql/DataResponse.js"; import { EthBigInt } from "../../scalars/ethBigInt.js"; - -import { SignatureRequest } from "./signatureRequestTypeDefs.js"; +import { GetSignatureRequestResponse } from "./signatureRequestTypeDefs.js"; +import { BasicTypeDef } from "./baseTypes/basicTypeDef.js"; @ObjectType() -class User { +export class User extends BasicTypeDef { @Field({ description: "The address of the user" }) address?: string; @@ -20,11 +21,12 @@ class User { }) chain_id?: bigint | number | string; - @Field(() => [SignatureRequest], { + @Field(() => GetSignatureRequestResponse, { nullable: true, description: "Pending signature requests for the user", }) - signature_requests?: SignatureRequest[]; + signature_requests?: GetSignatureRequestResponse; } -export { User }; +@ObjectType() +export default class GetUsersResponse extends DataResponse(User) {} diff --git a/src/graphql/schemas/utils/filters-kysely.ts b/src/graphql/schemas/utils/filters-kysely.ts deleted file mode 100644 index 0ac2d2f3..00000000 --- a/src/graphql/schemas/utils/filters-kysely.ts +++ /dev/null @@ -1,248 +0,0 @@ -import { sql, SqlBool } from "kysely"; -import { - NumberSearchOptions, - StringSearchOptions, - StringArraySearchOptions, - NumberArraySearchOptions, -} from "../inputs/searchOptions.js"; - -export type OperandType = string | number | bigint | string[] | bigint[]; - -export type NumericOperatorType = "eq" | "gt" | "gte" | "lt" | "lte"; -export type StringOperatorType = "contains" | "startsWith" | "endsWith"; -export type ArrayOperatorType = "overlaps" | "contains"; -export type OperatorType = - | NumericOperatorType - | StringOperatorType - | ArrayOperatorType; - -enum OperatorSymbols { - eq = "=", - gt = ">", - gte = ">=", - lt = "<", - lte = "<=", - ilike = "~*", - overlaps = "&&", - contains = "@>", -} - -// TODO: remove when data 
client is updated -export const generateFilterValues = ( - column: string, - operator: OperatorType, - operand: OperandType, -) => { - switch (operator) { - case "eq": - return [column, OperatorSymbols.eq, operand]; - case "gt": - return [column, OperatorSymbols.gt, operand]; - case "gte": - return [column, OperatorSymbols.gte, operand]; - case "lt": - return [column, OperatorSymbols.lt, operand]; - case "lte": - return [column, OperatorSymbols.lte, operand]; - case "contains": - return [column, OperatorSymbols.ilike, `%${operand}%`]; - case "startsWith": - return [column, OperatorSymbols.ilike, `${operand}%`]; - case "endsWith": - return [column, OperatorSymbols.ilike, `%${operand}`]; - } - - return []; -}; - -export const getTablePrefix = (column: string): string => { - switch (column) { - case "eas_schema": - return "supported_schemas"; - case "hypercerts": - return "claims"; - case "contract": - return "contracts"; - case "fractions": - return "fractions_view"; - case "metadata": - return "metadata"; - case "attestations": - return "attestations"; - default: - return column; - } -}; - -export const isFilterObject = (obj: never): boolean => { - const filterKeys = [ - "eq", - "gt", - "gte", - "lt", - "lte", - "contains", - "startsWith", - "endsWith", - "in", - "overlaps", - "contains", - ]; - return Object.keys(obj).some((key) => filterKeys.includes(key)); -}; - -// Helper functions for building conditions -const buildEqualityCondition = ( - column: string, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - value: any, - tableName: string, -): SqlBool => sql`${sql.raw(`"${tableName}"."${column}"`)} = -${value}`; - -const buildInCondition = ( - column: string, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - values: any[], - tableName: string, -): SqlBool => sql`${sql.raw(`"${tableName}"."${column}"`)} = ANY(${values})`; - -const buildComparisonCondition = ( - column: string, - operator: string, - // eslint-disable-next-line 
@typescript-eslint/no-explicit-any - value: any, - tableName: string, -): SqlBool => - sql`${sql.raw(`"${tableName}"."${column}"`)} - ${sql.raw(operator)} - ${value}`; - -const buildLikeCondition = ( - column: string, - pattern: string, - tableName: string, -): SqlBool => sql`${sql.raw(`"${tableName}"."${column}"`)} ILIKE -${pattern}`; - -const buildArrayCondition = ( - column: string, - operator: string, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - values: any[], - tableName: string, -): SqlBool => - sql`${sql.raw(`"${tableName}"."${column}"`)} - ${sql.raw(operator)} - ${sql.raw(`ARRAY[${values.map((v) => `'${v}'`).join(", ")}]`)}`; - -const conditionBuilders = { - eq: buildEqualityCondition, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - in: (column: string, value: any, tableName: string) => - buildInCondition(column, value, tableName), - // eslint-disable-next-line @typescript-eslint/no-explicit-any - gt: (column: string, value: any, tableName: string) => - buildComparisonCondition(column, ">", value, tableName), - // eslint-disable-next-line @typescript-eslint/no-explicit-any - gte: (column: string, value: any, tableName: string) => - buildComparisonCondition(column, ">=", value, tableName), - // eslint-disable-next-line @typescript-eslint/no-explicit-any - lt: (column: string, value: any, tableName: string) => - buildComparisonCondition(column, "<", value, tableName), - // eslint-disable-next-line @typescript-eslint/no-explicit-any - lte: (column: string, value: any, tableName: string) => - buildComparisonCondition(column, "<=", value, tableName), - contains: (column: string, value: string, tableName: string) => - buildLikeCondition(column, `%${value}%`, tableName), - startsWith: (column: string, value: string, tableName: string) => - buildLikeCondition(column, `${value}%`, tableName), - endsWith: (column: string, value: string, tableName: string) => - buildLikeCondition(column, `%${value}`, tableName), -}; - -export 
const buildCondition = ( - column: string, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - value: any, - tableName: string, -): SqlBool => { - const conditions: SqlBool[] = []; - - if ( - value instanceof StringSearchOptions || - value instanceof NumberSearchOptions - ) { - Object.entries(value).forEach(([key, val]) => { - if (key in conditionBuilders && val !== undefined) { - conditions.push(conditionBuilders[key](column, val, tableName)); - } - }); - } else if ( - value instanceof StringArraySearchOptions || - value instanceof NumberArraySearchOptions - ) { - if (value.contains && value.contains.length > 0) { - conditions.push( - buildArrayCondition(column, "@>", value.contains, tableName), - ); - } - if (value.overlaps && value.overlaps.length > 0) { - conditions.push( - buildArrayCondition(column, "&&", value.overlaps, tableName), - ); - } - } else if (typeof value === "object" && value !== null) { - Object.entries(value).forEach(([key, val]) => { - if (key in conditionBuilders && val !== undefined) { - conditions.push(conditionBuilders[key](column, val, tableName)); - } else if (key === "contains" && Array.isArray(val)) { - conditions.push(buildArrayCondition(column, "@>", val, tableName)); - } else if (key === "overlaps" && Array.isArray(val)) { - conditions.push(buildArrayCondition(column, "&&", val, tableName)); - } - }); - } - - return sql.join(conditions, sql` AND `); -}; - -export const buildWhereCondition = ( - column: string, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - value: any, - tableName: T, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - eb: any, -): SqlBool | null => { - if (!column || value === undefined) return null; - - if (typeof value === "object" && value !== null) { - if (isFilterObject(value)) { - return buildCondition(column, value, tableName); - } - - const relatedTable = getTablePrefix(column); - const nestedConditions: SqlBool[] = []; - - for (const [nestedColumn, 
nestedValue] of Object.entries(value)) { - if (!nestedColumn || nestedValue === undefined) continue; - const nestedCondition = buildWhereCondition( - nestedColumn, - nestedValue, - relatedTable, - eb, - ); - if (nestedCondition) { - nestedConditions.push(nestedCondition); - } - } - - return nestedConditions.length > 0 - ? sql.join(nestedConditions, sql` AND `) - : null; - } - - return sql`${sql.raw(`"${tableName}"."${column}"`)} = - ${value}`; -}; diff --git a/src/graphql/schemas/utils/filters.ts b/src/graphql/schemas/utils/filters.ts deleted file mode 100644 index 7aefcc41..00000000 --- a/src/graphql/schemas/utils/filters.ts +++ /dev/null @@ -1,221 +0,0 @@ -import { - IdSearchOptions, - NumberArraySearchOptions, - BigIntSearchOptions, - StringArraySearchOptions, - StringSearchOptions, -} from "../inputs/searchOptions.js"; -import type { WhereOptions } from "../inputs/whereOptions.js"; -import type { Database as CachingDatabase } from "../../../types/supabaseCaching.js"; -import { PostgrestTransformBuilder } from "@supabase/postgrest-js"; - -interface ApplyFilters< - T extends object, - QueryType extends PostgrestTransformBuilder< - CachingDatabase["public"], - Record, - unknown, - unknown, - unknown - >, -> { - query: QueryType; - where?: WhereOptions; -} - -type OperandType = string | number | bigint | string[] | bigint[]; -type OperatorType = - | "eq" - | "gt" - | "gte" - | "lt" - | "lte" - | "ilike" - | "contains" - | "startsWith" - | "endsWith"; - -const generateFilters = ( - value: BigIntSearchOptions | StringSearchOptions, - column: string, -) => { - const filters: [OperatorType, string, OperandType][] = []; - - for (const [operator, operand] of Object.entries(value) as [ - OperatorType, - string, - ][]) { - if (!operand) continue; - - switch (operator) { - case "eq": - case "gt": - case "gte": - case "lt": - case "lte": - filters.push([operator, column, operand]); - break; - case "contains": - filters.push(["ilike", column, `%${operand}%`]); - break; - case 
"startsWith": - filters.push(["ilike", column, `${operand}%`]); - break; - case "endsWith": - filters.push(["ilike", column, `%${operand}`]); - break; - } - } - return filters; -}; - -const generateArrayFilters = ( - value: NumberArraySearchOptions | StringArraySearchOptions, - column: string, -) => { - const filters: [OperatorType, string, OperandType][] = []; - for (const [operator, operand] of Object.entries(value)) { - if (!operand) continue; - - // Assert operand is an array of numbers - if (!Array.isArray(operand)) { - throw new Error( - `Expected operand to be an array, but got ${typeof operand}`, - ); - } - - switch (operator) { - case "contains": - filters.push(["contains", column, operand]); - break; - } - } - return filters; -}; - -function isStringSearchOptions(value: unknown): value is StringSearchOptions { - if (typeof value !== "object" || value === null) { - return false; - } - - const possibleStringSearchOptions = value as Partial; - - // Check for properties unique to StringSearchOptions - const keys = ["eq", "contains", "startsWith", "endsWith"]; - return keys.some((key) => key in possibleStringSearchOptions); -} - -function isNumberSearchOptions(value: unknown): value is BigIntSearchOptions { - if (typeof value !== "object" || value === null) { - return false; - } - - const possibleNumberSearchOptions = value as Partial; - - // Check for properties unique to NumberSearchOptions - const keys = ["eq", "gt", "gte", "lt", "lte"]; - return keys.some((key) => key in possibleNumberSearchOptions); -} - -function isIdSearchOptions(value: unknown): value is IdSearchOptions { - if (typeof value !== "object" || value === null) { - return false; - } - - const possibleIdSearchOptions = value as Partial; - - // Check for properties unique to IdSearchOptions - const keys = ["eq", "contains", "startsWith", "endsWith"]; - return keys.some((key) => key in possibleIdSearchOptions); -} - -function isStringArraySearchOptions( - value: unknown, -): value is 
StringArraySearchOptions { - if (!Array.isArray(value) || value === null) { - return false; - } - - const possibleStringArraySearchOptions = - value as Partial; - - // Check for properties unique to StringArraySearchOptions - const keys = ["contains"]; - return keys.some((key) => key in possibleStringArraySearchOptions); -} - -function isNumberArraySearchOptions( - value: unknown, -): value is NumberArraySearchOptions { - if (!Array.isArray(value) || value === null) { - return false; - } - - const possibleNumberArraySearchOptions = - value as Partial; - - // Check for properties unique to NumberArraySearchOptions - const keys = ["contains"]; - return keys.some((key) => key in possibleNumberArraySearchOptions); -} - -const buildFilters = (value: unknown, column: string) => { - if ( - isNumberSearchOptions(value) || - isStringSearchOptions(value) || - isIdSearchOptions(value) - ) { - return generateFilters(value, column); - } - - if (isStringArraySearchOptions(value) || isNumberArraySearchOptions(value)) { - return generateArrayFilters(value, column); - } - - return []; -}; - -export const applyFilters = < - T extends object, - QueryType extends PostgrestTransformBuilder< - CachingDatabase["public"], - Record, - unknown, - unknown, - unknown - >, ->({ - query, - where, -}: ApplyFilters) => { - if (!where) return query; - - const filters = []; - for (const [column, value] of Object.entries(where)) { - if (!value) continue; - - filters.push(...buildFilters(value, column)); - - // If the value is an object, recursively apply filters - if (typeof value === "object" && !Array.isArray(value)) { - const nestedFilters = []; - // TODO resolve better handling of column name exceptions - for (const [_column, _value] of Object.entries(value)) { - if (!_value) continue; - if (column === "hypercerts" || column === "hypercert") - nestedFilters.push(...buildFilters(_value, `claims.${_column}`)); - else if (column === "contract") - nestedFilters.push(...buildFilters(_value, 
`contracts.${_column}`)); - else - nestedFilters.push(...buildFilters(_value, `${column}.${_column}`)); - } - filters.push(...nestedFilters); - } - } - - query = filters.reduce((acc, [filter, ...args]) => { - return acc[filter](...args); - }, query); - - return query as unknown as QueryType; -}; diff --git a/src/graphql/schemas/utils/pagination.ts b/src/graphql/schemas/utils/pagination.ts deleted file mode 100644 index 760e7610..00000000 --- a/src/graphql/schemas/utils/pagination.ts +++ /dev/null @@ -1,27 +0,0 @@ -import {PostgrestTransformBuilder} from "@supabase/postgrest-js"; -import type {Database as CachingDatabase} from "../../../types/supabaseCaching.js"; -import {PaginationArgs} from "../args/paginationArgs.js"; - -interface ApplyPagination< - QueryType extends PostgrestTransformBuilder, unknown, unknown, unknown> -> { - query: QueryType; - pagination?: PaginationArgs; -} - - -export const applyPagination = , unknown, unknown, unknown>>({ - query, - pagination - }: ApplyPagination) => { - if (!pagination) return query; - - const {first, offset} = pagination; - - if (first && !offset) return query.limit(first); - - if (first && offset) return query.range(offset, offset + first - 1); - - return query; - -} \ No newline at end of file diff --git a/src/graphql/schemas/utils/sorting.ts b/src/graphql/schemas/utils/sorting.ts deleted file mode 100644 index 1d1d09e0..00000000 --- a/src/graphql/schemas/utils/sorting.ts +++ /dev/null @@ -1,86 +0,0 @@ -import { PostgrestTransformBuilder } from "@supabase/postgrest-js"; - -import { Database as DataDatabase } from "../../../types/supabaseData.js"; -import type { Database as CachingDatabase } from "../../../types/supabaseCaching.js"; -import type { OrderOptions } from "../inputs/orderOptions.js"; -import { - AttestationSchemaSortOptions, - AttestationSortOptions, - ContractSortOptions, - FractionSortOptions, - HypercertSortOptions, - MetadataSortOptions, -} from "../inputs/sortOptions.js"; -import { SortOrder } from 
"../enums/sortEnums.js"; - -interface ApplySorting< - T extends object, - QueryType extends PostgrestTransformBuilder< - CachingDatabase["public"] | DataDatabase["public"], - Record, - unknown, - unknown, - unknown - >, -> { - query: QueryType; - sort?: OrderOptions; -} - -type ColumnOpts = { - ascending?: boolean; - nullsFirst?: boolean; - referencedTable?: string; -}; - -export const applySorting = < - T extends object, - QueryType extends PostgrestTransformBuilder< - CachingDatabase["public"] | DataDatabase["public"], - Record, - unknown, - unknown, - unknown - >, ->({ - query, - sort, -}: ApplySorting) => { - if (!sort) return query; - - const sorting: [string, ColumnOpts][] = []; - for (const [key, value] of Object.entries(sort.by || {})) { - if (!value) continue; - - // Handle direct sorting parameters - if (typeof value === "string") { - sorting.push([key, { ascending: value !== SortOrder.descending }]); - continue; - } - - // Handle nested sorting options - // FIXME: This is brittle. 
We should find a way to generalize this - if ( - value instanceof HypercertSortOptions || - value instanceof FractionSortOptions || - value instanceof ContractSortOptions || - value instanceof AttestationSortOptions || - value instanceof MetadataSortOptions || - value instanceof AttestationSchemaSortOptions - ) { - for (const [column, direction] of Object.entries(value)) { - if (!column || !direction) continue; - sorting.push([ - `${key}.${column}`, - { ascending: direction !== SortOrder.descending }, - ]); - } - } - } - - query = sorting.reduce((acc, [column, options]) => { - return acc.order(column, options); - }, query); - - return query as unknown as QueryType; -}; diff --git a/src/index.ts b/src/index.ts index 98b19b3f..ecb50be6 100644 --- a/src/index.ts +++ b/src/index.ts @@ -5,11 +5,14 @@ import cors from "cors"; import { getRequiredEnvVar } from "./utils/envVars.js"; import { yoga } from "./client/graphql.js"; import swaggerUi from "swagger-ui-express"; -import swaggerJson from "./__generated__/swagger.json" assert { type: "json" }; +import swaggerJson from "./__generated__/swagger.json" with { type: "json" }; import { RegisterRoutes } from "./__generated__/routes/routes.js"; import * as Sentry from "@sentry/node"; import SignatureRequestProcessorCron from "./cron/SignatureRequestProcessing.js"; import OrderInvalidationCronjob from "./cron/OrderInvalidation.js"; +import { container } from "tsyringe"; +import { ENABLE_CRON_JOBS } from "./utils/constants.js"; +import { SupabaseRealtimeManager } from "./client/supabase.js"; // @ts-expect-error BigInt is not supported by JSON BigInt.prototype.toJSON = function () { @@ -46,12 +49,21 @@ RegisterRoutes(app); Sentry.setupExpressErrorHandler(app); // Start Safe signature request processing cron job -SignatureRequestProcessorCron.start(); -OrderInvalidationCronjob.start(); +if (ENABLE_CRON_JOBS) { + console.log("🚀 Starting cron jobs"); + SignatureRequestProcessorCron.start(); + const cronJob = 
container.resolve(OrderInvalidationCronjob); + cronJob.start(); +} else { + console.log("🚨 Cron jobs are disabled"); +} + +const supabaseRealtimeManager = container.resolve(SupabaseRealtimeManager); +await supabaseRealtimeManager.subscribeToEvents(); app.listen(PORT, () => { console.log( - `🕸️ Running a GraphQL API server at http://localhost:${PORT}/v1/graphql`, + `🕸️ Running a GraphQL API server at http://localhost:${PORT}/v2/graphql`, ); console.log(`🚀 Running Swagger docs at http://localhost:${PORT}/spec`); diff --git a/src/lib/db/queryModifiers/applyPagination.ts b/src/lib/db/queryModifiers/applyPagination.ts new file mode 100644 index 00000000..60779294 --- /dev/null +++ b/src/lib/db/queryModifiers/applyPagination.ts @@ -0,0 +1,57 @@ +import { SelectQueryBuilder, Selectable } from "kysely"; +import { SupportedDatabase } from "../../../services/database/strategies/QueryStrategy.js"; + +/** + * Type definition for pagination parameters + * @typeParam first - The maximum number of records to return (limit) + * @typeParam offset - The number of records to skip before starting to return results + */ +type PaginationArgs = { + first?: number; + offset?: number; +}; + +/** + * Applies pagination to a database query using limit and offset parameters + * + * @typeParam DB - The database type extending SupportedDatabases + * @typeParam T - The table name type (must be a key of DB and a string) + * @typeParam Args - The pagination arguments type extending PaginationArgs + * + * @param query - The Kysely SelectQueryBuilder instance to apply pagination to + * @param args - The pagination arguments containing optional first (limit) and offset values + * + * @returns The modified SelectQueryBuilder instance with pagination applied + * + * @remarks + * - If no 'first' parameter is provided, defaults to a limit of 100 records + * - If no 'offset' parameter is provided, starts from the beginning (offset 0) + * - Modifies and returns the input query builder instance + * - Note: 
Kysely query builders are immutable by design; each modifier call returns a new builder, which is why the result is reassigned + * + * @example + * ```typescript + * const query = db.selectFrom('users'); + * const paginatedQuery = applyPagination(query, { first: 10, offset: 20 }); + * ``` + */ +export function applyPagination< + DB extends SupportedDatabase, + T extends keyof DB & string, + Args extends PaginationArgs, +>( + query: SelectQueryBuilder>, + args: Args, +): SelectQueryBuilder> { + if (args.first) { + query = query.limit(args.first); + } else { + query = query.limit(100); // Default limit + } + + if (args.offset) { + query = query.offset(args.offset); + } + + return query; +} diff --git a/src/lib/db/queryModifiers/applySort.ts b/src/lib/db/queryModifiers/applySort.ts new file mode 100644 index 00000000..33e0dac0 --- /dev/null +++ b/src/lib/db/queryModifiers/applySort.ts @@ -0,0 +1,72 @@ +import { SelectQueryBuilder, Selectable } from "kysely"; +import { SortOrder } from "../../../graphql/schemas/enums/sortEnums.js"; +import { SupportedDatabase } from "../../../services/database/strategies/QueryStrategy.js"; + +/** + * Applies sorting to a query based on the provided arguments. + * This function processes each sort condition and applies them in sequence to the query. 
+ * + * @typeParam DB - The database type extending SupportedDatabases + * @typeParam T - The table name type (must be a key of DB and a string) + * @typeParam Args - The arguments type containing optional sortBy property + * + * @param query - The Kysely SelectQueryBuilder instance to apply sorting to + * @param args - The arguments containing sort conditions + * + * @returns The modified query with sorting applied + * + * @remarks + * - If no sort conditions are provided (args.sortBy is undefined), returns the original query + * - Null or undefined sort directions are filtered out + * - Sort conditions are applied in sequence, maintaining the order specified + * - TypeScript type checking should prevent invalid field names at compile time + * - SortOrder.ascending maps to 'asc', SortOrder.descending maps to 'desc' + * + * @example + * ```typescript + * const query = db.selectFrom('users'); + * const args = { + * sortBy: { + * name: SortOrder.ascending, + * created_at: SortOrder.descending + * } + * }; + * const result = applySort(query, args); + * ``` + */ +export function applySort< + DB extends SupportedDatabase, + T extends keyof DB & string, + Args extends { + sortBy?: { [K in keyof DB[T]]?: SortOrder | null | undefined }; + }, +>( + query: SelectQueryBuilder>, + args: Args, +): SelectQueryBuilder> { + if (!args.sortBy) { + return query; + } + + // Filter out null/undefined values + const sortEntries = Object.entries(args.sortBy).filter( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + ([_, direction]) => direction !== null && direction !== undefined, + ); + + if (sortEntries.length === 0) { + return query; + } + + let modifiedQuery = query; + + for (const [field, direction] of sortEntries) { + const orderDirection = direction === SortOrder.ascending ? 
"asc" : "desc"; + modifiedQuery = modifiedQuery.orderBy( + field as keyof DB[T] & string, + orderDirection, + ); + } + + return modifiedQuery; +} diff --git a/src/lib/db/queryModifiers/applyWhere.ts b/src/lib/db/queryModifiers/applyWhere.ts new file mode 100644 index 00000000..6b0bba4b --- /dev/null +++ b/src/lib/db/queryModifiers/applyWhere.ts @@ -0,0 +1,65 @@ +import { expressionBuilder, SelectQueryBuilder, Selectable } from "kysely"; +import { SupportedDatabase } from "../../../services/database/strategies/QueryStrategy.js"; +import { BaseQueryArgsType } from "../../graphql/BaseQueryArgs.js"; +import { SortOrder } from "../../../graphql/schemas/enums/sortEnums.js"; +import { + buildWhereCondition, + FilterValue, +} from "../../../lib/db/queryModifiers/buildWhereCondition.js"; + +/** + * Applies where conditions to a query based on the provided arguments. + * This function processes each condition in the where clause and applies them to the query. + * + * @typeParam DB - The database type extending SupportedDatabases + * @typeParam T - The table name type (must be a key of DB and a string) + * @typeParam Args - The arguments type extending BaseQueryArgsType + * + * @param tableName - The name of the table to query + * @param query - The Kysely SelectQueryBuilder instance to apply where conditions to + * @param args - The arguments containing where conditions + * + * @returns The modified query with where conditions applied + * + * @remarks + * - If no where conditions are provided (args.where is undefined), returns the original query + * - Each property in the where object is processed independently + * - Invalid conditions (those that return undefined from buildWhereCondition) are skipped + * - The conditions are applied in sequence using AND logic + * + * @example + * ```typescript + * const query = db.selectFrom('users'); + * const args = { + * where: { + * name: { eq: "John" }, + * age: { gt: 18 } + * } + * }; + * const result = applyWhere('users', query, 
args); + * ``` + */ +export function applyWhere< + DB extends SupportedDatabase, + T extends keyof DB & string, + // TODO: cleaner typing than object, object. We'd need to have a general where input type + Args extends BaseQueryArgsType< + object, + Record + >, +>( + tableName: T, + query: SelectQueryBuilder>, + args: Args, +): SelectQueryBuilder> { + if (!args.where) return query; + + return Object.entries(args.where).reduce((q, [column, value]) => { + const condition = buildWhereCondition( + tableName, + { [column]: value as FilterValue }, // Cast to FilterValue since we know the type from WhereArgs + expressionBuilder(q), + ); + return condition ? q.where(condition) : q; + }, query); +} diff --git a/src/lib/db/queryModifiers/buildWhereCondition.ts b/src/lib/db/queryModifiers/buildWhereCondition.ts new file mode 100644 index 00000000..807f93ba --- /dev/null +++ b/src/lib/db/queryModifiers/buildWhereCondition.ts @@ -0,0 +1,290 @@ +import { Expression, ExpressionBuilder, sql, SqlBool } from "kysely"; +import { SupportedDatabase } from "../../../services/database/strategies/QueryStrategy.js"; +import { getRelation, hasRelation } from "./tableRelations.js"; + +// Define more specific types for our filter values +type BaseFilterValue = string | number | bigint | boolean | undefined; +type ArrayFilterValue = Array; + +// Define valid filter operators +type FilterOperator = + | "eq" + | "gt" + | "gte" + | "lt" + | "lte" + | "contains" + | "startsWith" + | "endsWith" + | "in" + | "arrayContains" + | "arrayOverlaps"; + +type OperatorFilterValue = Partial< + Record +>; +type NestedFilterValue = Record; + +// Generic filter builder function type +type FilterBuilder = ( + tableName: string, + column: string, + value: BaseFilterValue | ArrayFilterValue, +) => Expression; + +/** + * The type for the filter value. 
+ * + * @example + * ```typescript + * const value: FilterValue = { eq: "123" }; + * const value: FilterValue = { id: { eq: "123" } }; + * const value: FilterValue = { id: { eq: "123" }, name: { contains: "John" } }; + * ``` + */ +export type FilterValue = + | BaseFilterValue + | NestedFilterValue + | ArrayFilterValue + | OperatorFilterValue; + +/** + * The type for the where filter. + * + * @example + * ```typescript + * const where: WhereFilter = { id: { eq: "123" } }; + * ``` + */ +export type WhereFilter = Record; + +/** + * Get the table prefix for a given column. We use this to handle nested relations where the displayed column is not the actual table name. + * + * @param column - The column name to get the prefix for + * @returns The table prefix for the given column + */ +const getTablePrefix = (column: string): string => { + switch (column) { + case "admins": + return "users"; + case "blueprints": + return "blueprints_with_admins"; + case "eas_schema": + return "supported_schemas"; + case "hypercert": + case "hypercerts": + return "claims"; + case "contract": + return "contracts"; + case "fractions": + return "fractions_view"; + default: + return column; + } +}; + +// Type guard for filter objects +const isFilterObject = (obj: unknown): obj is OperatorFilterValue => { + if (!obj || typeof obj !== "object") return false; + return Object.keys(obj).some((key) => key in filterBuilders); +}; + +// Type guard for nested filters +const isNestedFilter = (value: FilterValue): value is NestedFilterValue => + typeof value === "object" && + !Array.isArray(value) && + value !== null && + !isFilterObject(value); + +/** + * Filter builders for different operators + * + * @type {Record} + */ +// TODO: add support for negated filters +const filterBuilders: Record = { + eq: (tableName, column, value) => + sql`${sql.raw(`"${tableName}"."${column}"`)} = ${value}`, + gt: (tableName, column, value) => + sql`${sql.raw(`"${tableName}"."${column}"`)} > ${value}`, + gte: 
(tableName, column, value) => + sql`${sql.raw(`"${tableName}"."${column}"`)} >= ${value}`, + lt: (tableName, column, value) => + sql`${sql.raw(`"${tableName}"."${column}"`)} < ${value}`, + lte: (tableName, column, value) => + sql`${sql.raw(`"${tableName}"."${column}"`)} <= ${value}`, + contains: (tableName, column, value) => + sql`lower(${sql.raw(`"${tableName}"."${column}"`)}) like lower(${"%" + String(value) + "%"})`, + startsWith: (tableName, column, value) => + sql`lower(${sql.raw(`"${tableName}"."${column}"`)}) like lower(${String(value) + "%"})`, + endsWith: (tableName, column, value) => + sql`lower(${sql.raw(`"${tableName}"."${column}"`)}) like lower(${"%" + String(value)})`, + in: (tableName, column, value) => { + // Ensure value is an array and filter out any null/undefined values + const values = (Array.isArray(value) ? value : [value]).filter( + (v) => v != null, + ); + + // If no valid values, return null or a false condition + if (values.length === 0) { + return sql`1 = 0`; + } + + return sql`${sql.raw(`"${tableName}"."${column}"`)} IN (${sql.join( + values.map((v) => sql`${v}`), + sql`, `, + )})`; + }, + arrayContains: (tableName, column, value) => { + const values = Array.isArray(value) ? value : [value]; + return sql`${sql.raw(`"${tableName}"."${column}"`)} @> ARRAY[${sql.join( + values.map((v) => sql`${v}`), + sql`, `, + )}]`; + }, + arrayOverlaps: (tableName, column, value) => { + const values = Array.isArray(value) ? value : [value]; + return sql`${sql.raw(`"${tableName}"."${column}"`)} && ARRAY[${sql.join( + values.map((v) => sql`${v}`), + sql`, `, + )}]`; + }, +}; + +/** + * Builds a SQL WHERE condition for filtering database queries based on provided criteria. + * Supports basic comparisons, string operations, array operations, and nested relations. 
+ * + * @template DB - The database type extending SupportedDatabases + * @template T - The table name type (must be a key of DB) + * + * @param tableName - The name of the base table to query + * @param where - Filter conditions to apply. Can include: + * - Direct field comparisons (e.g., { id: { eq: 123 } }) + * - String operations (e.g., { name: { contains: "John" } }) + * - Array operations (e.g., { roles: { arrayContains: ["admin"] } }) + * - Nested relations (e.g., { company: { name: { eq: "Acme" } } }) + * @param eb - Kysely expression builder for the current query + * + * @returns An Expression that can be used in a WHERE clause, or undefined if no conditions + * + * @example + * ```typescript + * // Basic field comparison + * const condition = buildWhereCondition("users", { age: { gt: 18 } }, eb); + * + * // String operation + * const condition = buildWhereCondition("users", { name: { contains: "John" } }, eb); + * + * // Nested relation using default foreign key + * const condition = buildWhereCondition("users", { + * company: { name: { eq: "Acme" } } + * }, eb); + * + * // Nested relation using custom TABLE_RELATIONS join + * const condition = buildWhereCondition("claims", { + * fractions_view: { amount: { gt: 100 } } + * }, eb); + * ``` + * + * @remarks + * - For nested relations, it first checks TABLE_RELATIONS for custom join conditions + * - If no custom relation exists, falls back to default foreign key pattern (table_id) + * - Multiple conditions within the same level are combined with AND + * - Undefined values in filter conditions are ignored + */ +export function buildWhereCondition< + DB extends SupportedDatabase, + T extends keyof DB, +>( + tableName: T, + where: WhereFilter, + eb: ExpressionBuilder, +): Expression | undefined { + const conditions: Expression[] = []; + + Object.entries(where).forEach((entry) => { + const [key, value] = entry; + + if (!key || value === undefined) return; + + if (isFilterObject(value)) { + 
Object.entries(value).forEach(([operator, operandValue]) => { + if (operator in filterBuilders && operandValue !== undefined) { + conditions.push( + filterBuilders[operator as FilterOperator]( + tableName as string, + key, + operandValue as BaseFilterValue | ArrayFilterValue, + ), + ); + } + }); + } else if (isNestedFilter(value)) { + // Nested table filter (e.g., contract.chain_id) + const relatedTable = getTablePrefix(key); + const nestedConditions = buildWhereCondition( + relatedTable as T, + value as WhereFilter, + eb, + ); + + if (nestedConditions) { + if (hasRelation(tableName as string, relatedTable)) { + const relation = getRelation(tableName as string, relatedTable); + conditions.push( + sql`exists ( + select from ${sql.raw(`"${relatedTable}"`)} + where ${sql.raw(relation.joinCondition)} + and ${nestedConditions} + )`, + ); + } else if (tableName === "collections" && relatedTable === "users") { + // TODO: This is a hack to support the collections.users relation + // TODO: This should be removed once we have a proper relation in TABLE_RELATIONS or a view in the database + conditions.push( + sql`exists ( + select 1 from "users" + inner join "collection_admins" on "users".id = "collection_admins".user_id + inner join "collections" on "collections".id = "collection_admins".collection_id + and ${nestedConditions} + )`, + ); + } else if ( + tableName === "collections" && + relatedTable === "blueprints_with_admins" + ) { + // TODO: This is a hack to support the collections.blueprints relation + // TODO: This should be removed once we have a proper relation in TABLE_RELATIONS or a view in the database + conditions.push( + sql`exists ( + select from "blueprints_with_admins" + inner join "collection_blueprints" on "blueprints_with_admins".id = "collection_blueprints".blueprint_id + inner join "collections" on "collections".id = "collection_blueprints".collection_id + and ${nestedConditions} + )`, + ); + } else { + // Fall back to default foreign key pattern for 
standard relationships + conditions.push( + sql`exists ( + select from ${sql.raw(`"${relatedTable}"`)} + where ${sql.raw(`"${relatedTable}".id = "${tableName.toString()}".${relatedTable}_id`)} + and ${nestedConditions} + )`, + ); + } + } + } + }); + + // if conditions length is 0, return undefined + if (conditions.length === 0) return undefined; + + // if conditions length is 1, return the first condition + if (conditions.length === 1) return conditions[0]; + + // if conditions length is greater than 1, return the and of the conditions + return eb.and(conditions); +} diff --git a/src/lib/db/queryModifiers/queryModifiers.ts b/src/lib/db/queryModifiers/queryModifiers.ts new file mode 100644 index 00000000..085e528e --- /dev/null +++ b/src/lib/db/queryModifiers/queryModifiers.ts @@ -0,0 +1,122 @@ +import { SelectQueryBuilder, Selectable } from "kysely"; +import { SortOrder } from "../../../graphql/schemas/enums/sortEnums.js"; +import { SupportedDatabase } from "../../../services/database/strategies/QueryStrategy.js"; +import { BaseQueryArgsType } from "../../graphql/BaseQueryArgs.js"; +import { applyPagination } from "./applyPagination.js"; +import { applySort } from "./applySort.js"; +import { applyWhere } from "./applyWhere.js"; + +/** + * Type definition for a query modifier function. + * Query modifiers are functions that take a query and arguments and return a modified query. + * They are used to compose complex queries from simpler, reusable parts. 
+ * + * @typeParam DB - The database type extending SupportedDatabases + * @typeParam T - The table name type (must be a key of DB and a string) + * @typeParam Args - The arguments type containing query modification parameters + * + * @param query - The Kysely SelectQueryBuilder instance to modify + * @param args - The arguments containing modification parameters + * @returns The modified SelectQueryBuilder instance + * + * @example + * ```typescript + * const sortModifier: QueryModifier = (query, args) => { + * return args.sortBy ? query.orderBy(args.sortBy) : query; + * }; + * ``` + */ +export type QueryModifier< + DB extends SupportedDatabase, + T extends keyof DB & string, + Args, +> = ( + query: SelectQueryBuilder>, + args: Args, +) => SelectQueryBuilder>; + +/** + * Composes multiple query modifiers into a single function. + * The modifiers are applied in sequence, with each modifier receiving the query + * produced by the previous modifier. + * + * @typeParam DB - The database type extending SupportedDatabases + * @typeParam T - The table name type (must be a key of DB and a string) + * @typeParam Args - The arguments type containing query modification parameters + * + * @param modifiers - The query modifiers to compose, applied in order + * @returns A function that applies all modifiers in sequence + * + * @remarks + * - Modifiers are applied left to right + * - Each modifier receives the query produced by the previous modifier + * - If a modifier returns undefined or null, the original query is used + * - The args object is passed unchanged to each modifier + * + * @example + * ```typescript + * const fullModifier = composeQueryModifiers( + * applyWhere, + * applySort, + * applyPagination + * ); + * const result = fullModifier(query, { where: {...}, sortBy: {...} }); + * ``` + */ +export function composeQueryModifiers< + DB extends SupportedDatabase, + T extends keyof DB & string, + Args, +>(...modifiers: QueryModifier[]) { + return (query: 
SelectQueryBuilder>, args: Args) => + modifiers.reduce((q, modifier) => { + const result = modifier(q, args); + return result ?? q; // Fall back to previous query if modifier returns null/undefined + }, query); +} + +/** + * Creates a composed query modifier that applies where, sort, and pagination in a standard order. + * This is a convenience function that combines the most commonly used query modifiers. + * + * @typeParam DB - The database type extending SupportedDatabases + * @typeParam T - The table name type (must be a key of DB and a string) + * @typeParam Args - The arguments type extending BaseQueryArgsType + * + * @param tableName - The name of the table to query + * @returns A function that applies where, sort, and pagination modifiers in sequence + * + * @remarks + * - Modifiers are applied in this order: where → sort → pagination + * - Where conditions are applied first to filter the dataset + * - Sort is applied next to order the filtered results + * - Pagination is applied last to limit the final result set + * - Each modifier is optional and will be skipped if its args are not provided + * + * @example + * ```typescript + * const usersModifier = createStandardQueryModifier("users"); + * const result = usersModifier(query, { + * where: { active: true }, + * sortBy: { created_at: SortOrder.descending }, + * first: 10, + * offset: 0 + * }); + * ``` + */ +export function createStandardQueryModifier< + DB extends SupportedDatabase, + T extends keyof DB & string, + Args extends BaseQueryArgsType< + object, + { [K in keyof DB[T]]?: SortOrder | null | undefined } + >, +>(tableName: T) { + return composeQueryModifiers( + (query, args) => applyWhere(tableName, query, args), + applySort, + applyPagination, + ); +} + +export { applyPagination, applySort, applyWhere }; diff --git a/src/lib/db/queryModifiers/tableRelations.ts b/src/lib/db/queryModifiers/tableRelations.ts new file mode 100644 index 00000000..30248566 --- /dev/null +++ 
b/src/lib/db/queryModifiers/tableRelations.ts @@ -0,0 +1,90 @@ +/** + * Type representing a table relation configuration + */ +export interface TableRelation { + /** SQL condition for joining the tables */ + joinCondition: string; + /** Optional foreign key override if not following standard naming */ + foreignKey?: string; +} + +/** + * Type representing all possible relations for a table + */ +export type TableRelations = { + [tableName: string]: { + [relatedTable: string]: TableRelation; + }; +}; + +/** + * Database table relationship configurations + * Defines how tables are related to each other for nested queries + */ +export const TABLE_RELATIONS: TableRelations = { + metadata: { + claims: { + joinCondition: "metadata.uri = claims.uri", + }, + }, + claims: { + metadata: { + joinCondition: "claims.uri = metadata.uri", + }, + fractions_view: { + joinCondition: "claims.hypercert_id = fractions_view.hypercert_id", + }, + }, + claims_view: { + metadata: { + joinCondition: "metadata.uri = claims_view.uri", + }, + fractions_view: { + joinCondition: "fractions_view.hypercert_id = claims_view.hypercert_id", + }, + }, + sales: { + claims: { + joinCondition: "claims.hypercert_id = sales.hypercert_id", + }, + }, +} as const; + +/** + * Type guard to check if a relation exists between tables + */ +export function hasRelation( + tableName: string, + relatedTable: string, +): relatedTable is Extract< + keyof (typeof TABLE_RELATIONS)[typeof tableName], + string +> { + return ( + tableName in TABLE_RELATIONS && + relatedTable in (TABLE_RELATIONS[tableName] ?? 
{}) + ); +} + +/** + * Get the relation configuration between two tables + * @throws {Error} If relation doesn't exist + */ +export function getRelation( + tableName: string, + relatedTable: string, +): TableRelation { + if (!hasRelation(tableName, relatedTable)) { + throw new Error( + `No relation defined between ${tableName} and ${relatedTable}`, + ); + } + return TABLE_RELATIONS[tableName][relatedTable]; +} + +/** + * Default foreign key pattern if not specified in relation + */ +export function getDefaultForeignKey(relatedTable: string): string { + return `${relatedTable}_id`; +} diff --git a/src/lib/graphql/BaseQueryArgs.ts b/src/lib/graphql/BaseQueryArgs.ts new file mode 100644 index 00000000..57978f2a --- /dev/null +++ b/src/lib/graphql/BaseQueryArgs.ts @@ -0,0 +1,91 @@ +import { ArgsType, ClassType, Field, Int } from "type-graphql"; +import { SortOrder } from "../../graphql/schemas/enums/sortEnums.js"; +import { EntityTypeDefs } from "../../graphql/schemas/typeDefs/typeDefs.js"; +import { EntityFields } from "./createEntityArgs.js"; +import type { SortByArgsType } from "./createEntitySortArgs.js"; +import type { WhereArgsType } from "./createEntityWhereArgs.js"; + +/** + * Base type for GraphQL query arguments that supports filtering, sorting, and pagination. + * + * @typeParam TWhereInput - The type of the where clause input for filtering + * @typeParam TSortOptions - The type of the sort options for ordering results + */ +export type BaseQueryArgsType< + TWhereInput extends object, + TSortOptions extends Record, +> = { + /** Maximum number of items to return */ + first?: number; + /** Number of items to skip */ + offset?: number; + /** Filter conditions for the query */ + where?: TWhereInput; + /** Sorting options for the query results */ + sortBy?: TSortOptions; +}; + +/** + * Creates a GraphQL arguments class with support for filtering, sorting, and pagination. 
+ * This function generates a type-safe class that can be used as arguments in GraphQL queries. + * + * @param WhereArgs - The class type for filtering conditions + * @param SortArgs - The class type for sorting options + * + * @typeParam TEntity - The entity type definition + * @typeParam TFields - The entity fields configuration + * + * @returns A decorated class that can be used as GraphQL query arguments + * + * @example + * ```typescript + * // First create the entity args + * const { WhereInput, SortOptions } = createEntityArgs("Attestation", { + * id: "string", + * claim: "string", + * timestamp: "number", + * }); + * + * // Create a named args class extending BaseQueryArgs + * @ArgsType() + * export class GetAttestationsArgs extends BaseQueryArgs( + * AttestationWhereInput, + * AttestationSortOptions, + * ) {} + * + * // Use in a resolver + * @Resolver() + * class AttestationResolver { + * @Query(() => [Attestation]) + * async attestations(@Args() args: GetAttestationsArgs) { + * // Implementation using args.where, args.sortBy, args.first, args.offset + * } + * } + * ``` + */ +export function BaseQueryArgs< + TEntity extends EntityTypeDefs, + TFields extends EntityFields, +>( + WhereArgs: ClassType>, + SortArgs: ClassType>, +) { + @ArgsType() + class QueryArgs { + @Field(() => WhereArgs, { nullable: true }) + where?: WhereArgsType; + + @Field(() => SortArgs, { nullable: true }) + sortBy?: SortByArgsType; + + @Field(() => Int, { nullable: true }) + first?: number; + + @Field(() => Int, { nullable: true }) + offset?: number; + } + + return QueryArgs as ClassType< + BaseQueryArgsType, SortByArgsType> + >; +} diff --git a/src/lib/graphql/DataResponse.ts b/src/lib/graphql/DataResponse.ts new file mode 100644 index 00000000..7ecc97a1 --- /dev/null +++ b/src/lib/graphql/DataResponse.ts @@ -0,0 +1,76 @@ +import { type ClassType, Field, Int, ObjectType } from "type-graphql"; + +/** + * Creates a GraphQL object type that wraps a list of items with pagination 
metadata. + * This is a generic response type for queries that return paginated lists of items. + * + * @template T - The type of items in the response + * @param TItemClass - The class type of items to be wrapped + * @returns An abstract class decorated as a GraphQL object type with data and count fields + * + * @example + * ```typescript + * // Define your base type + * @ObjectType() + * class UserBaseType { + * @Field() + * id: string; + * + * @Field() + * name: string; + * } + * + * // Define your main type with additional fields/relations + * @ObjectType({ + * description: "User entity with related data" + * }) + * class User extends UserBaseType { + * @Field(() => [String], { + * description: "List of roles assigned to the user" + * }) + * roles?: string[]; + * } + * + * // Create the response type for paginated results + * @ObjectType() + * export default class GetUsersResponse extends DataResponse(User) {} + * + * // Use in a resolver + * @Resolver(() => User) + * class UserResolver { + * constructor( + * @inject(UserService) + * private userService: UserService, + * ) {} + * + * @Query(() => GetUsersResponse) + * async users(@Args() args: GetUsersArgs): Promise { + * return await this.userService.getUsers(args); + * } + * } + * ``` + */ +export function DataResponse(TItemClass: ClassType) { + /** + * Abstract class representing a paginated response containing a list of items. + * This class is automatically decorated as a GraphQL object type. + */ + @ObjectType() + abstract class DataResponseClass { + /** + * The list of items in the response. + * Can be null/undefined if no items are found. + */ + @Field(() => [TItemClass], { nullable: true }) + data?: T[]; + + /** + * The total count of items. + * Can be null/undefined if count is not available or relevant. 
+ */ + @Field(() => Int, { nullable: true }) + count?: number; + } + + return DataResponseClass; +} diff --git a/src/lib/graphql/TypeRegistry.ts b/src/lib/graphql/TypeRegistry.ts new file mode 100644 index 00000000..fd715acc --- /dev/null +++ b/src/lib/graphql/TypeRegistry.ts @@ -0,0 +1,128 @@ +import "reflect-metadata"; +import { ClassType } from "type-graphql"; +import { container, singleton } from "tsyringe"; +import { EntityTypeDefs } from "../../graphql/schemas/typeDefs/typeDefs.js"; + +/** + * Registry for managing GraphQL input types across the application. + * + * @description + * The TypeRegistry ensures that we only create one instance of each GraphQL input type + * for a given type name. This is crucial because GraphQL schema generation will fail if + * there are duplicate type definitions. + * + * The registry maintains separate caches for: + * - WhereInput types (used for filtering) + * - SortOptions types (used for sorting) + * + * @example + * ```typescript + * import { container } from 'tsyringe'; + * + * // Get the singleton instance + * const registry = container.resolve(TypeRegistry); + * ``` + */ +@singleton() +export class TypeRegistry { + private whereInput: Map>; + private sortOptions: Map>; + + /** + * Creates a new instance of the registry with empty caches. + */ + constructor() { + this.whereInput = new Map>(); + this.sortOptions = new Map>(); + } + + /** + * Clears all cached types from the registry. + */ + clear(): void { + this.whereInput.clear(); + this.sortOptions.clear(); + } + + /** + * Gets an existing WhereInput type from the registry or creates a new one. + * + * @description + * This method ensures that we only create one WhereInput type for each type name. + * If a type already exists for the given name, it is returned. + * Otherwise, the creator function is called to create a new type. 
+ * + * @template T - The type of the WhereInput class instance + * @param typeName - The entity type (must be a valid EntityTypeDefs value) + * @param creator - Function that creates the WhereInput type if it doesn't exist + * @returns The WhereInput type for the given name + * @throws {Error} If the type cannot be found after creation attempt + * + * @example + * ```typescript + * const WhereInput = registry.getOrCreateWhereInput>( + * EntityTypeDefs.Contract, + * () => createEntityWhereArgs(EntityTypeDefs.Contract, { + * address: "string", + * chain_id: "number" + * }) + * ); + * ``` + */ + getOrCreateWhereInput( + typeName: EntityTypeDefs, + creator: () => ClassType, + ): ClassType { + if (!this.whereInput.has(typeName)) { + this.whereInput.set(typeName, creator()); + } + + const strategy = this.whereInput.get(typeName); + if (!strategy) { + throw new Error(`WhereInput not found for type ${typeName}`); + } + return strategy as ClassType; + } + + /** + * Gets an existing SortOptions type from the registry or creates a new one. + * + * @description + * This method ensures that we only create one SortOptions type for each type name. + * If a type already exists for the given name, it is returned. + * Otherwise, the creator function is called to create a new type. 
+ * + * @template T - The type of the SortOptions class instance + * @param typeName - The entity type (must be a valid EntityTypeDefs value) + * @param creator - Function that creates the SortOptions type if it doesn't exist + * @returns The SortOptions type for the given name + * @throws {Error} If the type cannot be found after creation attempt + * + * @example + * ```typescript + * const SortOptions = registry.getOrCreateSortOptions>( + * EntityTypeDefs.Contract, + * () => createEntitySortArgs(EntityTypeDefs.Contract, { + * address: "string", + * chain_id: "number" + * }) + * ); + * ``` + */ + getOrCreateSortOptions( + typeName: EntityTypeDefs, + creator: () => ClassType, + ): ClassType { + if (!this.sortOptions.has(typeName)) { + this.sortOptions.set(typeName, creator()); + } + + const strategy = this.sortOptions.get(typeName); + if (!strategy) { + throw new Error(`SortOptions not found for type ${typeName}`); + } + return strategy as ClassType; + } +} + +export const registry = container.resolve(TypeRegistry); diff --git a/src/lib/graphql/createEntityArgs.ts b/src/lib/graphql/createEntityArgs.ts new file mode 100644 index 00000000..d860ee8d --- /dev/null +++ b/src/lib/graphql/createEntityArgs.ts @@ -0,0 +1,174 @@ +import { EntityTypeDefs } from "../../graphql/schemas/typeDefs/typeDefs.js"; +import { SearchOptionMap } from "../../types/argTypes.js"; +import { + createEntitySortArgs, + type SortOptions, +} from "./createEntitySortArgs.js"; +import { createEntityWhereArgs } from "./createEntityWhereArgs.js"; +import { registry } from "./TypeRegistry.js"; + +/** + * Represents the primitive field types that can be used in entity definitions. + * These types map directly to the search options available in SearchOptionMap. + */ +export type BaseFieldType = keyof typeof SearchOptionMap; + +/** + * Represents a reference to another entity in the schema. + * References must have a type (usually "id") and specify the referenced entity and its fields. 
+ * + * @example + * ```typescript + * const referenceDefinition: BaseReferenceDefinition = { + * type: "id", + * references: { + * entity: EntityTypeDefs.Metadata, + * fields: { name: "string" } + * } + * }; + * ``` + */ +export type BaseReferenceDefinition = { + type: Exclude; + references: { + entity: EntityTypeDefs; + fields: Record; + }; +}; + +/** + * Represents the structure of entity fields. + * Each field can be either a primitive type (string, number, etc.) or a reference to another entity. + */ +export type EntityFields = Record< + string, + BaseFieldType | BaseReferenceDefinition +>; + +/** + * A strongly-typed version of BaseReferenceDefinition that enforces field types. + * + * @template TFields - The type of fields in the referenced entity + * @template TRefEntity - The type of the referenced entity (must be in EntityTypeDefs) + */ +export type ReferenceDefinition< + TFields extends EntityFields, + TRefEntity extends EntityTypeDefs = EntityTypeDefs, +> = { + type: Exclude; + references: { + entity: TRefEntity; + fields: TFields; + }; +}; + +/** + * Maps field definitions to their appropriate types. + * Handles both primitive fields and reference fields with proper type inference. + * + * @template TFields - The type of fields being defined + */ +export type FieldDefinition = { + [K in keyof TFields]: TFields[K] extends BaseFieldType + ? TFields[K] + : TFields[K] extends BaseReferenceDefinition + ? ReferenceDefinition + : never; +}; + +/** + * Type guard to check if a definition is a reference definition. 
+ * + * @param def - The definition to check + * @returns True if the definition is a valid reference definition + */ +export function isReferenceDefinition( + def: unknown, +): def is BaseReferenceDefinition { + return ( + typeof def === "object" && + def !== null && + "references" in def && + "type" in def && + typeof (def as BaseReferenceDefinition).type === "string" && + (def as BaseReferenceDefinition).type in SearchOptionMap + ); +} + +/** + * Maps a base field type to its corresponding filter type. + * Used to create the appropriate filter options for each field type. + * + * @template T - The base field type to map + */ +type FilterTypeMap = + T extends keyof typeof SearchOptionMap + ? Partial> + : never; + +/** + * Creates GraphQL input types for entity filtering and sorting. + * + * @description + * This function generates two classes: + * 1. A WhereInput class for filtering entities based on their fields + * 2. A SortOptions class for specifying sort order of results + * + * The generated classes can support both primitive fields and nested reference fields. However, + * the current implementation does not support nested reference fields in sort options. + * Classes are cached in the registry to prevent unnecessary re-creation of the same classes. 
+ * + * @example + * ```typescript + * const { WhereInput, SortOptions } = createEntityArgs(EntityTypeDefs.Hypercert, { + * token_id: "bigint", + * metadata: { + * type: "id", + * references: { + * entity: EntityTypeDefs.Metadata, + * fields: { name: "string" } + * } + * } + * }); + * + * const filter = new WhereInput(); + * filter.token_id = { eq: 1 }; + * filter.metadata = { name: { contains: "test" } }; + * + * const sort = new SortOptions(); + * sort.token_id = "ascending"; + * ``` + * + * @param entityName - The name of the entity (must be a valid EntityTypeDefs value) + * @param fieldDefinitions - Object defining the fields and their types for the entity + * @returns An object containing the WhereInput and SortOptions classes + * + * @remarks + * - Generated classes are cached in the registry + * - Same entity name will return the same class instances + * - Supports primitive fields (string, number, bigint) and nested references + * - All filter fields are optional + * - All sort fields are nullable + */ +export function createEntityArgs< + TEntity extends EntityTypeDefs, + TFields extends EntityFields, +>(entityName: TEntity, fieldDefinitions: FieldDefinition) { + // Cast fieldDefinitions to TFields since we know they are compatible + const fields = fieldDefinitions as unknown as TFields; + + const WhereInput = registry.getOrCreateWhereInput(entityName, () => + createEntityWhereArgs(entityName, fields), + ); + const SortOptions = registry.getOrCreateSortOptions>( + entityName, + () => createEntitySortArgs(entityName, fields), + ); + + return { + WhereInput, + SortOptions, + } as const; +} + +export { type FilterTypeMap }; diff --git a/src/lib/graphql/createEntitySortArgs.ts b/src/lib/graphql/createEntitySortArgs.ts new file mode 100644 index 00000000..c07527ee --- /dev/null +++ b/src/lib/graphql/createEntitySortArgs.ts @@ -0,0 +1,86 @@ +import { ClassType, Field, InputType } from "type-graphql"; +import { SortOrder } from 
"../../graphql/schemas/enums/sortEnums.js"; +import { EntityTypeDefs } from "../../graphql/schemas/typeDefs/typeDefs.js"; +import { BaseFieldType, EntityFields } from "./createEntityArgs.js"; + +/** + * Type representing sort options for entity fields. + * Maps field names to their sort order, but only for primitive fields. + * @template T - The entity fields type + */ +export type SortOptions = { + [K in keyof T as T[K] extends BaseFieldType ? K : never]?: SortOrder | null; +}; + +/** + * Type alias for sort arguments, used for type consistency. + * @template T - The entity fields type + */ +export type SortByArgsType = SortOptions; + +/** + * Creates a GraphQL input type class for sorting entity fields. + * + * @description + * This function generates a class that can be used to specify sort options for entity queries. + * The generated class will have fields corresponding to the primitive fields in the field definitions, + * where each field can be set to either ascending, descending, or null. 
+ * + * @example + * ```typescript + * const SortArgs = createEntitySortArgs(EntityTypeDefs.Contract, { + * address: "string", + * chain_id: "number" + * }); + * + * const instance = new SortArgs(); + * instance.address = SortOrder.ascending; + * instance.chain_id = SortOrder.descending; + * ``` + * + * @param entityName - The name of the entity (must be a valid EntityTypeDefs value) + * @param fieldDefinitions - Object defining the fields and their types for the entity + * @returns A class that can be used as a GraphQL input type for sorting + * + * @remarks + * - Only primitive fields (string, number, bigint) will be included in the sort options + * - Complex fields (objects, references) will be excluded + * - All sort fields are nullable and default to null + * - The generated class name will be `${entityName}SortOptions` + */ +export function createEntitySortArgs< + TEntity extends EntityTypeDefs, + TFields extends EntityFields, +>(entityName: TEntity, fieldDefinitions: TFields) { + @InputType(`${entityName}SortOptions`) + class EntitySortOptions { + constructor() { + // Initialize all fields with default sort order (null) + Object.entries(fieldDefinitions).forEach(([key, definition]) => { + if (typeof definition === "string") { + Object.defineProperty(this, key, { + value: null, + writable: true, + enumerable: true, + }); + } + }); + } + } + + // Add field decorators for each sortable field + Object.entries(fieldDefinitions).forEach(([key, definition]) => { + if (typeof definition === "string") { + Field(() => SortOrder, { nullable: true })( + EntitySortOptions.prototype, + key, + ); + } + }); + + Object.defineProperty(EntitySortOptions, "name", { + value: `${entityName}SortOptions`, + }); + + return EntitySortOptions as ClassType>; +} diff --git a/src/lib/graphql/createEntityWhereArgs.ts b/src/lib/graphql/createEntityWhereArgs.ts new file mode 100644 index 00000000..144fb226 --- /dev/null +++ b/src/lib/graphql/createEntityWhereArgs.ts @@ -0,0 +1,246 @@ 
+import { ClassType, InputType } from "type-graphql"; + +import { Field } from "type-graphql"; + +import { EntityTypeDefs } from "../../graphql/schemas/typeDefs/typeDefs.js"; +import { SearchOptionMap } from "../../types/argTypes.js"; +import { + BaseFieldType, + BaseReferenceDefinition, + EntityFields, + FilterTypeMap, + isReferenceDefinition, +} from "./createEntityArgs.js"; +import { registry } from "./TypeRegistry.js"; +/** + * Type representing where clause arguments for entity fields. + * Maps field names to their filter types, handling both primitive and reference fields. + * @template TEntity - The entity type as defined in EntityTypeDefs + * @template TFields - The entity fields type + */ +export type WhereArgsType< + TEntity extends EntityTypeDefs, + TFields extends EntityFields, +> = { + [K in keyof TFields]?: TFields[K] extends BaseFieldType + ? FilterTypeMap + : TFields[K] extends BaseReferenceDefinition + ? WhereArgsType + : never; +}; + +/** + * Creates a unique name for a where input type based on its context. + * + * @param entity - The entity type + * @param context - Optional context string to create unique names for nested types + * @returns A unique name for the where input type + * + * @example + * ```typescript + * createTypeName(EntityTypeDefs.Contract) // "ContractWhereInput" + * createTypeName(EntityTypeDefs.Metadata, "Contract") // "ContractMetadataWhereInput" + * ``` + */ +function createTypeName(entity: EntityTypeDefs, context?: string): string { + // If there's no context, just return the entity name with WhereInput + if (!context) { + return `${entity}WhereInput`; + } + + // Remove the WhereInput suffix from the context if it exists + const cleanContext = context.replace(/WhereInput$/, ""); + + // Create the name with context before entity + return `${cleanContext}${entity}WhereInput`; +} + +/** + * Creates a GraphQL input type class for entity filtering. 
+ * + * @description + * This function generates a class that can be used to specify filter conditions for entity queries. + * The generated class supports both primitive fields (string, number, bigint) and nested reference fields. + * Each field can be filtered using type-specific operators (e.g., eq, contains, gt, lt). + * + * @example + * ```typescript + * const WhereArgs = createEntityWhereArgs(EntityTypeDefs.Contract, { + * address: "string", + * chain_id: "number", + * metadata: { + * type: "id", + * references: { + * entity: EntityTypeDefs.Metadata, + * fields: { name: "string" } + * } + * } + * }); + * + * const instance = new WhereArgs(); + * instance.address = { contains: "0x123" }; + * instance.chain_id = { eq: 1 }; + * instance.metadata.name = { contains: "Test" }; + * ``` + * + * @param entityName - The name of the entity (must be a valid EntityTypeDefs value) + * @param fieldDefinitions - Object defining the fields and their types for the entity + * @param context - Optional context string for creating unique names for nested types + * @returns A class that can be used as a GraphQL input type for filtering + * + * @remarks + * - Supports primitive fields (string, number, bigint) with type-specific filter operators + * - Handles nested reference fields by creating separate where input types + * - All fields are nullable and default to undefined + * - Validates field types against SearchOptionMap at creation time + * - Creates unique names for nested types using context + * - Registers created types in the TypeRegistry for future reference + * + * @throws {Error} If a field type is not found in SearchOptionMap + * @throws {Error} If a nested class cannot be found during creation + */ +export function createEntityWhereArgs< + TEntity extends EntityTypeDefs, + TFields extends EntityFields, +>( + entityName: TEntity, + fieldDefinitions: TFields, + context?: string, +): ClassType> { + // Add validation at the start + 
Object.entries(fieldDefinitions).forEach(([key, definition]) => { + if (typeof definition === "string" && !(definition in SearchOptionMap)) { + throw new Error(`Invalid field type "${definition}" for field "${key}"`); + } + }); + + // Create a map to store all classes that need to be created + const classesToCreate = new Map< + string, + { entity: EntityTypeDefs; fields: EntityFields; context?: string } + >(); + + // First pass: collect all classes that need to be created + function collectClassesToCreate( + entity: EntityTypeDefs, + fields: EntityFields, + context?: string, + ) { + // Create a unique name for this type + const typeName = createTypeName(entity, context); + + // Add this class to the map if not already present + if (!classesToCreate.has(typeName)) { + classesToCreate.set(typeName, { entity, fields, context }); + } + + // Recursively collect nested classes + // eslint-disable-next-line @typescript-eslint/no-unused-vars + Object.entries(fields).forEach(([_, definition]) => { + if (typeof definition === "object" && isReferenceDefinition(definition)) { + const nestedEntity = definition.references.entity; + const nestedFields = definition.references.fields; + + // Recursively collect nested classes with the current type name as context + collectClassesToCreate(nestedEntity, nestedFields, typeName); + } + }); + } + + // Collect all classes that need to be created + collectClassesToCreate(entityName, fieldDefinitions, context); + + // Second pass: create all classes from deepest to shallowest + // This ensures that when we create a class, all its dependencies are already created + const createdClasses = new Map>(); + + // Create classes in reverse order (deepest first) + Array.from(classesToCreate.entries()) + .reverse() + .forEach(([typeName, { fields }]) => { + if (!createdClasses.has(typeName)) { + // Create the class + @InputType(typeName) + class EntityWhereInput { + // TODO remover any declarations in this file + // eslint-disable-next-line 
@typescript-eslint/no-explicit-any + [key: string]: any; + + constructor() { + Object.entries(fields).forEach(([key, definition]) => { + if ( + typeof definition === "object" && + isReferenceDefinition(definition) + ) { + const nestedEntity = definition.references.entity; + const nestedTypeName = createTypeName(nestedEntity, typeName); + const NestedClass = createdClasses.get(nestedTypeName); + if (NestedClass) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (this as any)[key] = new NestedClass(); + } else { + throw new Error(`Class for ${nestedTypeName} not found`); + } + } else { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (this as any)[key] = undefined; + } + }); + } + } + + // Define properties on the prototype + Object.entries(fields).forEach(([key]) => { + Object.defineProperty(EntityWhereInput.prototype, key, { + enumerable: true, + writable: true, + value: undefined, + }); + }); + + // Set the class name + Object.defineProperty(EntityWhereInput, "name", { + value: typeName, + }); + + // Apply field decorators + Object.entries(fields).forEach(([key, definition]) => { + if (typeof definition === "string") { + Field(() => SearchOptionMap[definition as BaseFieldType], { + nullable: true, + })(EntityWhereInput.prototype, key); + } else if (definition && isReferenceDefinition(definition)) { + const nestedEntity = definition.references.entity; + const nestedTypeName = createTypeName(nestedEntity, typeName); + const NestedClass = createdClasses.get(nestedTypeName); + if (NestedClass) { + Field(() => NestedClass, { nullable: true })( + EntityWhereInput.prototype, + key, + ); + } + } + }); + + // Store the created class + createdClasses.set(typeName, EntityWhereInput); + + // Also register it in the registry using the public method + // This ensures the class is available for future references + registry.getOrCreateWhereInput( + typeName as EntityTypeDefs, + () => EntityWhereInput, + ); + } + }); + + // Return the class for 
the requested entity + const result = createdClasses.get(createTypeName(entityName, context)); + if (!result) { + throw new Error( + `Class for ${createTypeName(entityName, context)} not found`, + ); + } + + return result as ClassType>; +} diff --git a/src/lib/graphql/whereFieldDefinitions.ts b/src/lib/graphql/whereFieldDefinitions.ts new file mode 100644 index 00000000..3efcf400 --- /dev/null +++ b/src/lib/graphql/whereFieldDefinitions.ts @@ -0,0 +1,197 @@ +/** + * Defines the field types for filtering entities in GraphQL queries. + * This constant provides a schema-like structure that maps entity types to their + * filterable fields and their corresponding data types. + * + * Each entity (like Attestation, Blueprint, Collection, etc.) has a fields object + * that defines what properties can be used in where clauses and their expected types. + * This is useful for: + * - Type checking in GraphQL queries + * - Building dynamic filters + * - Validating query parameters + * - Generating TypeScript types for query builders + * + * @example + * // Structure for each entity: + * // EntityName: { + * // fields: { + * // fieldName: "fieldType" + * // } + * // } + */ +// TODO: key values can be keyof EntityTypeDefs +export const WhereFieldDefinitions = { + AllowlistRecord: { + fields: { + hypercert_id: "string", + token_id: "bigint", + leaf: "string", + entry: "number", + user_address: "string", + claimed: "boolean", + proof: "stringArray", + units: "bigint", + total_units: "bigint", + root: "string", + }, + }, + Attestation: { + fields: { + id: "string", + uid: "string", + creation_block_timestamp: "bigint", + creation_block_number: "bigint", + last_update_block_number: "bigint", + last_update_block_timestamp: "bigint", + attester: "string", + recipient: "string", + resolver: "string", + supported_schemas_id: "string", + contract_address: "string", + }, + }, + AttestationSchema: { + fields: { + id: "string", + chain_id: "number", + uid: "string", + resolver: "string", + 
revocable: "boolean", + }, + }, + Blueprint: { + fields: { + id: "number", + created_at: "string", + minter_address: "string", + minted: "boolean", + admin_address: "string", + }, + }, + Collection: { + fields: { + id: "string", + name: "string", + description: "string", + created_at: "string", + }, + }, + Contract: { + fields: { + id: "string", + contract_address: "string", + chain_id: "bigint", + }, + }, + Fraction: { + fields: { + id: "string", + creation_block_timestamp: "bigint", + creation_block_number: "bigint", + last_update_block_number: "bigint", + last_update_block_timestamp: "bigint", + owner_address: "string", + units: "bigint", + hypercert_id: "string", + fraction_id: "string", + token_id: "bigint", + burned: "boolean", + }, + }, + Hypercert: { + fields: { + id: "string", + hypercert_id: "string", + creator_address: "string", + token_id: "bigint", + units: "bigint", + creation_block_timestamp: "bigint", + last_update_block_timestamp: "bigint", + last_update_block_number: "bigint", + creation_block_number: "bigint", + sales_count: "number", + attestations_count: "number", + uri: "string", + burned: "boolean", + }, + }, + Hyperboard: { + fields: { + id: "string", + chain_ids: "numberArray", + admin_address: "string", + }, + }, + Metadata: { + fields: { + id: "string", + name: "string", + description: "string", + uri: "string", + allow_list_uri: "string", + contributors: "stringArray", + external_url: "string", + impact_scope: "stringArray", + rights: "stringArray", + work_scope: "stringArray", + work_timeframe_from: "bigint", + work_timeframe_to: "bigint", + impact_timeframe_from: "bigint", + impact_timeframe_to: "bigint", + }, + }, + Order: { + fields: { + id: "string", + hypercert_id: "string", + createdAt: "string", + quoteType: "number", + globalNonce: "string", + orderNonce: "string", + strategyId: "number", + collectionType: "number", + collection: "string", + currency: "string", + signer: "string", + startTime: "number", + endTime: "number", + 
price: "string", + chainId: "bigint", + subsetNonce: "number", + itemIds: "stringArray", + amounts: "numberArray", + invalidated: "boolean", + }, + }, + Sale: { + fields: { + id: "string", + buyer: "string", + seller: "string", + strategy_id: "number", + currency: "string", + collection: "string", + item_ids: "stringArray", + hypercert_id: "string", + amounts: "numberArray", + transaction_hash: "string", + creation_block_number: "bigint", + creation_block_timestamp: "bigint", + }, + }, + User: { + fields: { + id: "string", + address: "string", + display_name: "string", + chain_id: "number", + }, + }, +} as const; + +/** + * Type definition for the WhereFieldDefinitions constant. + * This type is used to ensure type safety when working with field definitions + * and can be used to extract field types for specific entities. + */ +export type WhereFieldDefinition = typeof WhereFieldDefinitions; diff --git a/src/lib/marketplace/EOACreateOrderStrategy.ts b/src/lib/marketplace/EOACreateOrderStrategy.ts index 2b8c74fe..34496992 100644 --- a/src/lib/marketplace/EOACreateOrderStrategy.ts +++ b/src/lib/marketplace/EOACreateOrderStrategy.ts @@ -1,5 +1,6 @@ import { HypercertExchangeClient, + OrderValidatorCode, utils, } from "@hypercerts-org/marketplace-sdk"; import { verifyTypedData } from "ethers"; @@ -10,14 +11,27 @@ import { getFractionsById } from "../../utils/getFractionsById.js"; import { getHypercertTokenId } from "../../utils/tokenIds.js"; import { MarketplaceStrategy } from "./MarketplaceStrategy.js"; -import { EOACreateOrderRequest } from "./schemas.js"; +import type { EOACreateOrderRequest } from "./schemas.js"; import * as Errors from "./errors.js"; +import { inject, injectable } from "tsyringe"; +import { MarketplaceOrdersService } from "../../services/database/entities/MarketplaceOrdersEntityService.js"; +@injectable() export default class EOACreateOrderStrategy extends MarketplaceStrategy { - constructor(private readonly request: Omit) { + private request!: 
Omit; + + constructor( + @inject(MarketplaceOrdersService) + private readonly marketplaceOrdersService: MarketplaceOrdersService, + ) { super(); } + initialize(request: Omit): this { + this.request = request; + return this; + } + // TODO: Clean up this long ass method. I copied it 1:1 from the controller. async executeCreate(): Promise> { const { signature, chainId, ...makerOrder } = this.request; @@ -41,9 +55,15 @@ export default class EOACreateOrderStrategy extends MarketplaceStrategy { } const [validationResult] = await hec.checkOrdersValidity([ - { ...makerOrder, signature, chainId, id: "temporary" }, + { + ...makerOrder, + signature, + chainId, + id: "temporary", + }, ]); - if (!validationResult.valid) { + if (!this.evaluateOrderValidationResult(validationResult)) { + // Check if only error code is TOO_EARLY_TO_EXECUTE_ORDER throw new Errors.InvalidOrder(validationResult); } @@ -84,19 +104,40 @@ export default class EOACreateOrderStrategy extends MarketplaceStrategy { }; console.log("[marketplace-api] Inserting order entity", insertEntity); - const result = await this.dataService.storeOrder(insertEntity); + const result = await this.marketplaceOrdersService.storeOrder(insertEntity); return this.returnSuccess( "Added order to database", - result.data + result ? 
{ - ...result.data, - itemIds: result.data.itemIds as string[], - amounts: result.data.amounts as number[], + ...result, + itemIds: result.itemIds as string[], + amounts: result.amounts as number[], status: "VALID", hash: "0x", } : null, ); } + + evaluateOrderValidationResult(validationResult: { + valid: boolean; + validatorCodes: OrderValidatorCode[]; + }) { + if (validationResult.valid) { + return true; + } + + if ( + validationResult.validatorCodes + .filter( + (code) => code !== OrderValidatorCode.TOO_EARLY_TO_EXECUTE_ORDER, + ) + .every((code) => code === OrderValidatorCode.ORDER_EXPECTED_TO_BE_VALID) + ) { + return true; + } + + return false; + } } diff --git a/src/lib/marketplace/MarketplaceStrategy.ts b/src/lib/marketplace/MarketplaceStrategy.ts index a95ff4f9..d55112fd 100644 --- a/src/lib/marketplace/MarketplaceStrategy.ts +++ b/src/lib/marketplace/MarketplaceStrategy.ts @@ -1,13 +1,7 @@ +import { OrderValidatorCode } from "@hypercerts-org/marketplace-sdk"; import { DataResponse } from "../../types/api.js"; -import { SupabaseDataService } from "../../services/SupabaseDataService.js"; export abstract class MarketplaceStrategy { - protected readonly dataService: SupabaseDataService; - - constructor() { - this.dataService = new SupabaseDataService(); - } - abstract executeCreate(): Promise>; protected returnSuccess( @@ -16,4 +10,9 @@ export abstract class MarketplaceStrategy { ): DataResponse { return { success: true, message, data }; } + + abstract evaluateOrderValidationResult(validationResult: { + valid: boolean; + validatorCodes: OrderValidatorCode[]; + }): boolean; } diff --git a/src/lib/marketplace/MarketplaceStrategyFactory.ts b/src/lib/marketplace/MarketplaceStrategyFactory.ts index 9487c0db..6e5011c9 100644 --- a/src/lib/marketplace/MarketplaceStrategyFactory.ts +++ b/src/lib/marketplace/MarketplaceStrategyFactory.ts @@ -5,20 +5,23 @@ import { import { MarketplaceStrategy } from "./MarketplaceStrategy.js"; import EOACreateOrderStrategy from 
"./EOACreateOrderStrategy.js"; import MultisigCreateOrderStrategy from "./MultisigCreateOrderStrategy.js"; +import { container } from "tsyringe"; export function createMarketplaceStrategy({ type, ...request }: MultisigCreateOrderRequest | EOACreateOrderRequest): MarketplaceStrategy { switch (type) { - case "eoa": - return new EOACreateOrderStrategy( - request as Omit, - ); - case "multisig": - return new MultisigCreateOrderStrategy( - request as Omit, - ); + case "eoa": { + return container + .resolve(EOACreateOrderStrategy) + .initialize(request as Omit); + } + case "multisig": { + return container + .resolve(MultisigCreateOrderStrategy) + .initialize(request as Omit); + } default: throw new Error("Invalid marketplace request type"); } diff --git a/src/lib/marketplace/MultisigCreateOrderStrategy.ts b/src/lib/marketplace/MultisigCreateOrderStrategy.ts index fea26acf..85be5cf0 100644 --- a/src/lib/marketplace/MultisigCreateOrderStrategy.ts +++ b/src/lib/marketplace/MultisigCreateOrderStrategy.ts @@ -5,20 +5,23 @@ import { } from "@hypercerts-org/marketplace-sdk"; import SafeApiKit from "@safe-global/api-kit"; -import { DataResponse } from "../../types/api.js"; import { EvmClientFactory } from "../../client/evmClient.js"; +import { DataResponse } from "../../types/api.js"; import { getFractionsById } from "../../utils/getFractionsById.js"; -import { getHypercertTokenId } from "../../utils/tokenIds.js"; import { isTypedMessage } from "../../utils/signatures.js"; +import { getHypercertTokenId } from "../../utils/tokenIds.js"; import { SafeApiStrategyFactory } from "../safe/SafeApiKitStrategy.js"; +import { inject, injectable } from "tsyringe"; +import { MarketplaceOrdersService } from "../../services/database/entities/MarketplaceOrdersEntityService.js"; +import { SignatureRequestsService } from "../../services/database/entities/SignatureRequestsEntityService.js"; +import * as Errors from "./errors.js"; import { MarketplaceStrategy } from "./MarketplaceStrategy.js"; 
import { MultisigCreateOrderRequest, SAFE_CREATE_ORDER_MESSAGE_SCHEMA, SafeCreateOrderMessage, } from "./schemas.js"; -import * as Errors from "./errors.js"; type ValidatableOrder = Omit< Order, @@ -27,16 +30,26 @@ type ValidatableOrder = Omit< type OrderDetails = SafeCreateOrderMessage["message"]; +@injectable() export default class MultisigCreateOrderStrategy extends MarketplaceStrategy { - private readonly safeApiKit: SafeApiKit.default; + private safeApiKit!: SafeApiKit.default; + private request!: Omit; constructor( - private readonly request: Omit, + @inject(MarketplaceOrdersService) + private readonly marketplaceOrdersService: MarketplaceOrdersService, + @inject(SignatureRequestsService) + private readonly signatureRequestsService: SignatureRequestsService, ) { super(); + } + + initialize(request: Omit): this { this.safeApiKit = SafeApiStrategyFactory.getStrategy( request.chainId, ).createInstance(); + this.request = request; + return this; } async executeCreate(): Promise> { @@ -50,10 +63,13 @@ export default class MultisigCreateOrderStrategy extends MarketplaceStrategy { } // Check if signature request already exists - const existingRequest = await this.dataService.getSignatureRequest( - safeAddress, - messageHash, - ); + const existingRequest = + await this.signatureRequestsService.getSignatureRequest({ + where: { + safe_address: { eq: safeAddress }, + message_hash: { eq: messageHash }, + }, + }); if (existingRequest) { return this.returnSuccess("Signature request already exists", { @@ -119,30 +135,11 @@ export default class MultisigCreateOrderStrategy extends MarketplaceStrategy { const [validationResult] = await hec.checkOrdersValidity([orderToValidate]); - if (!validationResult.valid) { - const errorCodes = validationResult.validatorCodes || []; - - // Check if error codes follow the expected pattern. Everything needs to be 0 (valid), - // except for the signature validation error. 
This is because when this request is - // made, the message is missing one or more signatures. - // The signature will be validated in the command. It's only skipped for now. - // TODO: get the command name when ready - const isValidErrorPattern = errorCodes.every((code, index) => { - if (index === 3) { - return ( - code === - OrderValidatorCode.MISSING_IS_VALID_SIGNATURE_FUNCTION_EIP1271 || - code === OrderValidatorCode.ORDER_EXPECTED_TO_BE_VALID - ); - } - return code === 0; - }); - - // Only proceed if it's the expected signature validation error pattern - if (!isValidErrorPattern) { - throw new Errors.InvalidOrder(validationResult); - } + // Only proceed if it's the expected signature validation error pattern + if (!this.evaluateOrderValidationResult(validationResult)) { + throw new Errors.InvalidOrder(validationResult); } + const tokenIds = orderDetails.itemIds.map( (id) => `${this.request.chainId}-${orderDetails.collection}-${id}`, ); @@ -184,7 +181,7 @@ export default class MultisigCreateOrderStrategy extends MarketplaceStrategy { amounts: orderDetails.amounts.map((amount) => amount.toString()), }; - await this.dataService.addSignatureRequest({ + await this.signatureRequestsService.addSignatureRequest({ chain_id: this.request.chainId, safe_address: safeAddress, message_hash: messageHash, @@ -193,4 +190,31 @@ export default class MultisigCreateOrderStrategy extends MarketplaceStrategy { timestamp: Math.floor(Date.now() / 1000), }); } + + evaluateOrderValidationResult(validationResult: { + valid: boolean; + validatorCodes: OrderValidatorCode[]; + }): boolean { + const errorCodes = validationResult.validatorCodes || []; + + // Check if error codes follow the expected pattern. Everything needs to be 0 (valid), + // except for the signature validation error. This is because when this request is + // made, the message is missing one or more signatures. + // The signature will be validated in the command. It's only skipped for now. 
+ // TODO: get the command name when ready + return errorCodes.every((code, index) => { + if (index === 3) { + return ( + code === + OrderValidatorCode.MISSING_IS_VALID_SIGNATURE_FUNCTION_EIP1271 || + code === OrderValidatorCode.ORDER_EXPECTED_TO_BE_VALID || + code === OrderValidatorCode.TOO_EARLY_TO_EXECUTE_ORDER + ); + } + return ( + code === OrderValidatorCode.ORDER_EXPECTED_TO_BE_VALID || + code === OrderValidatorCode.TOO_EARLY_TO_EXECUTE_ORDER + ); + }); + } } diff --git a/src/lib/strategies/isWhereEmpty.ts b/src/lib/strategies/isWhereEmpty.ts new file mode 100644 index 00000000..a338607e --- /dev/null +++ b/src/lib/strategies/isWhereEmpty.ts @@ -0,0 +1,16 @@ +import { FilterValue } from "../db/queryModifiers/buildWhereCondition.js"; +import { WhereArgsType } from "../../lib/graphql/createEntityWhereArgs.js"; +import { EntityTypeDefs } from "../../graphql/schemas/typeDefs/typeDefs.js"; +import { EntityFields } from "../graphql/createEntityArgs.js"; + +export function isWhereEmpty( + where: + | WhereArgsType + | FilterValue + | Record + | undefined, +): boolean { + if (!where) return true; + if (Array.isArray(where)) return where.length === 0; + return Object.values(where).filter((x) => x !== undefined).length === 0; +} diff --git a/src/lib/tsoa/iocContainer.ts b/src/lib/tsoa/iocContainer.ts new file mode 100644 index 00000000..1b016239 --- /dev/null +++ b/src/lib/tsoa/iocContainer.ts @@ -0,0 +1,14 @@ +import { IocContainer } from "@tsoa/runtime"; +import { container } from "tsyringe"; + +export const iocContainer: IocContainer = { + get: (controller: { prototype: T }): T => { + try { + return container.resolve(controller as never); + } catch (err) { + throw new Error( + `Error resolving controller: ${err instanceof Error ? 
err.message : String(err)}`, + ); + } + }, +}; diff --git a/src/lib/users/EOAUpsertStrategy.ts b/src/lib/users/EOAUpsertStrategy.ts index a21ac9a4..130784bb 100644 --- a/src/lib/users/EOAUpsertStrategy.ts +++ b/src/lib/users/EOAUpsertStrategy.ts @@ -1,20 +1,17 @@ import { verifyAuthSignedData } from "../../utils/verifyAuthSignedData.js"; -import { SupabaseDataService } from "../../services/SupabaseDataService.js"; import type { UserResponse } from "../../types/api.js"; import type { EOAUpdateRequest } from "./schemas.js"; import type { UserUpsertStrategy } from "./UserUpsertStrategy.js"; import { UserUpsertError } from "./errors.js"; +import { UsersService } from "../../services/database/entities/UsersEntityService.js"; -export default class EOAUpdateStrategy implements UserUpsertStrategy { - private readonly dataService: SupabaseDataService; - +export default class EOAUpsertStrategy implements UserUpsertStrategy { constructor( private readonly address: string, private readonly request: EOAUpdateRequest, - ) { - this.dataService = new SupabaseDataService(); - } + private readonly usersService: UsersService, + ) {} async execute(): Promise { await this.throwIfInvalidSignature(); @@ -28,7 +25,7 @@ export default class EOAUpdateStrategy implements UserUpsertStrategy { private async upsertUser(): Promise<{ address: string }> { try { - const users = await this.dataService.upsertUsers([ + const users = await this.usersService.upsertUsers([ { address: this.address, display_name: this.request.display_name, diff --git a/src/lib/users/MultisigUpsertStrategy.ts b/src/lib/users/MultisigUpsertStrategy.ts index 5642dd1a..ffbac818 100644 --- a/src/lib/users/MultisigUpsertStrategy.ts +++ b/src/lib/users/MultisigUpsertStrategy.ts @@ -2,14 +2,14 @@ import { z } from "zod"; import SafeApiKit from "@safe-global/api-kit"; import { SignatureRequestPurpose } from "../../graphql/schemas/typeDefs/signatureRequestTypeDefs.js"; -import { SupabaseDataService } from 
"../../services/SupabaseDataService.js"; import { UserResponse } from "../../types/api.js"; import { isTypedMessage } from "../../utils/signatures.js"; import { SafeApiStrategyFactory } from "../safe/SafeApiKitStrategy.js"; +import { SignatureRequestsService } from "../../services/database/entities/SignatureRequestsEntityService.js"; import type { UserUpsertStrategy } from "./UserUpsertStrategy.js"; -import type { MultisigUpdateRequest } from "./schemas.js"; import { UserUpsertError } from "./errors.js"; +import type { MultisigUpdateRequest } from "./schemas.js"; const MESSAGE_SCHEMA = z.object({ metadata: z.object({ @@ -27,8 +27,7 @@ const MESSAGE_SCHEMA = z.object({ }), }); -export default class MultisigUpdateStrategy implements UserUpsertStrategy { - private readonly dataService: SupabaseDataService; +export default class MultisigUpsertStrategy implements UserUpsertStrategy { // Safe SDKs only support CommonJS, so TS interprets `SafeApiKit` as a namespace. // https://docs.safe.global/sdk/overview // Hence the explicit `default` here. 
@@ -37,11 +36,11 @@ export default class MultisigUpdateStrategy implements UserUpsertStrategy { constructor( private readonly address: string, private readonly request: MultisigUpdateRequest, + private readonly signatureRequestsService: SignatureRequestsService, ) { this.safeApiKit = SafeApiStrategyFactory.getStrategy( this.request.chain_id, ).createInstance(); - this.dataService = new SupabaseDataService(); } // We could check if it's a 1 of 1 and execute right away @@ -71,7 +70,7 @@ export default class MultisigUpdateStrategy implements UserUpsertStrategy { ); } console.log("Creating signature request for", parseResult); - await this.dataService.addSignatureRequest({ + await this.signatureRequestsService.addSignatureRequest({ chain_id: this.request.chain_id, safe_address: this.address, message_hash: this.request.messageHash, diff --git a/src/lib/users/UserUpsertStrategy.ts b/src/lib/users/UserUpsertStrategy.ts index 772baa49..2b7ba613 100644 --- a/src/lib/users/UserUpsertStrategy.ts +++ b/src/lib/users/UserUpsertStrategy.ts @@ -1,4 +1,7 @@ import { UserResponse } from "../../types/api.js"; +import { container } from "tsyringe"; +import { UsersService } from "../../services/database/entities/UsersEntityService.js"; +import { SignatureRequestsService } from "../../services/database/entities/SignatureRequestsEntityService.js"; import MultisigUpsertStrategy from "./MultisigUpsertStrategy.js"; import EOAUpsertStrategy from "./EOAUpsertStrategy.js"; @@ -13,10 +16,20 @@ export function createStrategy( request: MultisigUpdateRequest | EOAUpdateRequest, ): UserUpsertStrategy { switch (request.type) { - case "eoa": - return new EOAUpsertStrategy(address, request); - case "multisig": - return new MultisigUpsertStrategy(address, request); + case "eoa": { + const usersService = container.resolve(UsersService); + return new EOAUpsertStrategy(address, request, usersService); + } + case "multisig": { + const signatureRequestsService = container.resolve( + 
SignatureRequestsService, + ); + return new MultisigUpsertStrategy( + address, + request, + signatureRequestsService, + ); + } default: throw new Error("Invalid user update request type"); } diff --git a/src/services/BaseSupabaseService.ts b/src/services/BaseSupabaseService.ts deleted file mode 100644 index 46742586..00000000 --- a/src/services/BaseSupabaseService.ts +++ /dev/null @@ -1,97 +0,0 @@ -import { expressionBuilder, Kysely, SqlBool } from "kysely"; -import { BaseArgs } from "../graphql/schemas/args/baseArgs.js"; -import { SortOrder } from "../graphql/schemas/enums/sortEnums.js"; -import { buildWhereCondition } from "../graphql/schemas/utils/filters-kysely.js"; - -export abstract class BaseSupabaseService { - protected db: Kysely; - - protected constructor(db: Kysely) { - this.db = db; - } - - abstract getDataQuery( - tableName: T, - args: BaseArgs, // eslint-disable-next-line @typescript-eslint/no-explicit-any - ): any; - - abstract getCountQuery( - tableName: T, - args: BaseArgs, // eslint-disable-next-line @typescript-eslint/no-explicit-any - ): any; - - handleGetData( - tableName: T, - args: BaseArgs & { - first?: number; - offset?: number; - }, - ) { - let query = this.getDataQuery(tableName, args); - const { where, first, offset, sort } = args; - const eb = expressionBuilder(query); - - if (where) { - query = this.applyWhereConditions(query, where, tableName, eb); - } - - if (sort?.by) { - query = this.applySorting(query, sort.by); - } - - if (first) query = query.limit(first); - if (offset) query = query.offset(offset); - - return query; - } - - handleGetCount( - tableName: T, - args: BaseArgs & { - first?: number; - offset?: number; - }, - ) { - let query = this.getCountQuery(tableName, args); - - const { where } = args; - const eb = expressionBuilder(query); - - if (where) { - query = this.applyWhereConditions(query, where, tableName, eb); - } - - return query; - } - - private applyWhereConditions( - // eslint-disable-next-line 
@typescript-eslint/no-explicit-any - query: any, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - where: any, - tableName: T, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - eb: any, - ) { - const conditions = Object.entries(where) - .map(([column, value]) => - buildWhereCondition(column, value, tableName, eb), - ) - .filter(Boolean); - - return conditions.reduce((q, condition) => { - return q.where(condition as SqlBool); - }, query); - } - - // eslint-disable-next-line @typescript-eslint/no-explicit-any - applySorting(query: any, sortBy: any) { - for (const [column, direction] of Object.entries(sortBy)) { - if (!column || !direction) continue; - const dir: "asc" | "desc" = - direction === SortOrder.ascending ? "asc" : "desc"; - query = query.orderBy(column, dir); - } - return query; - } -} diff --git a/src/services/MetadataImageService.ts b/src/services/MetadataImageService.ts deleted file mode 100644 index 7f14e694..00000000 --- a/src/services/MetadataImageService.ts +++ /dev/null @@ -1,30 +0,0 @@ -import { singleton } from "tsyringe"; -import { kyselyCaching } from "../client/kysely.js"; -import { CachingDatabase } from "../types/kyselySupabaseCaching.js"; -import { BaseSupabaseService } from "./BaseSupabaseService.js"; - -@singleton() -export class MetadataImageService extends BaseSupabaseService { - constructor() { - super(kyselyCaching); - } - - // TODO: remove these when we more refactor the services to improve typing and performance - getDataQuery() { - throw new Error("Method not implemented - not needed for image service"); - } - - getCountQuery() { - throw new Error("Method not implemented - not needed for image service"); - } - - async getImageByUri(uri: string): Promise { - const result = await this.db - .selectFrom("metadata") - .select(["image"]) - .where("uri", "=", uri) - .executeTakeFirst(); - - return result?.image ?? 
null; - } -} diff --git a/src/services/SignatureRequestProcessor.ts b/src/services/SignatureRequestProcessor.ts index 0b9087eb..f254f90e 100644 --- a/src/services/SignatureRequestProcessor.ts +++ b/src/services/SignatureRequestProcessor.ts @@ -1,27 +1,32 @@ import { SignatureRequestStatus } from "../graphql/schemas/typeDefs/signatureRequestTypeDefs.js"; -import { Database } from "../types/supabaseData.js"; -import { getCommand } from "../commands/CommandFactory.js"; +import { getCommand, SignatureRequest } from "../commands/CommandFactory.js"; -import { SupabaseDataService } from "./SupabaseDataService.js"; import { SafeApiQueue } from "./SafeApiQueue.js"; +import { container, inject, injectable } from "tsyringe"; +import { SignatureRequestsService } from "./database/entities/SignatureRequestsEntityService.js"; +import { DataKyselyService } from "../client/kysely.js"; +import { Selectable } from "kysely"; -type SignatureRequest = - Database["public"]["Tables"]["signature_requests"]["Row"]; - +@injectable() export default class SignatureRequestProcessor { private static instance: SignatureRequestProcessor; - - private readonly dataService: SupabaseDataService; private readonly queue: SafeApiQueue; - constructor() { - this.dataService = new SupabaseDataService(); + constructor( + @inject(SignatureRequestsService) + private signatureRequestService: SignatureRequestsService, + @inject(DataKyselyService) private dbService: DataKyselyService, + ) { this.queue = SafeApiQueue.getInstance(); } async processPendingRequests(): Promise { const pendingRequests = await this.getPendingRequests(); + if (pendingRequests.length === 0) { + return; + } + console.log(`Found ${pendingRequests.length} pending signature requests`); for (const request of pendingRequests) { @@ -33,22 +38,22 @@ export default class SignatureRequestProcessor { } } - private async getPendingRequests(): Promise { - const response = await this.dataService.getSignatureRequests({ + private async 
getPendingRequests(): Promise[]> { + const { data } = await this.signatureRequestService.getSignatureRequests({ where: { status: { eq: SignatureRequestStatus.PENDING }, }, }); - return this.dataService.db.transaction().execute(async (transaction) => { - const dataRes = await transaction.executeQuery(response.data); - return dataRes.rows as SignatureRequest[]; - }); + return data; } static getInstance(): SignatureRequestProcessor { if (!SignatureRequestProcessor.instance) { - SignatureRequestProcessor.instance = new SignatureRequestProcessor(); + SignatureRequestProcessor.instance = new SignatureRequestProcessor( + container.resolve(SignatureRequestsService), + container.resolve(DataKyselyService), + ); } return SignatureRequestProcessor.instance; } diff --git a/src/services/SupabaseCachingService.ts b/src/services/SupabaseCachingService.ts deleted file mode 100644 index 1e4a56eb..00000000 --- a/src/services/SupabaseCachingService.ts +++ /dev/null @@ -1,287 +0,0 @@ -import { CachingDatabase } from "../types/kyselySupabaseCaching.js"; -import type { GetContractsArgs } from "../graphql/schemas/args/contractArgs.js"; -import type { GetMetadataArgs } from "../graphql/schemas/args/metadataArgs.js"; -import { GetHypercertsArgs } from "../graphql/schemas/args/hypercertsArgs.js"; -import { GetAttestationSchemasArgs } from "../graphql/schemas/args/attestationSchemaArgs.js"; -import { type GetAttestationsArgs } from "../graphql/schemas/args/attestationArgs.js"; -import { GetFractionsArgs } from "../graphql/schemas/args/fractionArgs.js"; -import { GetSalesArgs } from "../graphql/schemas/args/salesArgs.js"; -import { kyselyCaching } from "../client/kysely.js"; -import { supabaseCaching as supabaseClient } from "../client/supabase.js"; -import { GetAllowlistRecordsArgs } from "../graphql/schemas/args/allowlistRecordArgs.js"; -import { singleton } from "tsyringe"; -import { BaseArgs } from "../graphql/schemas/args/baseArgs.js"; -import { BaseSupabaseService } from 
"./BaseSupabaseService.js"; - -@singleton() -export class SupabaseCachingService extends BaseSupabaseService { - constructor() { - super(kyselyCaching); - } - - // Getters - - getAllowlistRecords(args: GetAllowlistRecordsArgs) { - return { - data: this.handleGetData("claimable_fractions_with_proofs", args), - count: this.handleGetCount("claimable_fractions_with_proofs", args), - }; - } - - getAttestations = (args: GetAttestationsArgs) => { - return { - data: this.handleGetData("attestations", args), - count: this.handleGetCount("attestations", args), - }; - }; - - getAttestationSchemas(args: GetAttestationSchemasArgs) { - return { - data: this.handleGetData("supported_schemas", args), - count: this.handleGetCount("supported_schemas", args), - }; - } - - getContracts(args: GetContractsArgs) { - return { - data: this.handleGetData("contracts", args), - count: this.handleGetCount("contracts", args), - }; - } - - getFractions(args: GetFractionsArgs) { - return { - data: this.handleGetData("fractions_view", args), - count: this.handleGetCount("fractions_view", args), - }; - } - - getMetadataWithoutImage(args: GetMetadataArgs) { - return { - data: this.handleGetData("metadata", args), - count: this.handleGetCount("metadata", args), - }; - } - - getHypercerts = (args: GetHypercertsArgs) => { - return { - data: this.handleGetData("claims", args), - count: this.handleGetCount("claims", args), - }; - }; - - getSales(args: GetSalesArgs) { - return { - data: this.handleGetData("sales", args), - count: this.handleGetCount("sales", args), - }; - } - - // Build initial query per table - - getDataQuery< - DB extends CachingDatabase, - T extends keyof DB & string, - A extends object, - >(tableName: T, args: BaseArgs) { - switch (tableName) { - case "allowlist_records": - case "claimable_fractions_with_proofs": - return this.db - .selectFrom("claimable_fractions_with_proofs") - .selectAll(); - case "attestations": - return this.db - .selectFrom("attestations") - .innerJoin( - 
"supported_schemas", - "supported_schemas.id", - "attestations.supported_schemas_id", - ) - .select([ - "attestations.id", - "attestations.uid", - "attestations.chain_id", - "attestations.contract_address", - "attestations.token_id", - "attestations.claims_id", - "attestations.recipient", - "attestations.attester", - "attestations.attestation", - "attestations.data", - "attestations.creation_block_timestamp", - "attestations.creation_block_number", - "attestations.last_update_block_number", - "attestations.last_update_block_timestamp", - "supported_schemas.uid as schema_uid", - ]) - .$if(args.where?.hypercerts, (qb) => - qb.innerJoin( - "claims as claims", - "claims.id", - "attestations.claims_id", - ), - ) - .$if(args.where?.metadata, (qb) => - qb.innerJoin("metadata", "metadata.uri", "claims.uri"), - ); - case "eas_schema": - case "supported_schemas": - case "attestation_schema": - return this.db.selectFrom("supported_schemas").selectAll(); - case "hypercerts": - case "claims": - return this.db - .selectFrom("claims") - .$if(args.where?.metadata, (qb) => - qb.innerJoin("metadata", "metadata.uri", "claims.uri"), - ) - .$if(args.where?.attestations, (qb) => - qb.innerJoin("attestations", "attestations.claims_id", "claims.id"), - ) - .$if(args.where?.fractions, (qb) => - qb.innerJoin( - "fractions_view", - "fractions_view.claims_id", - "claims.id", - ), - ) - .$if(args.where?.contract, (qb) => - qb.innerJoin("contracts", "contracts.id", "claims.contracts_id"), - ) - .selectAll("claims"); // Select all columns from the claims table - case "contracts": - return this.db.selectFrom("contracts").selectAll(); - case "fractions": - case "fractions_view": - return this.db.selectFrom("fractions_view").selectAll(); - case "metadata": - return this.db - .selectFrom("metadata") - .select([ - "metadata.id", - "metadata.name", - "metadata.description", - "metadata.external_url", - "metadata.work_scope", - "metadata.work_timeframe_from", - "metadata.work_timeframe_to", - 
"metadata.impact_scope", - "metadata.impact_timeframe_from", - "metadata.impact_timeframe_to", - "metadata.contributors", - "metadata.rights", - "metadata.uri", - "metadata.properties", - "metadata.allow_list_uri", - "metadata.parsed", - ]) - .$if(args.where?.hypercerts, (qb) => - qb.innerJoin("claims", "claims.uri", "metadata.uri"), - ); - case "sales": - return this.db.selectFrom("sales").selectAll(); - default: - throw new Error(`Table ${tableName.toString()} not found`); - } - } - - getCountQuery< - DB extends CachingDatabase, - T extends keyof DB & string, - A extends object, - >(tableName: T, args: BaseArgs) { - switch (tableName) { - case "allowlist_records": - case "claimable_fractions_with_proofs": - return this.db - .selectFrom("claimable_fractions_with_proofs") - .select((expressionBuilder) => { - return expressionBuilder.fn.countAll().as("count"); - }); - case "attestations": - return this.db - .selectFrom("attestations") - .$if(args.where?.hypercerts, (qb) => - qb.innerJoin("claims", "claims.id", "attestations.claims_id"), - ) - .$if(args.where?.metadata, (qb) => - qb.innerJoin("metadata", "metadata.uri", "claims.uri"), - ) - .$if(args.where?.eas_schema, (qb) => - qb.innerJoin( - "supported_schemas", - "supported_schemas.id", - "attestations.supported_schemas_id", - ), - ) - .select((expressionBuilder) => { - return expressionBuilder.fn.countAll().as("count"); - }); - case "eas_schema": - case "supported_schemas": - case "attestation_schema": - return this.db - .selectFrom("supported_schemas") - .select((expressionBuilder) => { - return expressionBuilder.fn.countAll().as("count"); - }); - case "claims": - case "hypercerts": - return this.db - .selectFrom("claims") - .$if(args.where?.metadata, (qb) => - qb.innerJoin("metadata", "metadata.uri", "claims.uri"), - ) - .$if(args.where?.attestations, (qb) => - qb.innerJoin("attestations", "attestations.claims_id", "claims.id"), - ) - .$if(args.where?.fractions, (qb) => - qb.innerJoin( - "fractions_view", - 
"fractions_view.claims_id", - "claims.id", - ), - ) - .$if(args.where?.contract, (qb) => - qb.innerJoin("contracts", "contracts.id", "claims.contracts_id"), - ) - .select((expressionBuilder) => { - return expressionBuilder.fn.countAll().as("count"); - }); - case "contracts": - return this.db.selectFrom("contracts").select((expressionBuilder) => { - return expressionBuilder.fn.countAll().as("count"); - }); - case "fractions": - case "fractions_view": - return this.db - .selectFrom("fractions_view") - .select((expressionBuilder) => { - return expressionBuilder.fn.countAll().as("count"); - }); - case "metadata": - return this.db - .selectFrom("metadata") - .$if(args.where?.hypercerts, (qb) => - qb.innerJoin("claims", "claims.uri", "metadata.uri"), - ) - .select((expressionBuilder) => { - return expressionBuilder.fn.countAll().as("count"); - }); - case "sales": - return this.db.selectFrom("sales").select((expressionBuilder) => { - return expressionBuilder.fn.countAll().as("count"); - }); - default: - throw new Error(`Table ${tableName.toString()} not found`); - } - } - - getSalesForTokenIds(tokenIds: bigint[]) { - return supabaseClient - .from("sales") - .select("*", { count: "exact", head: false }) - .overlaps("item_ids", tokenIds); - } -} diff --git a/src/services/SupabaseDataService.ts b/src/services/SupabaseDataService.ts deleted file mode 100644 index bb3059f2..00000000 --- a/src/services/SupabaseDataService.ts +++ /dev/null @@ -1,852 +0,0 @@ -import { - HypercertExchangeClient, - OrderValidatorCode, -} from "@hypercerts-org/marketplace-sdk"; -import type { SupabaseClient } from "@supabase/supabase-js"; -import { sql } from "kysely"; -import { jsonArrayFrom } from "kysely/helpers/postgres"; -import { singleton } from "tsyringe"; -import { kyselyData } from "../client/kysely.js"; -import { supabaseData } from "../client/supabase.js"; -import { BaseArgs } from "../graphql/schemas/args/baseArgs.js"; -import { GetBlueprintArgs } from 
"../graphql/schemas/args/blueprintArgs.js"; -import { GetHyperboardsArgs } from "../graphql/schemas/args/hyperboardArgs.js"; -import { GetOrdersArgs } from "../graphql/schemas/args/orderArgs.js"; -import { GetSignatureRequestArgs } from "../graphql/schemas/args/signatureRequestArgs.js"; -import { GetCollectionsArgs } from "../graphql/schemas/args/collectionArgs.js"; -import { GetUserArgs } from "../graphql/schemas/args/userArgs.js"; -import { applyFilters } from "../graphql/schemas/utils/filters.js"; -import { applyPagination } from "../graphql/schemas/utils/pagination.js"; -import { applySorting } from "../graphql/schemas/utils/sorting.js"; -import type { DataDatabase as KyselyDataDatabase } from "../types/kyselySupabaseData.js"; -import type { Database as DataDatabase } from "../types/supabaseData.js"; -import { BaseSupabaseService } from "./BaseSupabaseService.js"; -import { EvmClientFactory } from "../client/evmClient.js"; -import _ from "lodash"; - -@singleton() -export class SupabaseDataService extends BaseSupabaseService { - private supabaseData: SupabaseClient; - - constructor() { - super(kyselyData); - this.supabaseData = supabaseData; - } - - async mintBlueprintAndSwapInCollections( - blueprintId: number, - hypercertId: string, - ) { - await this.db.transaction().execute(async (trx) => { - // Get all blueprint hyperboard metadata for this blueprint - const oldBlueprintMetadata = await trx - .deleteFrom("hyperboard_blueprint_metadata") - .where("blueprint_id", "=", blueprintId) - .returning(["hyperboard_id", "collection_id", "display_size"]) - .execute(); - - if (oldBlueprintMetadata.length) { - // Insert the new hypercert for each collection - await trx - .insertInto("hypercerts") - .values( - oldBlueprintMetadata.map((oldBlueprintMetadata) => ({ - hypercert_id: hypercertId, - collection_id: oldBlueprintMetadata.collection_id, - })), - ) - .onConflict((oc) => - oc.columns(["hypercert_id", "collection_id"]).doUpdateSet((eb) => ({ - hypercert_id: 
eb.ref("excluded.hypercert_id"), - collection_id: eb.ref("excluded.collection_id"), - })), - ) - .returning(["hypercert_id", "collection_id"]) - .execute(); - - // Insert the new hypercert metadata for each collection - await trx - .insertInto("hyperboard_hypercert_metadata") - .values( - oldBlueprintMetadata.map((oldBlueprintMetadata) => ({ - hyperboard_id: oldBlueprintMetadata.hyperboard_id, - hypercert_id: hypercertId, - collection_id: oldBlueprintMetadata.collection_id, - display_size: oldBlueprintMetadata.display_size, - })), - ) - .onConflict((oc) => - oc - .columns(["hyperboard_id", "hypercert_id", "collection_id"]) - .doUpdateSet((eb) => ({ - hypercert_id: eb.ref("excluded.hypercert_id"), - collection_id: eb.ref("excluded.collection_id"), - hyperboard_id: eb.ref("excluded.hyperboard_id"), - display_size: eb.ref("excluded.display_size"), - })), - ) - .returning(["hyperboard_id", "hypercert_id", "collection_id"]) - .execute(); - } - - // Set blueprint to minted - await trx - .updateTable("blueprints") - .set((eb) => ({ - minted: true, - hypercert_ids: sql`array_append(${eb.ref("hypercert_ids")}, ${hypercertId})`, - })) - .where("id", "=", blueprintId) - .execute(); - - // Delete blueprint from collections, because it has been replaced by a hypercert - await trx - .deleteFrom("collection_blueprints") - .where("blueprint_id", "=", blueprintId) - .execute(); - }); - } - - storeOrder( - order: DataDatabase["public"]["Tables"]["marketplace_orders"]["Insert"], - ) { - return this.supabaseData - .from("marketplace_orders") - .insert([order]) - .select("*") - .single() - .throwOnError(); - } - - getNonce(address: string, chainId: number) { - return this.supabaseData - .from("marketplace_order_nonces") - .select("*") - .eq("address", address) - .eq("chain_id", chainId) - .maybeSingle(); - } - - createNonce(address: string, chainId: number) { - return this.supabaseData - .from("marketplace_order_nonces") - .insert({ - address, - chain_id: chainId, - nonce_counter: 0, - 
}) - .select("*") - .single(); - } - - updateNonce(address: string, chainId: number, nonce: number) { - return this.supabaseData - .from("marketplace_order_nonces") - .update({ - nonce_counter: nonce, - }) - .eq("address", address) - .eq("chain_id", chainId) - .select("*") - .single(); - } - - getOrders(args: GetOrdersArgs) { - return { - data: this.handleGetData("marketplace_orders", args), - count: this.handleGetCount("marketplace_orders", args), - }; - } - - getOrdersByTokenId({ - tokenId, - chainId, - }: { - tokenId: string; - chainId: number; - }) { - return this.supabaseData - .from("marketplace_orders") - .select("*") - .contains("itemIds", [tokenId]) - .eq("chainId", chainId) - .order("createdAt", { ascending: false }) - .throwOnError(); - } - - updateOrders( - orders: DataDatabase["public"]["Tables"]["marketplace_orders"]["Update"][], - ) { - return Promise.all( - orders.map((order) => { - if (!order?.id) { - throw new Error("Order must have an id to update."); - } - return this.supabaseData - .from("marketplace_orders") - .update(order) - .eq("id", order.id) - .throwOnError(); - }), - ); - } - - getOrdersForFraction(fractionIds: string | string[]) { - const ids = Array.isArray(fractionIds) ? 
fractionIds : [fractionIds]; - return this.supabaseData - .from("marketplace_orders") - .select("*", { count: "exact" }) - .overlaps("itemIds", ids) - .order("createdAt", { ascending: false }) - .throwOnError(); - } - - getHyperboards(args: GetHyperboardsArgs) { - let query = this.supabaseData.from("hyperboards").select( - `*, - collections!hyperboard_collections( - *, - hypercerts!claims_registry_id_fkey(*), - blueprints(*), - blueprint_metadata:hyperboard_blueprint_metadata(*), - admins:users!collection_admins(*) - ), - admins:users!inner(*), - users!inner(address), - hypercert_metadata:hyperboard_hypercert_metadata!hyperboard_hypercert_metadata_hyperboard_id_fkey(*) - `, - { - count: "exact", - }, - ); - const { where, sort, offset, first } = args; - - if (where?.id?.eq) { - query = query.eq( - "collections.blueprint_metadata.hyperboard_id", - where.id.eq, - ); - } - - // Filter by admin according to https://github.com/orgs/supabase/discussions/16234#discussioncomment-6642525 - if (where?.admin_id?.eq) { - query = query.eq("users.address", where?.admin_id?.eq); - delete where.admin_id; - } - - query = applyFilters({ query, where }); - query = applySorting({ query, sort }); - query = applyPagination({ query, pagination: { first, offset } }); - - return query; - } - - async validateOrdersByTokenIds({ - tokenIds, - chainId, - }: { - tokenIds: string[]; - chainId: number; - }) { - const ordersToUpdate: { - id: string; - invalidated: boolean; - validator_codes: OrderValidatorCode[]; - }[] = []; - const getOrdersResults = await Promise.all( - tokenIds.map(async (tokenId) => - this.getOrdersByTokenId({ - tokenId, - chainId, - }), - ), - ); - - if (getOrdersResults.some((res) => res.error)) { - throw new Error( - `[SupabaseDataService::validateOrderByTokenId] Error fetching orders: ${getOrdersResults.find((res) => res.error)?.error?.message}`, - ); - } - - const matchingOrders = getOrdersResults - .flatMap((res) => res.data) - .filter((x) => x !== null); - - // Validate 
orders using logic in the SDK - const hec = new HypercertExchangeClient( - chainId, - // @ts-expect-error Typing issue with provider - EvmClientFactory.createEthersClient(chainId), - ); - const validationResults = await hec.checkOrdersValidity(matchingOrders); - - // Determine all orders that have changed validity or validator codes so we don't - // update the order if it hasn't changed - for (const order of matchingOrders) { - const validationResult = validationResults.find( - (result) => result.id === order.id, - ); - - if (!validationResult) { - throw new Error( - `[SupabaseDataService::validateOrderByTokenId] No validation result found for order ${order.id}`, - ); - } - - const currentOrderIsValid = !order.invalidated; - - // If the order validity has changed, we need to update the order and add the validator codes - if (validationResult.valid !== currentOrderIsValid) { - ordersToUpdate.push({ - id: order.id, - invalidated: !validationResult.valid, - validator_codes: validationResult.validatorCodes, - }); - continue; - } - - if ( - order.validator_codes === null && - validationResult.validatorCodes.every( - (code) => code === OrderValidatorCode.ORDER_EXPECTED_TO_BE_VALID, - ) - ) { - // Orders are added to the database by default with validator_codes set to null - // The contract will return an array of ORDER_EXPECTED_TO_BE_VALID if the order is valid - // In this special case we won't have to update the order - continue; - } - - // If the validator codes have changed, we need to update the order - if (!_.isEqual(validationResult.validatorCodes, order.validator_codes)) { - ordersToUpdate.push({ - id: order.id, - invalidated: !validationResult.valid, - validator_codes: validationResult.validatorCodes, - }); - } - } - - console.log( - "[SupabaseDataService::validateOrderByTokenId] Updating orders from validation results", - ordersToUpdate, - ); - await this.updateOrders(ordersToUpdate); - return ordersToUpdate; - } - - async deleteOrder(orderId: string) { - 
return this.supabaseData - .from("marketplace_orders") - .delete() - .eq("id", orderId) - .single(); - } - - async upsertUsers( - users: DataDatabase["public"]["Tables"]["users"]["Insert"][], - ) { - return this.db - .insertInto("users") - .values(users) - .onConflict((oc) => - oc.constraint("users_address_chain_id").doUpdateSet((eb) => ({ - avatar: eb.ref("excluded.avatar"), - display_name: eb.ref("excluded.display_name"), - })), - ) - .returning(["address"]) - .execute(); - } - - getUsers(args: GetUserArgs) { - return { - data: this.handleGetData("users", args), - count: this.handleGetCount("users", args), - }; - } - - getBlueprints(args: GetBlueprintArgs) { - return { - data: this.handleGetData("blueprints_with_admins", args), - count: this.handleGetCount("blueprints_with_admins", args), - }; - } - - async deleteAllHypercertsFromCollection(collectionId: string) { - return this.db - .deleteFrom("hypercerts") - .where("collection_id", "=", collectionId) - .returning("hypercert_id") - .execute(); - } - - async upsertHypercerts( - hypercerts: DataDatabase["public"]["Tables"]["hypercerts"]["Insert"][], - ) { - return this.db - .insertInto("hypercerts") - .values(hypercerts) - .onConflict((oc) => - oc.columns(["hypercert_id", "collection_id"]).doUpdateSet((eb) => ({ - hypercert_id: eb.ref("excluded.hypercert_id"), - collection_id: eb.ref("excluded.collection_id"), - })), - ) - .returning(["hypercert_id", "collection_id"]) - .execute(); - } - - async upsertCollections( - collections: DataDatabase["public"]["Tables"]["collections"]["Insert"][], - ) { - return this.db - .insertInto("collections") - .values(collections) - .onConflict((oc) => - oc.column("id").doUpdateSet((eb) => ({ - id: eb.ref("excluded.id"), - name: eb.ref("excluded.name"), - description: eb.ref("excluded.description"), - chain_ids: eb.ref("excluded.chain_ids"), - hidden: eb.ref("excluded.hidden"), - })), - ) - .returning(["id"]) - .execute(); - } - - async upsertHyperboardHypercertMetadata( - metadata: 
DataDatabase["public"]["Tables"]["hyperboard_hypercert_metadata"]["Insert"][], - ) { - return this.db - .insertInto("hyperboard_hypercert_metadata") - .values(metadata) - .onConflict((oc) => - oc - .columns(["hyperboard_id", "hypercert_id", "collection_id"]) - .doUpdateSet((eb) => ({ - hypercert_id: eb.ref("excluded.hypercert_id"), - collection_id: eb.ref("excluded.collection_id"), - hyperboard_id: eb.ref("excluded.hyperboard_id"), - display_size: eb.ref("excluded.display_size"), - })), - ) - .returning(["hyperboard_id", "hypercert_id", "collection_id"]) - .execute(); - } - - async upsertHyperboards( - hyperboards: DataDatabase["public"]["Tables"]["hyperboards"]["Insert"][], - ) { - return this.db - .insertInto("hyperboards") - .values(hyperboards) - .onConflict((oc) => - oc.column("id").doUpdateSet((eb) => ({ - id: eb.ref("excluded.id"), - name: eb.ref("excluded.name"), - chain_ids: eb.ref("excluded.chain_ids"), - background_image: eb.ref("excluded.background_image"), - grayscale_images: eb.ref("excluded.grayscale_images"), - tile_border_color: eb.ref("excluded.tile_border_color"), - })), - ) - .returning(["id"]) - .execute(); - } - - async getHyperboardById(hyperboardId: string) { - const res = await this.getHyperboards({ - where: { id: { eq: hyperboardId } }, - }); - return res.data?.[0]; - } - - async deleteHyperboard(hyperboardId: string) { - return this.db - .deleteFrom("hyperboards") - .where("id", "=", hyperboardId) - .execute(); - } - - getCollections(args: GetCollectionsArgs) { - return { - data: this.handleGetData("collections", args), - count: this.handleGetCount("collections", args), - }; - } - - async getCollectionHypercerts(collectionId: string) { - return this.db - .selectFrom("hypercerts") - .select(["hypercert_id", "collection_id"]) - .where("collection_id", "=", collectionId) - .execute(); - } - - async getCollectionAdmins(collectionId: string) { - return this.db - .selectFrom("users") - .innerJoin("collection_admins", 
"collection_admins.user_id", "users.id") - .select([ - "users.address", - "users.chain_id", - "users.display_name", - "users.avatar", - ]) - .where("collection_admins.collection_id", "=", collectionId) - .execute(); - } - - async getCollectionBlueprints(collectionId: string) { - return this.db - .selectFrom("blueprints") - .innerJoin( - "collection_blueprints", - "collection_blueprints.blueprint_id", - "blueprints.id", - ) - .selectAll("blueprints") - .where("collection_blueprints.collection_id", "=", collectionId) - .execute(); - } - - async getCollectionById(collectionId: string) { - return this.db - .selectFrom("collections") - .select((eb) => [ - "id", - "chain_ids", - jsonArrayFrom( - eb - .selectFrom("collection_admins") - .select((eb) => [ - jsonArrayFrom( - eb - .selectFrom("users") - .select(["address", "chain_id", "user_id"]) - .whereRef("user_id", "=", "user_id"), - ).as("admins"), - ]) - .whereRef("collection_id", "=", "collections.id"), - ).as("collection_admins"), - ]) - .where("id", "=", collectionId) - .executeTakeFirst(); - } - - async addCollectionToHyperboard(hyperboardId: string, collectionId: string) { - return this.db - .insertInto("hyperboard_collections") - .values([ - { - hyperboard_id: hyperboardId, - collection_id: collectionId, - }, - ]) - .onConflict((oc) => - oc.columns(["hyperboard_id", "collection_id"]).doUpdateSet((eb) => ({ - hyperboard_id: eb.ref("excluded.hyperboard_id"), - collection_id: eb.ref("excluded.collection_id"), - })), - ) - .returning(["hyperboard_id", "collection_id"]) - .execute(); - } - - async getOrCreateUser(address: string, chainId: number) { - const user = await this.db - .selectFrom("users") - .select(["id"]) - .where("address", "=", address) - .where("chain_id", "=", chainId) - .execute(); - - if (user.length === 0) { - return this.db - .insertInto("users") - .values([ - { - address, - chain_id: chainId, - }, - ]) - .returning(["id"]) - .execute() - .then((res) => res[0]); - } - - return user[0]; - } - - async 
addAdminToHyperboard( - hyperboardId: string, - adminAddress: string, - chainId: number, - ) { - const user = await this.getOrCreateUser(adminAddress, chainId); - return this.db - .insertInto("hyperboard_admins") - .values([ - { - hyperboard_id: hyperboardId, - user_id: user.id, - }, - ]) - .onConflict((oc) => - oc.columns(["hyperboard_id", "user_id"]).doUpdateSet((eb) => ({ - hyperboard_id: eb.ref("excluded.hyperboard_id"), - user_id: eb.ref("excluded.user_id"), - })), - ) - .returning(["hyperboard_id", "user_id"]) - .executeTakeFirst(); - } - - async addAdminToCollection( - collectionId: string, - adminAddress: string, - chainId: number, - ) { - const user = await this.getOrCreateUser(adminAddress, chainId); - return this.db - .insertInto("collection_admins") - .values([ - { - collection_id: collectionId, - user_id: user.id, - }, - ]) - .onConflict((oc) => - oc.columns(["collection_id", "user_id"]).doUpdateSet((eb) => ({ - collection_id: eb.ref("excluded.collection_id"), - user_id: eb.ref("excluded.user_id"), - })), - ) - .returning(["collection_id", "user_id"]) - .executeTakeFirst(); - } - - async deleteAllBlueprintsFromCollection(collectionId: string) { - return this.db - .deleteFrom("collection_blueprints") - .where("collection_id", "=", collectionId) - .returning("blueprint_id") - .execute(); - } - - async upsertBlueprints( - blueprints: DataDatabase["public"]["Tables"]["blueprints"]["Insert"][], - ) { - return this.db - .insertInto("blueprints") - .values(blueprints) - .onConflict((oc) => - oc.columns(["id"]).doUpdateSet((eb) => ({ - id: eb.ref("excluded.id"), - form_values: eb.ref("excluded.form_values"), - minter_address: eb.ref("excluded.minter_address"), - minted: eb.ref("excluded.minted"), - })), - ) - .returning(["id"]) - .execute(); - } - - async upsertHyperboardBlueprintMetadata( - metadata: DataDatabase["public"]["Tables"]["hyperboard_blueprint_metadata"]["Insert"][], - ) { - return this.db - .insertInto("hyperboard_blueprint_metadata") - 
.values(metadata) - .onConflict((oc) => - oc - .columns(["hyperboard_id", "blueprint_id", "collection_id"]) - .doUpdateSet((eb) => ({ - blueprint_id: eb.ref("excluded.blueprint_id"), - collection_id: eb.ref("excluded.collection_id"), - hyperboard_id: eb.ref("excluded.hyperboard_id"), - display_size: eb.ref("excluded.display_size"), - })), - ) - .returning(["hyperboard_id", "blueprint_id", "collection_id"]) - .execute(); - } - - async addBlueprintsToCollection( - values: DataDatabase["public"]["Tables"]["collection_blueprints"]["Insert"][], - ) { - return this.db - .insertInto("collection_blueprints") - .values(values) - .onConflict((oc) => - oc.columns(["blueprint_id", "collection_id"]).doNothing(), - ) - .returning(["blueprint_id", "collection_id"]) - .execute(); - } - - async addAdminToBlueprint( - blueprintId: number, - adminAddress: string, - chainId: number, - ) { - const user = await this.getOrCreateUser(adminAddress, chainId); - return this.db - .insertInto("blueprint_admins") - .values([ - { - blueprint_id: blueprintId, - user_id: user.id, - }, - ]) - .onConflict((oc) => - oc.columns(["blueprint_id", "user_id"]).doUpdateSet((eb) => ({ - blueprint_id: eb.ref("excluded.blueprint_id"), - user_id: eb.ref("excluded.user_id"), - })), - ) - .returning(["blueprint_id", "user_id"]) - .executeTakeFirst(); - } - - async getBlueprintById(blueprintId: number) { - return this.db - .selectFrom("blueprints") - .where("id", "=", blueprintId) - .select((eb) => [ - "id", - "created_at", - "form_values", - "minter_address", - "minted", - jsonArrayFrom( - eb - .selectFrom("users") - .innerJoin( - "blueprint_admins", - "blueprint_admins.user_id", - "users.id", - ) - .select(["id", "address", "chain_id", "display_name", "avatar"]) - .whereRef("blueprint_admins.blueprint_id", "=", "blueprints.id"), - ).as("admins"), - ]) - .executeTakeFirst(); - } - - async deleteBlueprint(blueprintId: number) { - return this.db - .deleteFrom("blueprints") - .where("id", "=", blueprintId) - 
.execute(); - } - - async addSignatureRequest( - request: DataDatabase["public"]["Tables"]["signature_requests"]["Insert"], - ) { - return this.db - .insertInto("signature_requests") - .values(request) - .returning(["safe_address", "message_hash"]) - .execute(); - } - - async getSignatureRequest(safe_address: string, message_hash: string) { - return this.db - .selectFrom("signature_requests") - .selectAll() - .where("safe_address", "=", safe_address) - .where("message_hash", "=", message_hash) - .executeTakeFirst(); - } - - async updateSignatureRequestStatus( - safe_address: string, - message_hash: string, - status: DataDatabase["public"]["Enums"]["signature_request_status_enum"], - ) { - return this.db - .updateTable("signature_requests") - .set({ status }) - .where("safe_address", "=", safe_address) - .where("message_hash", "=", message_hash) - .execute(); - } - - getSignatureRequests(args: GetSignatureRequestArgs) { - return { - data: this.handleGetData("signature_requests", args), - count: this.handleGetCount("signature_requests", args), - }; - } - - getDataQuery< - DB extends KyselyDataDatabase, - T extends keyof DB & string, - A extends object, - // eslint-disable-next-line @typescript-eslint/no-unused-vars - >(tableName: T, args: BaseArgs) { - switch (tableName) { - case "blueprints_with_admins": - case "blueprints": - return this.db.selectFrom("blueprints_with_admins").selectAll(); - case "orders": - case "marketplace_orders": - return this.db.selectFrom("marketplace_orders").selectAll(); - case "users": - return this.db.selectFrom("users").selectAll(); - case "signature_requests": - return this.db.selectFrom("signature_requests").selectAll(); - case "collections": - return this.db.selectFrom("collections").selectAll(); - default: - throw new Error(`Table ${tableName.toString()} not found`); - } - } - - getCountQuery< - DB extends KyselyDataDatabase, - T extends keyof DB & string, - A extends object, - // eslint-disable-next-line 
@typescript-eslint/no-unused-vars - >(tableName: T, args: BaseArgs) { - switch (tableName) { - case "blueprints_with_admins": - case "blueprints": - return this.db - .selectFrom("blueprints_with_admins") - .select((expressionBuilder) => { - return expressionBuilder.fn.countAll().as("count"); - }); - case "hyperboards": - return this.db.selectFrom("hyperboards").select((expressionBuilder) => { - return expressionBuilder.fn.countAll().as("count"); - }); - case "orders": - case "marketplace_orders": - return this.db - .selectFrom("marketplace_orders") - .select((expressionBuilder) => { - return expressionBuilder.fn.countAll().as("count"); - }); - case "signature_requests": - return this.db - .selectFrom("signature_requests") - .select((expressionBuilder) => { - return expressionBuilder.fn.countAll().as("count"); - }); - case "users": - return this.db.selectFrom("users").select((expressionBuilder) => { - return expressionBuilder.fn.countAll().as("count"); - }); - case "collections": - return this.db.selectFrom("collections").select((expressionBuilder) => { - return expressionBuilder.fn.countAll().as("count"); - }); - default: - throw new Error(`Table ${tableName.toString()} not found`); - } - } -} diff --git a/src/services/database/entities/AllowListRecordEntityService.ts b/src/services/database/entities/AllowListRecordEntityService.ts new file mode 100644 index 00000000..d2ff1cb6 --- /dev/null +++ b/src/services/database/entities/AllowListRecordEntityService.ts @@ -0,0 +1,71 @@ +import { Insertable, Selectable, Updateable } from "kysely"; +import { injectable } from "tsyringe"; +import { kyselyCaching } from "../../../client/kysely.js"; +import { GetAllowlistRecordsArgs } from "../../../graphql/schemas/args/allowlistRecordArgs.js"; +import { CachingDatabase } from "../../../types/kyselySupabaseCaching.js"; +import { + createEntityService, + type EntityService, +} from "./EntityServiceFactory.js"; + +/** The name of the allowlist records table */ +type TableName = 
"claimable_fractions_with_proofs"; +/** The type of the allowlist records table */ +type Table = CachingDatabase[TableName]; + +/** Type representing a selectable record from the claimable_fractions_with_proofs table */ +export type AllowlistRecordSelect = Selectable; + +/** Type representing an insertable record for the claimable_fractions_with_proofs table */ +export type AllowlistRecordInsert = Insertable
; + +/** Type representing an updateable record for the claimable_fractions_with_proofs table */ +export type AllowlistRecordUpdate = Updateable
; + +/** + * Service class for managing allowlist records in the claimable_fractions_with_proofs table. + * This service provides methods to query and retrieve allowlist records using the EntityService pattern. + * + * @injectable + */ +@injectable() +export class AllowlistRecordService { + /** The underlying entity service instance for database operations */ + private entityService: EntityService; + + /** + * Initializes a new instance of the AllowlistRecordService. + * Creates an EntityService instance for the claimable_fractions_with_proofs table. + */ + constructor() { + this.entityService = createEntityService< + CachingDatabase, + TableName, + GetAllowlistRecordsArgs + >( + "claimable_fractions_with_proofs", + "AllowlistRecordEntityService", + kyselyCaching, + ); + } + + /** + * Retrieves multiple allowlist records based on the provided arguments. + * + * @param args - Query arguments for filtering allowlist records + * @returns A promise that resolves to an array of allowlist records and a count of total records + */ + async getAllowlistRecords(args: GetAllowlistRecordsArgs) { + return this.entityService.getMany(args); + } + + /** + * Retrieves a single allowlist record based on the provided arguments. 
+ * + * @param args - Query arguments for filtering the allowlist record + * @returns A promise that resolves to a single allowlist record or null if not found + */ + async getAllowlistRecord(args: GetAllowlistRecordsArgs) { + return this.entityService.getSingle(args); + } +} diff --git a/src/services/database/entities/AttestationEntityService.ts b/src/services/database/entities/AttestationEntityService.ts new file mode 100644 index 00000000..736d718e --- /dev/null +++ b/src/services/database/entities/AttestationEntityService.ts @@ -0,0 +1,124 @@ +import { Selectable } from "kysely"; +import { injectable } from "tsyringe"; +import { kyselyCaching } from "../../../client/kysely.js"; +import { GetAttestationsArgs } from "../../../graphql/schemas/args/attestationArgs.js"; +import { CachingDatabase } from "../../../types/kyselySupabaseCaching.js"; +import { Json } from "../../../types/supabaseCaching.js"; +import { + createEntityService, + type EntityService, +} from "./EntityServiceFactory.js"; + +export type AttestationSelect = Selectable; + +/** + * Service for managing attestation entities in the database. + * Handles CRUD operations for attestations, including data parsing and validation. + * + * This service: + * - Provides methods for retrieving single or multiple attestations + * - Handles parsing of attestation data, particularly bigint conversions + * - Uses an EntityService for database operations + * - Supports filtering by attestation fields and related entities + * + * @injectable Marks the class as injectable for dependency injection + */ +@injectable() +export class AttestationService { + private entityService: EntityService< + CachingDatabase["attestations"], + GetAttestationsArgs + >; + + constructor() { + this.entityService = createEntityService< + CachingDatabase, + "attestations", + GetAttestationsArgs + >("attestations", "AttestationEntityService", kyselyCaching); + } + + /** + * Retrieves multiple attestations based on provided arguments. 
+   * Handles filtering and parsing of attestation data.
+   *
+   * @param args - Query arguments for filtering attestations
+   * @returns Promise resolving to:
+   * - data: Array of attestations with parsed data
+   * - count: Total number of matching attestations
+   * @throws {Error} If the database query fails
+   *
+   * @example
+   * ```typescript
+   * // Get attestations by ID
+   * const result = await attestationService.getAttestations({
+   *   where: { id: { eq: "123" } }
+   * });
+   *
+   * // Get attestations by related schema
+   * const result = await attestationService.getAttestations({
+   *   where: { eas_schema: { id: { eq: "schema-id" } } }
+   * });
+   * ```
+   */
+  async getAttestations(args: GetAttestationsArgs) {
+    const response = await this.entityService.getMany(args);
+    return {
+      ...response,
+      data: response.data.map(({ data, ...rest }) => ({
+        ...rest,
+        data: this.parseAttestation(data),
+      })),
+    };
+  }
+
+  /**
+   * Retrieves a single attestation based on provided arguments.
+   *
+   * @param args - Query arguments for filtering attestations
+   * @returns Promise resolving to:
+   * - The found attestation if it exists
+   * - undefined if no attestation matches the query
+   * @throws {Error} If the database query fails
+   *
+   * @example
+   * ```typescript
+   * const attestation = await attestationService.getAttestation({
+   *   where: { id: { eq: "123" } }
+   * });
+   * ```
+   */
+  async getAttestation(args: GetAttestationsArgs) {
+    return await this.entityService.getSingle(args);
+  }
+
+  /**
+   * Parses attestation data, converting bigint values to strings.
+   * This is necessary because GraphQL cannot handle bigint values directly.
+   *
+   * @param data - Raw attestation data from the database
+   * @returns Parsed data with bigint values converted to strings
+   *
+   * @example
+   * ```typescript
+   * const parsed = attestationService.parseAttestation({
+   *   token_id: 123456789n,
+   *   other_field: "value"
+   * });
+   * // parsed = { token_id: "123456789", other_field: "value" }
+   * ```
+   */
+  parseAttestation(data: Json) {
+    // TODO cleaner handling of bigints in created attestations
+    if (
+      typeof data === "object" &&
+      data !== null &&
+      "token_id" in data &&
+      data.token_id
+    ) {
+      const tokenId = typeof data.token_id === "string" ? BigInt(data.token_id) : BigInt(Number(data.token_id));
+      return { ...data, token_id: tokenId.toString() };
+    }
+    return data;
+  }
+}
diff --git a/src/services/database/entities/AttestationSchemaEntityService.ts b/src/services/database/entities/AttestationSchemaEntityService.ts
new file mode 100644
index 00000000..8e23b095
--- /dev/null
+++ b/src/services/database/entities/AttestationSchemaEntityService.ts
@@ -0,0 +1,89 @@
+import { Selectable } from "kysely";
+import { injectable } from "tsyringe";
+import { kyselyCaching } from "../../../client/kysely.js";
+import { GetAttestationSchemasArgs } from "../../../graphql/schemas/args/attestationSchemaArgs.js";
+import { CachingDatabase } from "../../../types/kyselySupabaseCaching.js";
+import {
+  createEntityService,
+  type EntityService,
+} from "./EntityServiceFactory.js";
+
+/** Type representing a selected attestation schema record from the database */
+export type AttestationSchemaSelect = Selectable<
+  CachingDatabase["supported_schemas"]
+>;
+
+/**
+ * Service class for managing attestation schema entities in the database.
+ * Handles CRUD operations for EAS (Ethereum Attestation Service) schemas.
+ * + * This service provides methods to: + * - Retrieve multiple attestation schemas with filtering and pagination + * - Retrieve a single attestation schema by its criteria + * + * @injectable Marks the class as injectable for dependency injection with tsyringe + */ +@injectable() +export class AttestationSchemaService { + private entityService: EntityService< + CachingDatabase["supported_schemas"], + GetAttestationSchemasArgs + >; + + /** + * Creates a new instance of AttestationSchemaService. + * Initializes the underlying entity service for database operations. + */ + constructor() { + this.entityService = createEntityService< + CachingDatabase, + "supported_schemas", + GetAttestationSchemasArgs + >("supported_schemas", "AttestationSchemaEntityService", kyselyCaching); + } + + /** + * Retrieves multiple attestation schemas based on provided arguments. + * + * @param args - Query arguments for filtering and pagination + * @returns A promise that resolves to an object containing: + * - data: Array of attestation schemas matching the query + * - count: Total number of matching schemas + * @throws {Error} If the database query fails + * + * @example + * ```typescript + * const result = await service.getAttestationSchemas({ + * where: { id: { eq: "schema-id" } } + * }); + * console.log(result.data); // Array of matching schemas + * console.log(result.count); // Total count + * ``` + */ + async getAttestationSchemas(args: GetAttestationSchemasArgs) { + return this.entityService.getMany(args); + } + + /** + * Retrieves a single attestation schema based on provided arguments. 
+ * + * @param args - Query arguments for filtering + * @returns A promise that resolves to: + * - The matching attestation schema if found + * - undefined if no schema matches the criteria + * @throws {Error} If the database query fails + * + * @example + * ```typescript + * const schema = await service.getAttestationSchema({ + * where: { id: { eq: "schema-id" } } + * }); + * if (schema) { + * console.log("Found schema:", schema); + * } + * ``` + */ + async getAttestationSchema(args: GetAttestationSchemasArgs) { + return this.entityService.getSingle(args); + } +} diff --git a/src/services/database/entities/BlueprintsEntityService.ts b/src/services/database/entities/BlueprintsEntityService.ts new file mode 100644 index 00000000..6d637020 --- /dev/null +++ b/src/services/database/entities/BlueprintsEntityService.ts @@ -0,0 +1,272 @@ +import { Insertable, Selectable, sql, Updateable } from "kysely"; +import { inject, singleton } from "tsyringe"; +import { DataKyselyService, kyselyData } from "../../../client/kysely.js"; +import type { GetBlueprintsArgs } from "../../../graphql/schemas/args/blueprintArgs.js"; +import type { DataDatabase } from "../../../types/kyselySupabaseData.js"; +import type { EntityService } from "./EntityServiceFactory.js"; +import { createEntityService } from "./EntityServiceFactory.js"; +import { UsersService } from "./UsersEntityService.js"; + +export type BlueprintSelect = Selectable; +export type BlueprintInsert = Insertable; +export type BlueprintUpdate = Updateable; + +export type BlueprintAdminSelect = Selectable; + +/** + * Service for handling blueprint-related database operations. + * Provides methods for CRUD operations on blueprints and managing blueprint admins. 
+ * + * Features: + * - Fetch blueprints with filtering and pagination + * - Manage blueprint administrators + * - Handle blueprint minting and collection updates + * - Transaction support for complex operations + * + * @singleton Marks the class as a singleton for dependency injection + */ +@singleton() +export class BlueprintsService { + private entityService: EntityService< + DataDatabase["blueprints_with_admins"], + GetBlueprintsArgs + >; + + /** + * Creates a new instance of BlueprintsService. + * + * @param dbService - Service for database operations + * @param usersService - Service for user-related operations + */ + constructor( + @inject(DataKyselyService) private dbService: DataKyselyService, + @inject(UsersService) private usersService: UsersService, + ) { + this.entityService = createEntityService< + DataDatabase, + "blueprints_with_admins", + GetBlueprintsArgs + >("blueprints_with_admins", "BlueprintsEntityService", kyselyData); + } + + /** + * Retrieves blueprints based on provided arguments. + * + * @param args - Query arguments for filtering and pagination + * @returns Promise resolving to an object containing: + * - data: Array of matching blueprints + * - count: Total number of matching blueprints + * @throws Error if database operation fails + */ + async getBlueprints(args: GetBlueprintsArgs) { + return this.entityService.getMany(args); + } + + /** + * Retrieves a single blueprint based on provided arguments. + * + * @param args - Query arguments for filtering + * @returns Promise resolving to a single blueprint or undefined if not found + * @throws Error if database operation fails + */ + async getBlueprint(args: GetBlueprintsArgs) { + return this.entityService.getSingle(args); + } + + /** + * Retrieves administrators for a specific blueprint. 
+ * + * @param blueprintId - ID of the blueprint + * @returns Promise resolving to an array of admin users + * @throws Error if database operation fails + */ + async getBlueprintAdmins(blueprintId: number) { + return await this.dbService + .getConnection() + .selectFrom("blueprint_admins") + .where("blueprint_id", "=", blueprintId) + .innerJoin("users", "blueprint_admins.user_id", "users.id") + .selectAll("users") + .execute() + .then((res) => + res.map((admin) => ({ + ...admin, + // TODO: Investigate why chain_id is returned as a string + chain_id: Number(admin.chain_id), + })), + ); + } + + /** + * Deletes a blueprint by ID. + * + * @param blueprintId - ID of the blueprint to delete + * @returns Promise resolving when deletion is complete + * @throws Error if database operation fails + */ + async deleteBlueprint(blueprintId: number) { + return this.dbService + .getConnection() + .deleteFrom("blueprints") + .where("id", "=", blueprintId) + .execute(); + } + + /** + * Creates or updates multiple blueprints. + * + * @param blueprints - Array of blueprints to create or update + * @returns Promise resolving to an array of created/updated blueprint IDs + * @throws Error if database operation fails + */ + async upsertBlueprints(blueprints: BlueprintInsert[]) { + return this.dbService + .getConnection() + .insertInto("blueprints") + .values(blueprints) + .onConflict((oc) => + oc.columns(["id"]).doUpdateSet((eb) => ({ + id: eb.ref("excluded.id"), + form_values: eb.ref("excluded.form_values"), + minter_address: eb.ref("excluded.minter_address"), + minted: eb.ref("excluded.minted"), + })), + ) + .returning(["id"]) + .execute(); + } + + /** + * Adds an administrator to a blueprint. + * Creates the user if they don't exist. 
+ * + * @param blueprintId - ID of the blueprint + * @param adminAddress - Ethereum address of the admin + * @param chainId - Chain ID where the admin address is valid + * @returns Promise resolving to the created/updated admin record + * @throws Error if database operation fails + */ + async addAdminToBlueprint( + blueprintId: number, + adminAddress: string, + chainId: number, + ) { + const user = await this.usersService.getOrCreateUser({ + address: adminAddress, + chain_id: chainId, + }); + + return this.dbService + .getConnection() + .insertInto("blueprint_admins") + .values([ + { + blueprint_id: blueprintId, + user_id: user.id, + }, + ]) + .onConflict((oc) => + oc.columns(["blueprint_id", "user_id"]).doUpdateSet((eb) => ({ + blueprint_id: eb.ref("excluded.blueprint_id"), + user_id: eb.ref("excluded.user_id"), + })), + ) + .returning(["blueprint_id", "user_id"]) + .executeTakeFirst(); + } + + /** + * Mints a blueprint and updates related collections. + * This operation: + * 1. Gets all blueprint hyperboard metadata + * 2. Inserts the new hypercert into collections + * 3. Updates hyperboard metadata + * 4. Marks the blueprint as minted + * 5. Removes the blueprint from collections + * + * All operations are wrapped in a transaction for atomicity. 
+ * + * @param blueprintId - ID of the blueprint to mint + * @param hypercertId - ID of the newly created hypercert + * @returns Promise resolving when all operations are complete + * @throws Error if any database operation fails (triggers rollback) + */ + async mintBlueprintAndSwapInCollections( + blueprintId: number, + hypercertId: string, + ) { + await this.dbService + .getConnection() + .transaction() + .execute(async (trx) => { + // Get all blueprint hyperboard metadata for this blueprint + const oldBlueprintMetadata = await trx + .deleteFrom("hyperboard_blueprint_metadata") + .where("blueprint_id", "=", blueprintId) + .returning(["hyperboard_id", "collection_id", "display_size"]) + .execute(); + + if (oldBlueprintMetadata.length) { + // Insert the new hypercert for each collection + await trx + .insertInto("hypercerts") + .values( + oldBlueprintMetadata.map((oldBlueprintMetadata) => ({ + hypercert_id: hypercertId, + collection_id: oldBlueprintMetadata.collection_id, + })), + ) + .onConflict((oc) => + oc + .columns(["hypercert_id", "collection_id"]) + .doUpdateSet((eb) => ({ + hypercert_id: eb.ref("excluded.hypercert_id"), + collection_id: eb.ref("excluded.collection_id"), + })), + ) + .returning(["hypercert_id", "collection_id"]) + .execute(); + + // Insert the new hypercert metadata for each collection + await trx + .insertInto("hyperboard_hypercert_metadata") + .values( + oldBlueprintMetadata.map((oldBlueprintMetadata) => ({ + hyperboard_id: oldBlueprintMetadata.hyperboard_id, + hypercert_id: hypercertId, + collection_id: oldBlueprintMetadata.collection_id, + display_size: oldBlueprintMetadata.display_size, + })), + ) + .onConflict((oc) => + oc + .columns(["hyperboard_id", "hypercert_id", "collection_id"]) + .doUpdateSet((eb) => ({ + hypercert_id: eb.ref("excluded.hypercert_id"), + collection_id: eb.ref("excluded.collection_id"), + hyperboard_id: eb.ref("excluded.hyperboard_id"), + display_size: eb.ref("excluded.display_size"), + })), + ) + 
.returning(["hyperboard_id", "hypercert_id", "collection_id"]) + .execute(); + } + + // Set blueprint to minted + await trx + .updateTable("blueprints") + .set((eb) => ({ + minted: true, + hypercert_ids: sql`array_append(${eb.ref("hypercert_ids")}, ${hypercertId})`, + })) + .where("id", "=", blueprintId) + .execute(); + + // Delete blueprint from collections, because it has been replaced by a hypercert + await trx + .deleteFrom("collection_blueprints") + .where("blueprint_id", "=", blueprintId) + .execute(); + }); + } +} diff --git a/src/services/database/entities/CollectionEntityService.ts b/src/services/database/entities/CollectionEntityService.ts new file mode 100644 index 00000000..c906bc04 --- /dev/null +++ b/src/services/database/entities/CollectionEntityService.ts @@ -0,0 +1,341 @@ +import { Insertable, Selectable } from "kysely"; +import { jsonArrayFrom } from "kysely/helpers/postgres"; +import { inject, injectable } from "tsyringe"; +import { DataKyselyService, kyselyData } from "../../../client/kysely.js"; +import { GetCollectionsArgs } from "../../../graphql/schemas/args/collectionArgs.js"; +import { DataDatabase } from "../../../types/kyselySupabaseData.js"; +import { BlueprintsService } from "./BlueprintsEntityService.js"; +import { + createEntityService, + type EntityService, +} from "./EntityServiceFactory.js"; +import { HypercertsService } from "./HypercertsEntityService.js"; +import { UserInsert, UsersService } from "./UsersEntityService.js"; + +export type CollectionSelect = Selectable; +export type CollectionInsert = Insertable; + +/** + * Service for managing collection entities in the database. + * Handles CRUD operations and relationships for collections, including hypercerts, blueprints, and admins. 
+ * + * Features: + * - Fetch collections with filtering and pagination + * - Manage collection contents (hypercerts and blueprints) + * - Handle collection administrators + * - Support for complex queries with JSON aggregation + * - Transaction support for data integrity + * + * @injectable Marks the class as injectable for dependency injection with tsyringe + */ +@injectable() +export class CollectionService { + private entityService: EntityService< + DataDatabase["collections"], + GetCollectionsArgs + >; + + /** + * Creates a new instance of CollectionService. + * + * @param hypercertsService - Service for hypercert-related operations + * @param dbService - Service for database operations + * @param blueprintsService - Service for blueprint-related operations + * @param usersService - Service for user-related operations + */ + constructor( + @inject(HypercertsService) + private hypercertsService: HypercertsService, + @inject(DataKyselyService) + private dbService: DataKyselyService, + @inject(BlueprintsService) + private blueprintsService: BlueprintsService, + @inject(UsersService) + private usersService: UsersService, + ) { + this.entityService = createEntityService< + DataDatabase, + "collections", + GetCollectionsArgs + >("collections", "CollectionEntityService", kyselyData); + } + + /** + * Retrieves multiple collections based on provided arguments. + * + * @param args - Query arguments for filtering and pagination + * @returns A promise resolving to an object containing: + * - data: Array of collections matching the query + * - count: Total number of matching collections + * @throws {Error} If the database query fails + */ + async getCollections(args: GetCollectionsArgs) { + return this.entityService.getMany(args); + } + + /** + * Retrieves a single collection based on provided arguments. 
+ * + * @param args - Query arguments for filtering + * @returns A promise resolving to a single collection or undefined if not found + * @throws {Error} If the database query fails + */ + async getCollection(args: GetCollectionsArgs) { + return this.entityService.getSingle(args); + } + + /** + * Retrieves blueprint IDs associated with a collection. + * + * @param collectionId - ID of the collection + * @returns Promise resolving to an array of blueprint IDs + * @throws {Error} If the database query fails + */ + async getCollectionBlueprintIds(collectionId: string) { + return await this.dbService + .getConnection() + .selectFrom("collection_blueprints") + .select("blueprint_id") + .where("collection_id", "=", collectionId) + .execute(); + } + + /** + * Retrieves all blueprints associated with a collection. + * + * @param collectionId - ID of the collection + * @returns Promise resolving to an array of blueprints + * @throws {Error} If the database query fails + */ + async getCollectionBlueprints(collectionId: string) { + const collectionBlueprintIds = + await this.getCollectionBlueprintIds(collectionId); + const blueprintIds = collectionBlueprintIds.map( + (blueprint) => blueprint.blueprint_id, + ); + + return this.blueprintsService.getBlueprints({ + where: { id: { in: blueprintIds } }, + }); + } + + /** + * Retrieves hypercert IDs associated with a collection. + * + * @param collectionId - ID of the collection + * @returns Promise resolving to an array of hypercert IDs + * @throws {Error} If the database query fails + */ + async getCollectionHypercertIds(collectionId: string) { + return await this.dbService + .getConnection() + .selectFrom("hypercerts") + .select("hypercert_id") + .where("collection_id", "=", collectionId) + .execute(); + } + + /** + * Retrieves all hypercerts associated with a collection. 
+ * + * @param collectionId - ID of the collection + * @returns Promise resolving to an array of hypercerts + * @throws {Error} If the database query fails + */ + async getCollectionHypercerts(collectionId: string) { + const hypercerts = await this.getCollectionHypercertIds(collectionId); + const hypercertIds = hypercerts.map((hypercert) => hypercert.hypercert_id); + + return this.hypercertsService.getHypercerts({ + where: { hypercert_id: { in: hypercertIds } }, + }); + } + + /** + * Retrieves all administrators of a collection. + * + * @param collectionId - ID of the collection + * @returns Promise resolving to an array of users who are admins + * @throws {Error} If the database query fails + */ + async getCollectionAdmins(collectionId: string) { + return await this.dbService + .getConnection() + .selectFrom("users") + .innerJoin("collection_admins", "collection_admins.user_id", "users.id") + .select([ + "users.address", + "users.chain_id", + "users.display_name", + "users.avatar", + ]) + .where("collection_admins.collection_id", "=", collectionId) + .execute() + .then((res) => + res.map((x) => ({ + ...x, + chain_id: Number(x.chain_id), + })), + ); + } + + /** + * Retrieves detailed collection information including admins. + * Uses JSON aggregation for efficient data retrieval. + * + * @param collectionId - ID of the collection + * @returns Promise resolving to the collection with admin details + * @throws {Error} If the database query fails + */ + // TODO this type and query can be cleaner. Do we need a view? 
+ async getCollectionById(collectionId: string) { + return await this.dbService + .getConnection() + .selectFrom("collections") + .select((eb) => [ + "id", + "chain_ids", + jsonArrayFrom( + eb + .selectFrom("collection_admins") + .select((eb) => [ + jsonArrayFrom( + eb + .selectFrom("users") + .select(["address", "chain_id", "user_id"]) + .whereRef("user_id", "=", "user_id"), + ).as("admins"), + ]) + .whereRef("collection_id", "=", "collections.id"), + ).as("collection_admins"), + ]) + .where("id", "=", collectionId) + .executeTakeFirst(); + } + + /** + * Creates or updates multiple collections. + * + * @param collections - Array of collection data to upsert + * @returns Promise resolving to the result of the upsert operation + * @throws {Error} If the database operation fails + */ + async upsertCollections(collections: CollectionInsert[]) { + return this.dbService + .getConnection() + .insertInto("collections") + .values(collections) + .onConflict((oc) => + oc.column("id").doUpdateSet((eb) => ({ + id: eb.ref("excluded.id"), + name: eb.ref("excluded.name"), + description: eb.ref("excluded.description"), + chain_ids: eb.ref("excluded.chain_ids"), + hidden: eb.ref("excluded.hidden"), + })), + ) + .returningAll() + .execute(); + } + + /** + * Removes all hypercerts from a collection. + * + * @param collectionId - ID of the collection + * @returns Promise resolving to the result of the delete operation + * @throws {Error} If the database operation fails + */ + async deleteAllHypercertsFromCollection(collectionId: string) { + return this.dbService + .getConnection() + .deleteFrom("hypercerts") + .where("collection_id", "=", collectionId) + .execute(); + } + + /** + * Removes all blueprints from a collection. 
+ * + * @param collectionId - ID of the collection + * @returns Promise resolving to the result of the delete operation + * @throws {Error} If the database operation fails + */ + async deleteAllBlueprintsFromCollection(collectionId: string) { + return this.dbService + .getConnection() + .deleteFrom("collection_blueprints") + .where("collection_id", "=", collectionId) + .execute(); + } + + /** + * Associates hypercerts with collections. + * + * @param hypercerts - Array of hypercert-collection associations to create or update + * @returns Promise resolving to the result of the upsert operation + * @throws {Error} If the database operation fails + */ + async upsertHypercertCollections( + hypercerts: Insertable[], + ) { + return this.dbService + .getConnection() + .insertInto("hypercerts") + .values(hypercerts) + .onConflict((oc) => + oc.columns(["hypercert_id", "collection_id"]).doUpdateSet((eb) => ({ + hypercert_id: eb.ref("excluded.hypercert_id"), + collection_id: eb.ref("excluded.collection_id"), + })), + ) + .execute(); + } + + /** + * Adds an administrator to a collection. + * + * @param collectionId - ID of the collection + * @param admin - User data for the new admin + * @returns Promise resolving to the result of the operation + * @throws {Error} If the database operation fails + */ + async addAdminToCollection(collectionId: string, admin: UserInsert) { + const user = await this.usersService.getOrCreateUser(admin); + return this.dbService + .getConnection() + .insertInto("collection_admins") + .values([ + { + collection_id: collectionId, + user_id: user.id, + }, + ]) + .onConflict((oc) => + oc.columns(["collection_id", "user_id"]).doUpdateSet((eb) => ({ + collection_id: eb.ref("excluded.collection_id"), + user_id: eb.ref("excluded.user_id"), + })), + ) + .executeTakeFirst(); + } + + /** + * Associates blueprints with a collection. 
+ * + * @param values - Array of blueprint-collection associations to create + * @returns Promise resolving to the result of the insert operation + * @throws {Error} If the database operation fails + */ + async addBlueprintsToCollection( + values: Insertable[], + ) { + return this.dbService + .getConnection() + .insertInto("collection_blueprints") + .values(values) + .onConflict((oc) => + oc.columns(["blueprint_id", "collection_id"]).doNothing(), + ) + .execute(); + } +} diff --git a/src/services/database/entities/ContractEntityService.ts b/src/services/database/entities/ContractEntityService.ts new file mode 100644 index 00000000..25312804 --- /dev/null +++ b/src/services/database/entities/ContractEntityService.ts @@ -0,0 +1,99 @@ +import { Selectable } from "kysely"; +import { injectable } from "tsyringe"; +import { kyselyCaching } from "../../../client/kysely.js"; +import { GetContractsArgs } from "../../../graphql/schemas/args/contractArgs.js"; +import { CachingDatabase } from "../../../types/kyselySupabaseCaching.js"; +import { + createEntityService, + type EntityService, +} from "./EntityServiceFactory.js"; + +/** Type representing a selected contract record from the database */ +export type ContractSelect = Selectable; + +/** + * Service class for managing contract entities in the database. + * Handles CRUD operations for contracts deployed on various chains. 
+ * + * This service provides methods to: + * - Retrieve multiple contracts with filtering and pagination + * - Retrieve a single contract by its criteria + * + * Each contract represents a smart contract deployed on a blockchain, + * containing information such as: + * - Chain ID + * - Contract address + * - Deployment block number + * + * @injectable Marks the class as injectable for dependency injection with tsyringe + */ +@injectable() +export class ContractService { + private entityService: EntityService< + CachingDatabase["contracts"], + GetContractsArgs + >; + + /** + * Creates a new instance of ContractService. + * Initializes the underlying entity service for database operations. + */ + constructor() { + this.entityService = createEntityService< + CachingDatabase, + "contracts", + GetContractsArgs + >("contracts", "ContractEntityService", kyselyCaching); + } + + /** + * Retrieves multiple contracts based on provided arguments. + * + * @param args - Query arguments for filtering and pagination + * @returns A promise that resolves to an object containing: + * - data: Array of contracts matching the query + * - count: Total number of matching contracts + * @throws {Error} If the database query fails + * + * @example + * ```typescript + * const result = await service.getContracts({ + * where: { + * chain_id: { eq: "1" }, + * contract_address: { eq: "0x..." } + * } + * }); + * console.log(result.data); // Array of matching contracts + * console.log(result.count); // Total count + * ``` + */ + async getContracts(args: GetContractsArgs) { + return this.entityService.getMany(args); + } + + /** + * Retrieves a single contract based on provided arguments. 
+ * + * @param args - Query arguments for filtering + * @returns A promise that resolves to: + * - The matching contract if found + * - undefined if no contract matches the criteria + * @throws {Error} If the database query fails + * + * @example + * ```typescript + * const contract = await service.getContract({ + * where: { + * chain_id: { eq: "1" }, + * contract_address: { eq: "0x..." } + * } + * }); + * if (contract) { + * console.log("Found contract:", contract); + * } + * ``` + */ + async getContract(args: GetContractsArgs) { + return this.entityService.getSingle(args); + } +} diff --git a/src/services/database/entities/EntityServiceFactory.ts b/src/services/database/entities/EntityServiceFactory.ts new file mode 100644 index 00000000..2abb634b --- /dev/null +++ b/src/services/database/entities/EntityServiceFactory.ts @@ -0,0 +1,130 @@ +import { Kysely, Selectable, SelectQueryBuilder } from "kysely"; +import { SortOrder } from "../../../graphql/schemas/enums/sortEnums.js"; +import { + applyWhere, + createStandardQueryModifier, + QueryModifier, +} from "../../../lib/db/queryModifiers/queryModifiers.js"; +import { BaseQueryArgsType } from "../../../lib/graphql/BaseQueryArgs.js"; +import { + QueryStrategy, + SupportedDatabase, +} from "../strategies/QueryStrategy.js"; +import { QueryStrategyFactory } from "../strategies/QueryStrategyFactory.js"; + +/** + * Interface defining the core functionality of an entity service + * @template TEntity - The entity type this service manages + * @template TArgs - The arguments type for queries + */ +export interface EntityService { + /** + * Retrieves a single entity based on the provided arguments + * @param args - Query arguments + * @returns Promise resolving to the entity or undefined if not found + */ + getSingle(args: TArgs): Promise | undefined>; + + /** + * Retrieves multiple entities based on the provided arguments + * @param args - Query arguments + * @returns Promise resolving to an object containing the data and 
total count of all matching entities + */ + getMany(args: TArgs): Promise<{ data: Selectable[]; count: number }>; +} + +/** + * Creates an entity service for a specific database table + * @template DB - The database schema type + * @template T - The table name type + * @template Args - The arguments type for queries + * @param tableName - Name of the table this service will manage + * @param ServiceName - Name to be assigned to the generated service class + * @param dbConnection - Database connection instance + * @returns An instance of EntityService for the specified table + * @throws {Error} If the strategy for the table cannot be found + */ +export function createEntityService< + DB extends SupportedDatabase, + T extends keyof DB & string, + Args extends BaseQueryArgsType< + Record, + { [K in keyof DB[T]]?: SortOrder | null } + >, +>( + tableName: T, + ServiceName: string, + dbConnection: Kysely, +): EntityService { + /** + * Internal service class generated for the specific entity + */ + class GeneratedEntityService implements EntityService { + private readonly strategy: QueryStrategy; + private readonly db: Kysely; + private readonly tableName: T; + private readonly applyQueryModifiers: QueryModifier; + + constructor(dbConnection: Kysely, tableName: T) { + this.db = dbConnection; + this.strategy = QueryStrategyFactory.getStrategy(tableName); + this.tableName = tableName; + this.applyQueryModifiers = createStandardQueryModifier( + tableName, + ); + } + + /** + * @inheritdoc + */ + async getSingle(args: Args): Promise | undefined> { + const query = this.applyQueryModifiers( + this.strategy.buildDataQuery(this.db, args), + args, + ); + + return await query.executeTakeFirst(); + } + + /** + * @inheritdoc + */ + async getMany( + args: Args, + ): Promise<{ data: Selectable[]; count: number }> { + const dataQuery = this.applyQueryModifiers( + this.strategy.buildDataQuery(this.db, args), + args, + ); + + // For count query, we only need to apply where conditions + let 
countQuery = this.strategy.buildCountQuery(this.db, args); + if (args.where) { + countQuery = applyWhere( + this.tableName, + countQuery as unknown as SelectQueryBuilder>, + args, + ) as unknown as typeof countQuery; + } + + const result = await this.db + .transaction() + .execute(async (transaction) => { + const [dataRes, countRes] = await Promise.all([ + transaction.executeQuery(dataQuery), + transaction.executeQuery(countQuery), + ]); + + return { + data: dataRes.rows as unknown as Selectable[], + count: Number(countRes.rows[0]?.count ?? dataRes.rows.length), + }; + }); + + return result; + } + } + + Object.defineProperty(GeneratedEntityService, "name", { value: ServiceName }); + return new GeneratedEntityService(dbConnection, tableName); +} diff --git a/src/services/database/entities/FractionEntityService.ts b/src/services/database/entities/FractionEntityService.ts new file mode 100644 index 00000000..fea7b5c7 --- /dev/null +++ b/src/services/database/entities/FractionEntityService.ts @@ -0,0 +1,83 @@ +import { Selectable } from "kysely"; +import { injectable } from "tsyringe"; +import { kyselyCaching } from "../../../client/kysely.js"; +import { GetFractionsArgs } from "../../../graphql/schemas/args/fractionArgs.js"; +import { CachingDatabase } from "../../../types/kyselySupabaseCaching.js"; +import { + createEntityService, + type EntityService, +} from "./EntityServiceFactory.js"; + +/** Type representing a selected fraction record from the database */ +export type FractionSelect = Selectable; + +/** + * Service class for managing fraction entities in the database. + * Handles CRUD operations for hypercert fractions, which represent ownership units of hypercerts. 
+ * + * This service provides methods to: + * - Query multiple fractions with filtering and pagination + * - Retrieve single fraction records by various criteria + * + * @injectable + */ +@injectable() +export class FractionService { + /** The underlying entity service instance for database operations */ + private entityService: EntityService< + CachingDatabase["fractions_view"], + GetFractionsArgs + >; + + /** + * Initializes a new instance of the FractionService. + * Creates an EntityService instance for the fractions_view table. + */ + constructor() { + this.entityService = createEntityService< + CachingDatabase, + "fractions_view", + GetFractionsArgs + >("fractions_view", "FractionEntityService", kyselyCaching); + } + + /** + * Retrieves multiple fractions based on the provided arguments. + * + * @param args - Query arguments for filtering fractions + * @returns Promise resolving to an object containing: + * - data: Array of fraction records + * - count: Total number of matching records + * @throws {Error} If the database query fails + * + * @example + * ```typescript + * // Get all fractions owned by a specific address + * const result = await fractionService.getFractions({ + * where: { owner_address: { eq: "0x..." } } + * }); + * ``` + */ + async getFractions(args: GetFractionsArgs) { + return this.entityService.getMany(args); + } + + /** + * Retrieves a single fraction based on the provided arguments. 
+ * + * @param args - Query arguments for filtering the fraction + * @returns Promise resolving to a single fraction record or undefined if not found + * @throws {Error} If the database query fails + * + * @example + * ```typescript + * // Get a specific fraction by ID + * const fraction = await fractionService.getFraction({ + * where: { units: { eq: 100n } } + * }); + * ``` + */ + async getFraction(args: GetFractionsArgs) { + return this.entityService.getSingle(args); + } +} diff --git a/src/services/database/entities/HyperboardEntityService.ts b/src/services/database/entities/HyperboardEntityService.ts new file mode 100644 index 00000000..06a0acfc --- /dev/null +++ b/src/services/database/entities/HyperboardEntityService.ts @@ -0,0 +1,320 @@ +import { Insertable, Selectable, Updateable } from "kysely"; +import { inject, injectable } from "tsyringe"; +import { DataKyselyService, kyselyData } from "../../../client/kysely.js"; +import { GetHyperboardsArgs } from "../../../graphql/schemas/args/hyperboardArgs.js"; +import { DataDatabase } from "../../../types/kyselySupabaseData.js"; +import { CollectionService } from "./CollectionEntityService.js"; +import { + createEntityService, + type EntityService, +} from "./EntityServiceFactory.js"; +import { UserInsert, UsersService } from "./UsersEntityService.js"; + +export type HyperboardSelect = Selectable; +export type HyperboardInsert = Insertable; +export type HyperboardUpdate = Updateable; + +export type HyperboardCollectionSelect = Selectable< + DataDatabase["hyperboard_collections"] +>; +export type HyperboardAdminSelect = Selectable< + DataDatabase["hyperboard_admins"] +>; +export type HyperboardAdminInsert = Insertable< + DataDatabase["hyperboard_admins"] +>; +export type HyperboardHypercertMetadataSelect = Selectable< + DataDatabase["hyperboard_hypercert_metadata"] +>; +export type HyperboardBlueprintMetadataSelect = Selectable< + DataDatabase["hyperboard_blueprint_metadata"] +>; +export type 
HyperboardHypercertMetadataInsert = Insertable< + DataDatabase["hyperboard_hypercert_metadata"] +>; +export type HyperboardBlueprintMetadataInsert = Insertable< + DataDatabase["hyperboard_blueprint_metadata"] +>; + +/** + * Service for managing hyperboard entities and their relationships. + * Handles CRUD operations and relationship management for hyperboards. + * + * This service provides methods for: + * - Retrieving hyperboards and their related data + * - Managing hyperboard collections + * - Managing hyperboard admins + * - Managing hyperboard metadata (hypercerts and blueprints) + * - Creating and updating hyperboards + */ +@injectable() +export class HyperboardService { + private entityService: EntityService< + DataDatabase["hyperboards_with_admins"], + GetHyperboardsArgs + >; + + constructor( + @inject(DataKyselyService) private dbService: DataKyselyService, + @inject(CollectionService) private collectionService: CollectionService, + @inject(UsersService) private usersService: UsersService, + ) { + this.entityService = createEntityService< + DataDatabase, + "hyperboards_with_admins", + GetHyperboardsArgs + >("hyperboards_with_admins", "HyperboardEntityService", kyselyData); + } + + /** + * Retrieves multiple hyperboards based on provided arguments. + * @param args - Query arguments for filtering hyperboards + * @returns Promise resolving to hyperboards matching the criteria + * @throws {Error} If there's an error executing the query + */ + async getHyperboards(args: GetHyperboardsArgs) { + return this.entityService.getMany(args); + } + + /** + * Retrieves a single hyperboard based on provided arguments. + * @param args - Query arguments for filtering the hyperboard + * @returns Promise resolving to the matching hyperboard + * @throws {Error} If there's an error executing the query + */ + async getHyperboard(args: GetHyperboardsArgs) { + return this.entityService.getSingle(args); + } + + /** + * Retrieves collections associated with a hyperboard. 
+ * @param hyperboardId - ID of the hyperboard + * @returns Promise resolving to associated collections + * @throws {DatabaseError} If there's an error executing the query + */ + async getHyperboardCollections(hyperboardId: string) { + const hyperboardCollections = await this.dbService + .getConnection() + .selectFrom("hyperboard_collections") + .where("hyperboard_id", "=", hyperboardId) + .select("collection_id") + .execute(); + + const collectionIds = hyperboardCollections.map( + (collection) => collection.collection_id, + ); + return this.collectionService.getCollections({ + where: { + id: { + in: collectionIds, + }, + }, + }); + } + + /** + * Retrieves admin users associated with a hyperboard. + * @param hyperboardId - ID of the hyperboard + * @returns Promise resolving to admin users + * @throws {Error} If there's an error executing the query + */ + async getHyperboardAdmins(hyperboardId: string) { + const hyperboardAdminIds = await this.dbService + .getConnection() + .selectFrom("hyperboard_admins") + .where("hyperboard_id", "=", hyperboardId) + .select("user_id") + .execute(); + + const userIds = hyperboardAdminIds.map((admin) => admin.user_id); + return this.usersService.getUsers({ + where: { + id: { + in: userIds, + }, + }, + }); + } + + /** + * Retrieves hypercert metadata for a hyperboard. + * @param hyperboardId - ID of the hyperboard + * @returns Promise resolving to hypercert metadata + * @throws {Error} If there's an error executing the query + */ + async getHyperboardHypercertMetadata( + hyperboardId: string, + ): Promise { + return this.dbService + .getConnection() + .selectFrom("hyperboard_hypercert_metadata") + .where("hyperboard_id", "=", hyperboardId as unknown as string) + .selectAll() + .execute(); + } + + /** + * Retrieves blueprint metadata for a hyperboard. 
+ * @param hyperboardId - ID of the hyperboard + * @returns Promise resolving to blueprint metadata + * @throws {Error} If there's an error executing the query + */ + async getHyperboardBlueprintMetadata( + hyperboardId: string, + ): Promise { + return this.dbService + .getConnection() + .selectFrom("hyperboard_blueprint_metadata") + .where("hyperboard_id", "=", hyperboardId as unknown as string) + .selectAll() + .execute(); + } + + /** + * Deletes a hyperboard by ID. + * @param hyperboardId - ID of the hyperboard to delete + * @returns Promise resolving to the deleted hyperboard + * @throws {Error} If there's an error executing the query + */ + async deleteHyperboard(hyperboardId: string) { + return this.dbService + .getConnection() + .deleteFrom("hyperboards") + .where("id", "=", hyperboardId) + .executeTakeFirstOrThrow(); + } + + /** + * Creates or updates hyperboards. + * @param hyperboards - Array of hyperboard data to upsert + * @returns Promise resolving to the upserted hyperboards + * @throws {Error} If there's an error executing the query + */ + async upsertHyperboard(hyperboards: HyperboardInsert[]) { + return this.dbService + .getConnection() + .insertInto("hyperboards") + .values(hyperboards) + .onConflict((oc) => + oc.column("id").doUpdateSet((eb) => ({ + id: eb.ref("excluded.id"), + name: eb.ref("excluded.name"), + chain_ids: eb.ref("excluded.chain_ids"), + background_image: eb.ref("excluded.background_image"), + grayscale_images: eb.ref("excluded.grayscale_images"), + tile_border_color: eb.ref("excluded.tile_border_color"), + })), + ) + .returningAll() + .execute(); + } + + /** + * Creates or updates hypercert metadata for a hyperboard. 
+ * @param metadata - Array of metadata to upsert + * @returns Promise resolving to the upserted metadata + * @throws {Error} If there's an error executing the query + */ + async upsertHyperboardHypercertMetadata( + metadata: HyperboardHypercertMetadataInsert[], + ) { + return this.dbService + .getConnection() + .insertInto("hyperboard_hypercert_metadata") + .values(metadata) + .onConflict((oc) => + oc + .columns(["hyperboard_id", "hypercert_id", "collection_id"]) + .doUpdateSet((eb) => ({ + hypercert_id: eb.ref("excluded.hypercert_id"), + collection_id: eb.ref("excluded.collection_id"), + hyperboard_id: eb.ref("excluded.hyperboard_id"), + display_size: eb.ref("excluded.display_size"), + })), + ) + .returningAll() + .execute(); + } + + /** + * Creates or updates blueprint metadata for a hyperboard. + * @param metadata - Array of metadata to upsert + * @returns Promise resolving to the upserted metadata + * @throws {Error} If there's an error executing the query + */ + async upsertHyperboardBlueprintMetadata( + metadata: HyperboardBlueprintMetadataInsert[], + ) { + return this.dbService + .getConnection() + .insertInto("hyperboard_blueprint_metadata") + .values(metadata) + .onConflict((oc) => + oc + .columns(["hyperboard_id", "blueprint_id", "collection_id"]) + .doUpdateSet((eb) => ({ + blueprint_id: eb.ref("excluded.blueprint_id"), + collection_id: eb.ref("excluded.collection_id"), + hyperboard_id: eb.ref("excluded.hyperboard_id"), + display_size: eb.ref("excluded.display_size"), + })), + ) + .returningAll() + .execute(); + } + + /** + * Adds a collection to a hyperboard. 
+ * @param hyperboardId - ID of the hyperboard + * @param collectionId - ID of the collection to add + * @returns Promise resolving to the created relationship + * @throws {Error} If there's an error executing the query + */ + async addCollectionToHyperboard(hyperboardId: string, collectionId: string) { + return this.dbService + .getConnection() + .insertInto("hyperboard_collections") + .values([ + { + hyperboard_id: hyperboardId, + collection_id: collectionId, + }, + ]) + .onConflict((oc) => + oc.columns(["hyperboard_id", "collection_id"]).doUpdateSet((eb) => ({ + hyperboard_id: eb.ref("excluded.hyperboard_id"), + collection_id: eb.ref("excluded.collection_id"), + })), + ) + .returningAll() + .executeTakeFirstOrThrow(); + } + + /** + * Adds an admin user to a hyperboard. + * @param hyperboardId - ID of the hyperboard + * @param user - User data to add as admin + * @returns Promise resolving to the created relationship + * @throws {Error} If there's an error executing the query + */ + async addAdminToHyperboard(hyperboardId: string, user: UserInsert) { + const { id: user_id } = await this.usersService.getOrCreateUser(user); + return this.dbService + .getConnection() + .insertInto("hyperboard_admins") + .values([ + { + hyperboard_id: hyperboardId, + user_id, + }, + ]) + .onConflict((oc) => + oc.columns(["hyperboard_id", "user_id"]).doUpdateSet((eb) => ({ + hyperboard_id: eb.ref("excluded.hyperboard_id"), + user_id: eb.ref("excluded.user_id"), + })), + ) + .returningAll() + .executeTakeFirstOrThrow(); + } +} diff --git a/src/services/database/entities/HypercertsEntityService.ts b/src/services/database/entities/HypercertsEntityService.ts new file mode 100644 index 00000000..4d3dd3a7 --- /dev/null +++ b/src/services/database/entities/HypercertsEntityService.ts @@ -0,0 +1,209 @@ +import { Expression, Selectable, SqlBool } from "kysely"; +import { inject, injectable } from "tsyringe"; +import { CachingKyselyService, kyselyCaching } from "../../../client/kysely.js"; 
import { GetHypercertsArgs } from "../../../graphql/schemas/args/hypercertsArgs.js";
import { CachingDatabase } from "../../../types/kyselySupabaseCaching.js";
import {
  createEntityService,
  type EntityService,
} from "./EntityServiceFactory.js";

// Row type of the claims_view table (one hypercert claim).
export type HypercertSelect = Selectable;

/**
 * Service for handling hypercert operations in the system.
 * Provides methods for retrieving hypercerts and their associated metadata.
 *
 * A hypercert represents a claim about work or impact, with:
 * - Unique identifier (hypercert_id)
 * - Associated metadata (work scope, timeframes, etc.)
 * - Contract information
 * - Fractions and ownership details
 *
 * This service uses an EntityService for database operations, providing:
 * - Consistent error handling
 * - Type safety through Kysely
 * - Caching support
 */
@injectable()
export class HypercertsService {
  // Generic entity service bound to the claims_view table.
  private entityService: EntityService<
    CachingDatabase["claims_view"],
    GetHypercertsArgs
  >;

  constructor(
    @inject(CachingKyselyService)
    private cachingKyselyService: CachingKyselyService,
  ) {
    this.entityService = createEntityService<
      CachingDatabase,
      "claims_view",
      GetHypercertsArgs
    >("claims_view", "HypercertsEntityService", kyselyCaching);
  }

  /**
   * Retrieves multiple hypercerts based on provided arguments.
   *
   * @param args - Query arguments for filtering hypercerts
   * @returns A promise resolving to:
   *   - data: Array of hypercerts matching the criteria
   *   - count: Total number of matching records
   *
   * @example
   * ```typescript
   * const result = await hypercertsService.getHypercerts({
   *   where: {
   *     hypercert_id: { eq: "1-0x1234...5678-123" }
   *   }
   * });
   * ```
   */
  async getHypercerts(args: GetHypercertsArgs) {
    return this.entityService.getMany(args);
  }

  /**
   * Retrieves a single hypercert based on provided arguments.
   *
   * @param args - Query arguments for filtering hypercerts
   * @returns A promise resolving to:
   *   - The matching hypercert if found
   *   - undefined if no matching record exists
   *
   * @example
   * ```typescript
   * const hypercert = await hypercertsService.getHypercert({
   *   where: {
   *     hypercert_id: { eq: "1-0x1234...5678-123" }
   *   }
   * });
   * ```
   */
  async getHypercert(args: GetHypercertsArgs) {
    return this.entityService.getSingle(args);
  }

  /**
   * Retrieves metadata for a hypercert using either claims_id or hypercert_id.
   * Uses a left join to fetch metadata associated with the hypercert through
   * the claims table. When both arguments are given, a record matching either
   * condition is returned (OR semantics).
   *
   * @param args - Object containing either claims_id or hypercert_id (or both)
   * @returns A promise resolving to:
   *   - The matching metadata record if found
   *   - undefined if no matching record exists
   *   - null if no arguments were provided
   *
   * @example
   * ```typescript
   * // Using claims_id
   * const metadata1 = await hypercertsService.getHypercertMetadata({
   *   claims_id: "claim-123"
   * });
   *
   * // Using hypercert_id
   * const metadata2 = await hypercertsService.getHypercertMetadata({
   *   hypercert_id: "1-0x1234...5678-123"
   * });
   * ```
   */
  async getHypercertMetadata(args: {
    claims_id?: string;
    hypercert_id?: string;
  }) {
    if (!args.claims_id && !args.hypercert_id) {
      console.warn(
        `[HypercertsService::getHypercertMetadata] No claims_id or hypercert_id provided`,
      );
      return null;
    }

    const query = this.cachingKyselyService
      .getConnection()
      .selectFrom("metadata")
      .leftJoin("claims", "metadata.uri", "claims.uri")
      .selectAll("metadata")
      .where((eb) => {
        const conditions: Expression[] = [];

        if (args.claims_id) {
          conditions.push(eb("claims.id", "=", args.claims_id));
        }

        if (args.hypercert_id) {
          conditions.push(eb("claims.hypercert_id", "=", args.hypercert_id));
        }

        // OR semantics: match on whichever identifiers were supplied.
        return eb.or(conditions);
      });

    return await query.executeTakeFirst();
  }

  /**
   * Retrieves metadata for multiple hypercerts using arrays of claims_ids or
   * hypercert_ids. Uses a left join to fetch metadata associated with the
   * hypercerts through the claims table (OR semantics across both arrays).
   *
   * @param args - Object containing arrays of claims_ids or hypercert_ids (or both)
   * @returns A promise resolving to:
   *   - Array of metadata records (empty when nothing matches)
   *   - null if neither array was provided or both are empty
   *
   * @example
   * ```typescript
   * // Using claims_ids
   * const metadata1 = await hypercertsService.getHypercertMetadataSets({
   *   claims_ids: ["claim-123", "claim-456"]
   * });
   *
   * // Using hypercert_ids
   * const metadata2 = await hypercertsService.getHypercertMetadataSets({
   *   hypercert_ids: ["1-0x1234...5678-123", "1-0x1234...5678-456"]
   * });
   * ```
   */
  async getHypercertMetadataSets(args: {
    claims_ids?: string[];
    hypercert_ids?: string[];
  }) {
    if (!args.claims_ids?.length && !args.hypercert_ids?.length) {
      console.warn(
        `[HypercertsService::getHypercertMetadataSets] No claims_ids or hypercert_ids provided`,
      );
      return null;
    }

    const query = this.cachingKyselyService
      .getConnection()
      .selectFrom("metadata")
      .leftJoin("claims", "metadata.uri", "claims.uri")
      .selectAll("metadata")
      .where((eb) => {
        const conditions: Expression[] = [];

        if (args.claims_ids?.length) {
          conditions.push(eb("claims.id", "in", args.claims_ids));
        }

        if (args.hypercert_ids?.length) {
          conditions.push(eb("claims.hypercert_id", "in", args.hypercert_ids));
        }

        return eb.or(conditions);
      });

    return await query.execute();
  }
}
b/src/services/database/entities/MarketplaceOrdersEntityService.ts new file mode 100644 index 00000000..abee949b --- /dev/null +++ b/src/services/database/entities/MarketplaceOrdersEntityService.ts @@ -0,0 +1,330 @@ +import { HypercertExchangeClient } from "@hypercerts-org/marketplace-sdk"; +import { Insertable, Selectable, Updateable } from "kysely"; +import { inject, injectable } from "tsyringe"; +import { EvmClientFactory } from "../../../client/evmClient.js"; +import { DataKyselyService, kyselyData } from "../../../client/kysely.js"; +import type { GetOrdersArgs } from "../../../graphql/schemas/args/orderArgs.js"; +import { SortOrder } from "../../../graphql/schemas/enums/sortEnums.js"; +import type { DataDatabase } from "../../../types/kyselySupabaseData.js"; +import { createEntityService, EntityService } from "./EntityServiceFactory.js"; + +export type MarketplaceOrderSelect = Selectable< + DataDatabase["marketplace_orders"] +>; +export type MarketplaceOrderInsert = Insertable< + DataDatabase["marketplace_orders"] +>; +export type MarketplaceOrderUpdate = Updateable< + DataDatabase["marketplace_orders"] +>; + +export type MarketplaceOrderNonceSelect = Selectable< + DataDatabase["marketplace_order_nonces"] +>; +export type MarketplaceOrderNonceInsert = Insertable< + DataDatabase["marketplace_order_nonces"] +>; +export type MarketplaceOrderNonceUpdate = Updateable< + DataDatabase["marketplace_order_nonces"] +>; + +/** + * Service class for managing marketplace orders in the database. + * Handles CRUD operations for orders and their associated nonces. 
+ * + * This service provides methods to: + * - Query and manage marketplace orders + * - Handle order nonces for transaction validation + * - Validate orders against token IDs + * - Perform batch operations on orders + * + * @injectable + */ +@injectable() +export class MarketplaceOrdersService { + private entityService: EntityService< + DataDatabase["marketplace_orders"], + GetOrdersArgs + >; + + /** + * Initializes a new instance of the MarketplaceOrdersService. + * Creates an EntityService instance for the marketplace_orders table. + * + * @param dbService - The database service instance for direct database operations + */ + constructor(@inject(DataKyselyService) private dbService: DataKyselyService) { + this.entityService = createEntityService< + DataDatabase, + "marketplace_orders", + GetOrdersArgs + >("marketplace_orders", "MarketplaceOrdersEntityService", kyselyData); + } + + /** + * Retrieves multiple orders based on the provided arguments. + * + * @param args - Query arguments for filtering orders + * @returns Promise resolving to an object containing order data and count + */ + async getOrders(args: GetOrdersArgs) { + return this.entityService.getMany(args); + } + + /** + * Retrieves a single order based on the provided arguments. + * + * @param args - Query arguments for filtering the order + * @returns Promise resolving to a single order record or undefined if not found + */ + async getOrder(args: GetOrdersArgs) { + return this.entityService.getSingle(args); + } + + // TODO can this be a getOrders call? + /** + * Retrieves orders associated with specific token IDs. 
+ * + * @param tokenIds - Array of token IDs to search for + * @param chainId - Chain ID to filter orders by + * @returns Promise resolving to matching orders + */ + async getOrdersByTokenIds(tokenIds: string[], chainId: number) { + return this.entityService.getMany({ + where: { + itemIds: { + arrayOverlaps: tokenIds, + }, + chainId: { eq: chainId }, + }, + sortBy: { createdAt: SortOrder.descending }, + }); + } + + /** + * Creates a new nonce record for order validation. + * + * @param nonce - The nonce record to create + * @returns Promise resolving to the created nonce counter + * @throws {Error} If the database operation fails + */ + async createNonce(nonce: MarketplaceOrderNonceInsert) { + return this.dbService + .getConnection() + .insertInto("marketplace_order_nonces") + .values(nonce) + .returning("nonce_counter") + .executeTakeFirstOrThrow(); + } + + /** + * Retrieves a nonce record for a specific address and chain. + * + * @param nonce - Object containing address and chain_id + * @returns Promise resolving to the nonce record or undefined if not found + */ + async getNonce( + nonce: Pick, + ) { + if (!nonce.address || !nonce.chain_id) { + throw new Error("Address and chain ID are required"); + } + + return ( + this.dbService + .getConnection() + .selectFrom("marketplace_order_nonces") + .selectAll() + .where("address", "=", nonce.address) + .where("chain_id", "=", nonce.chain_id) + .executeTakeFirst() + // TODO: Investigate why chain_id and nonce_counter are returned as strings + .then((res) => ({ + ...res, + chain_id: Number(res?.chain_id), + nonce_counter: Number(res?.nonce_counter), + })) + ); + } + + /** + * Updates a nonce record's counter. 
+ * + * @param nonce - The nonce record to update + * @returns Promise resolving to the updated nonce record + * @throws {Error} If address or chain ID is missing + */ + async updateNonce(nonce: MarketplaceOrderNonceUpdate) { + if (!nonce.address || !nonce.chain_id) { + throw new Error("Address and chain ID are required"); + } + + return this.dbService + .getConnection() + .updateTable("marketplace_order_nonces") + .set({ nonce_counter: nonce.nonce_counter }) + .where("address", "=", nonce.address) + .where("chain_id", "=", nonce.chain_id) + .returningAll() + .executeTakeFirstOrThrow(); + } + + /** + * Creates a new marketplace order. + * + * @param order - The order record to create + * @returns Promise resolving to the created order + * @throws {Error} If the database operation fails + */ + async storeOrder(order: MarketplaceOrderInsert) { + return this.dbService + .getConnection() + .insertInto("marketplace_orders") + .values(order) + .returningAll() + .executeTakeFirstOrThrow(); + } + + /** + * Updates an existing marketplace order. + * + * @param order - The order record to update + * @returns Promise resolving to the updated order + * @throws {Error} If order ID is missing or unknown + */ + async updateOrder(order: MarketplaceOrderUpdate) { + if (!order.id) { + throw new Error("Order ID is required"); + } + + return this.dbService + .getConnection() + .updateTable("marketplace_orders") + .set(order) + .where("id", "=", order.id) + .returningAll() + .executeTakeFirstOrThrow(); + } + + /** + * Updates multiple marketplace orders. 
+ * + * @param orders - Array of order records to update + * @returns Promise resolving to array of updated orders + * @throws {Error} If any order ID is missing + */ + async updateOrders(orders: MarketplaceOrderUpdate[]) { + const results = []; + for (const order of orders) { + if (!order.id) { + throw new Error("Order ID is required for update"); + } + + const result = await this.dbService + .getConnection() + .updateTable("marketplace_orders") + .set(order) + .where("id", "=", order.id) + .returningAll() + .executeTakeFirstOrThrow(); + + results.push(result); + } + + return results; + } + + /** + * Upserts multiple marketplace orders. + * + * @param orders - Array of order records to upsert + * @returns Promise resolving to array of upserted orders + */ + async upsertOrders(orders: MarketplaceOrderInsert[]) { + return this.dbService + .getConnection() + .insertInto("marketplace_orders") + .values(orders) + .onConflict((oc) => + oc.column("id").doUpdateSet((eb) => ({ + invalidated: eb.ref("excluded.invalidated"), + validator_codes: eb.ref("excluded.validator_codes"), + })), + ) + .returningAll() + .execute(); + } + + /** + * Deletes a marketplace order. + * + * @param orderId - ID of the order to delete + * @returns Promise resolving to the deleted order + * @throws {Error} If the database operation fails + */ + async deleteOrder(orderId: string) { + return this.dbService + .getConnection() + .deleteFrom("marketplace_orders") + .where("id", "=", orderId) + .returningAll() + .executeTakeFirstOrThrow(); + } + + /** + * Validates orders associated with specific token IDs. + * Uses the HypercertExchangeClient to check order validity. 
+ * + * @param tokenIds - Array of token IDs to validate orders for + * @param chainId - Chain ID to filter orders by + * @returns Promise resolving to array of updated invalid orders + * @throws {Error} If validation or update fails + */ + async validateOrdersByTokenIds(tokenIds: string[], chainId: number) { + const ordersToUpdate: MarketplaceOrderUpdate[] = []; + for (const tokenId of tokenIds) { + const { data: matchingOrders } = await this.getOrdersByTokenIds( + [tokenId], + chainId, + ); + + if (!matchingOrders) { + console.warn( + `[SupabaseDataService::validateOrderByTokenId] No orders found for tokenId: ${tokenId}`, + ); + continue; + } + + const hec = new HypercertExchangeClient( + chainId, + // @ts-expect-error Typing issue with provider + EvmClientFactory.createEthersClient(chainId), + ); + const validationResults = await hec.checkOrdersValidity( + matchingOrders.map((order) => ({ + ...order, + chainId: Number(order.chainId), + })), + ); + + // filter all orders that have changed validity or validator codes + const _changedOrders = validationResults + .filter((x) => { + const order = matchingOrders.find((y) => y.id === x.id); + return ( + order?.invalidated !== x.valid || + order?.validator_codes !== x.validatorCodes + ); + }) + .map((x) => ({ + id: x.id, + invalidated: !x.valid, + validator_codes: x.validatorCodes, + })); + + ordersToUpdate.push(..._changedOrders); + } + + return await this.updateOrders(ordersToUpdate); + } +} diff --git a/src/services/database/entities/MetadataEntityService.ts b/src/services/database/entities/MetadataEntityService.ts new file mode 100644 index 00000000..577df01a --- /dev/null +++ b/src/services/database/entities/MetadataEntityService.ts @@ -0,0 +1,88 @@ +import { Selectable } from "kysely"; +import { injectable } from "tsyringe"; +import { kyselyCaching } from "../../../client/kysely.js"; +import { GetMetadataArgs } from "../../../graphql/schemas/args/metadataArgs.js"; +import { CachingDatabase } from 
"../../../types/kyselySupabaseCaching.js"; +import { + createEntityService, + type EntityService, +} from "./EntityServiceFactory.js"; + +export type MetadataSelect = Selectable; + +/** + * Service for handling metadata operations in the system. + * Provides methods for retrieving metadata records with support for filtering and relationships. + * + * Metadata represents descriptive information about hypercerts, including: + * - Basic information (name, description) + * - Work and impact timeframes + * - Contributors and rights + * - External references (URLs, URIs) + * - Custom properties + * + * This service uses an EntityService for database operations, providing: + * - Consistent error handling + * - Type safety through Kysely + * - Caching support + */ +@injectable() +export class MetadataService { + private entityService: EntityService< + CachingDatabase["metadata"], + GetMetadataArgs + >; + + constructor() { + this.entityService = createEntityService< + CachingDatabase, + "metadata", + GetMetadataArgs + >("metadata", "MetadataEntityService", kyselyCaching); + } + + /** + * Retrieves multiple metadata records based on provided arguments. + * + * @param args - Query arguments for filtering metadata records + * @returns A promise resolving to: + * - data: Array of metadata records matching the criteria + * - count: Total number of matching records + * + * @example + * ```typescript + * const result = await metadataService.getMetadata({ + * where: { + * hypercerts: { + * id: { eq: "some-hypercert-id" } + * } + * } + * }); + * ``` + */ + async getMetadata(args: GetMetadataArgs) { + return this.entityService.getMany(args); + } + + /** + * Retrieves a single metadata record based on provided arguments. + * Useful when you expect exactly one matching record. 
+ * + * @param args - Query arguments for filtering metadata records + * @returns A promise resolving to: + * - The matching metadata record if found + * - undefined if no matching record exists + * + * @example + * ```typescript + * const metadata = await metadataService.getMetadataSingle({ + * where: { + * uri: { eq: "ipfs://..." } + * } + * }); + * ``` + */ + async getMetadataSingle(args: GetMetadataArgs) { + return this.entityService.getSingle(args); + } +} diff --git a/src/services/database/entities/SalesEntityService.ts b/src/services/database/entities/SalesEntityService.ts new file mode 100644 index 00000000..862309a2 --- /dev/null +++ b/src/services/database/entities/SalesEntityService.ts @@ -0,0 +1,52 @@ +import { Selectable } from "kysely"; +import { injectable } from "tsyringe"; +import { kyselyCaching } from "../../../client/kysely.js"; +import type { GetSalesArgs } from "../../../graphql/schemas/args/salesArgs.js"; +import type { CachingDatabase } from "../../../types/kyselySupabaseCaching.js"; +import type { EntityService } from "./EntityServiceFactory.js"; +import { createEntityService } from "./EntityServiceFactory.js"; + +export type SaleSelect = Selectable; + +/** + * Service for handling sales-related database operations. + * This service provides functionality to: + * 1. Query multiple sales with filtering and pagination + * 2. Query a single sale by ID + */ +@injectable() +export class SalesService { + private entityService: EntityService; + + constructor() { + this.entityService = createEntityService< + CachingDatabase, + "sales", + GetSalesArgs + >("sales", "SalesEntityService", kyselyCaching); + } + + /** + * Retrieves multiple sales based on the provided query arguments. 
+ * + * @param args - Query arguments including where conditions, sorting, and pagination + * @returns A promise resolving to an object containing: + * - data: Array of sales matching the query criteria + * - count: Total number of matching sales + * @throws {Error} If the database query fails + */ + async getSales(args: GetSalesArgs) { + return this.entityService.getMany(args); + } + + /** + * Retrieves a single sale based on the provided query arguments. + * + * @param args - Query arguments including where conditions to identify the sale + * @returns A promise resolving to the matching sale + * @throws {Error} If the database query fails + */ + async getSale(args: GetSalesArgs) { + return this.entityService.getSingle(args); + } +} diff --git a/src/services/database/entities/SignatureRequestsEntityService.ts b/src/services/database/entities/SignatureRequestsEntityService.ts new file mode 100644 index 00000000..e5a5c8db --- /dev/null +++ b/src/services/database/entities/SignatureRequestsEntityService.ts @@ -0,0 +1,149 @@ +import { Insertable, Selectable, Updateable } from "kysely"; +import { inject, injectable } from "tsyringe"; +import { DataKyselyService, kyselyData } from "../../../client/kysely.js"; +import type { GetSignatureRequestsArgs } from "../../../graphql/schemas/args/signatureRequestArgs.js"; +import type { DataDatabase } from "../../../types/kyselySupabaseData.js"; +import type { EntityService } from "./EntityServiceFactory.js"; +import { createEntityService } from "./EntityServiceFactory.js"; + +export type SignatureRequestSelect = Selectable< + DataDatabase["signature_requests"] +>; +export type SignatureRequestInsert = Insertable< + DataDatabase["signature_requests"] +>; +export type SignatureRequestUpdate = Updateable< + DataDatabase["signature_requests"] +>; + +/** + * Service for handling signature request operations in the system. + * Provides methods for retrieving, creating and updating signature requests. 
+ * + * A signature request represents a request for a user to sign a message, with: + * - Safe address (the address that needs to sign) + * - Message hash (hash of the message to be signed) + * - Status (pending, executed, canceled) + * - Purpose (e.g. update_user_data) + * + * This service uses an EntityService for database operations, providing: + * - Consistent error handling + * - Type safety through Kysely + * - Standard query interface + */ +@injectable() +export class SignatureRequestsService { + private entityService: EntityService< + DataDatabase["signature_requests"], + GetSignatureRequestsArgs + >; + + constructor(@inject(DataKyselyService) private dbService: DataKyselyService) { + this.entityService = createEntityService< + DataDatabase, + "signature_requests", + GetSignatureRequestsArgs + >("signature_requests", "SignatureRequestsEntityService", kyselyData); + } + + /** + * Retrieves multiple signature requests based on provided arguments. + * + * @param args - Query arguments for filtering signature requests + * @returns A promise resolving to: + * - data: Array of signature requests matching the criteria + * - count: Total number of matching records + * + * @example + * ```typescript + * const result = await signatureRequestsService.getSignatureRequests({ + * where: { + * safe_address: { eq: "0x1234...5678" } + * } + * }); + * ``` + */ + async getSignatureRequests(args: GetSignatureRequestsArgs) { + return this.entityService.getMany(args); + } + + /** + * Retrieves a single signature request based on provided arguments. 
+ * + * @param args - Query arguments for filtering signature requests + * @returns A promise resolving to: + * - The matching signature request if found + * - undefined if no matching record exists + * + * @example + * ```typescript + * const request = await signatureRequestsService.getSignatureRequest({ + * where: { + * safe_address: { eq: "0x1234...5678" }, + * message_hash: { eq: "0xabcd...ef12" } + * } + * }); + * ``` + */ + async getSignatureRequest(args: GetSignatureRequestsArgs) { + return this.entityService.getSingle(args); + } + + // Mutations + + /** + * Creates a new signature request. + * + * @param signatureRequest - The signature request data to insert + * @returns A promise resolving to the created signature request's safe_address and message_hash + * + * @example + * ```typescript + * const result = await signatureRequestsService.addSignatureRequest({ + * safe_address: "0x1234...5678", + * message_hash: "0xabcd...ef12", + * status: "pending", + * purpose: "update_user_data" + * }); + * ``` + */ + async addSignatureRequest(signatureRequest: SignatureRequestInsert) { + return this.dbService + .getConnection() + .insertInto("signature_requests") + .values(signatureRequest) + .returning(["safe_address", "message_hash"]) + .executeTakeFirst(); + } + + /** + * Updates the status of an existing signature request.
+ * + * @param safe_address - The safe address associated with the request + * @param message_hash - The message hash of the request + * @param status - The new status to set + * @returns A promise resolving to the number of affected rows + * + * @example + * ```typescript + * await signatureRequestsService.updateSignatureRequestStatus( + * "0x1234...5678", + * "0xabcd...ef12", + * "executed" + * ); + * ``` + */ + async updateSignatureRequestStatus( + safe_address: string, + message_hash: string, + status: SignatureRequestUpdate["status"], + ) { + return this.dbService + .getConnection() + .updateTable("signature_requests") + .set({ status }) + .where("safe_address", "=", safe_address) + .where("message_hash", "=", message_hash) + .execute(); + } +} diff --git a/src/services/database/entities/UsersEntityService.ts b/src/services/database/entities/UsersEntityService.ts new file mode 100644 index 00000000..9aad3afc --- /dev/null +++ b/src/services/database/entities/UsersEntityService.ts @@ -0,0 +1,72 @@ +import { Insertable, Selectable, Updateable } from "kysely"; +import { inject, injectable } from "tsyringe"; +import { DataKyselyService, kyselyData } from "../../../client/kysely.js"; +import type { GetUsersArgs } from "../../../graphql/schemas/args/userArgs.js"; +import type { DataDatabase } from "../../../types/kyselySupabaseData.js"; +import type { EntityService } from "./EntityServiceFactory.js"; +import { createEntityService } from "./EntityServiceFactory.js"; + +export type UserSelect = Selectable; +export type UserInsert = Insertable; +export type UserUpdate = Updateable; + +@injectable() +export class UsersService { + private entityService: EntityService; + + constructor(@inject(DataKyselyService) private dbService: DataKyselyService) { + this.entityService = createEntityService< + DataDatabase, + "users", + GetUsersArgs + >("users", "UsersEntityService", kyselyData); + } + + async getUsers(args: GetUsersArgs) { + return 
this.entityService.getMany(args).then((res) => ({ + ...res, + data: res.data.map((user) => ({ + ...user, + // TODO: Investigate why chain_id is returned as a string + chain_id: Number(user.chain_id), + })), + })); + } + + async getUser(args: GetUsersArgs) { + return this.entityService.getSingle(args); + } + + // Mutations + async getOrCreateUser(user: UserInsert) { + const _user = await this.getUser({ + where: { + address: { eq: user.address }, + chain_id: { eq: user.chain_id }, + }, + }); + + if (!_user) { + const [createdUser] = await this.upsertUsers([user]); + + return createdUser; + } + + return _user; + } + + async upsertUsers(users: UserInsert[]) { + return this.dbService + .getConnection() + .insertInto("users") + .values(users) + .onConflict((oc) => + oc.constraint("users_address_chain_id").doUpdateSet((eb) => ({ + avatar: eb.ref("excluded.avatar"), + display_name: eb.ref("excluded.display_name"), + })), + ) + .returningAll() + .execute(); + } +} diff --git a/src/services/database/strategies/AllowlistQueryStrategy.ts b/src/services/database/strategies/AllowlistQueryStrategy.ts new file mode 100644 index 00000000..4ccf5629 --- /dev/null +++ b/src/services/database/strategies/AllowlistQueryStrategy.ts @@ -0,0 +1,69 @@ +import { Kysely } from "kysely"; +import { GetAllowlistRecordsArgs } from "../../../graphql/schemas/args/allowlistRecordArgs.js"; +import { isWhereEmpty } from "../../../lib/strategies/isWhereEmpty.js"; +import { CachingDatabase } from "../../../types/kyselySupabaseCaching.js"; +import { QueryStrategy } from "./QueryStrategy.js"; + +/** + * Strategy class for querying allowlist records from the claimable_fractions_with_proofs view table. + * This class extends the base QueryStrategy to provide specific implementation for allowlist-related queries. 
+ */ +export class AllowlistQueryStrategy extends QueryStrategy< + CachingDatabase, + "claimable_fractions_with_proofs", + GetAllowlistRecordsArgs +> { + /** The name of the table this strategy queries against */ + protected readonly tableName = "claimable_fractions_with_proofs" as const; + + /** + * Builds a query to fetch allowlist records from the database. + */ + buildDataQuery(db: Kysely, args?: GetAllowlistRecordsArgs) { + if (!args) { + return db.selectFrom(this.tableName).selectAll(); + } + + return db + .selectFrom(this.tableName) + .$if(!isWhereEmpty(args.where?.hypercert), (qb) => + qb.where(({ exists, selectFrom }) => + exists( + selectFrom("claims").whereRef( + "claims.hypercert_id", + "=", + "claimable_fractions_with_proofs.hypercert_id", + ), + ), + ), + ) + .selectAll(this.tableName); + } + + /** + * Builds a query to count the total number of allowlist records. + */ + buildCountQuery(db: Kysely, args?: GetAllowlistRecordsArgs) { + if (!args) { + return db.selectFrom(this.tableName).select((eb) => { + return eb.fn.countAll().as("count"); + }); + } + return db + .selectFrom(this.tableName) + .$if(!isWhereEmpty(args.where?.hypercert), (qb) => + qb.where(({ exists, selectFrom }) => + exists( + selectFrom("claims").whereRef( + "claims.hypercert_id", + "=", + "claimable_fractions_with_proofs.hypercert_id", + ), + ), + ), + ) + .select((eb) => { + return eb.fn.countAll().as("count"); + }); + } +} diff --git a/src/services/database/strategies/AttestationQueryStrategy.ts b/src/services/database/strategies/AttestationQueryStrategy.ts new file mode 100644 index 00000000..cee05a70 --- /dev/null +++ b/src/services/database/strategies/AttestationQueryStrategy.ts @@ -0,0 +1,126 @@ +import { Kysely } from "kysely"; +import { GetAttestationsArgs } from "../../../graphql/schemas/args/attestationArgs.js"; +import { CachingDatabase } from "../../../types/kyselySupabaseCaching.js"; +import { QueryStrategy } from "./QueryStrategy.js"; +import { isWhereEmpty } from 
"../../../lib/strategies/isWhereEmpty.js"; + +/** + * Strategy for building database queries for attestations. + * Implements complex query logic for attestation retrieval, including: + * - Joins with related tables (claims, supported_schemas) + * - Filtering based on related entities + * - Count queries for total matching records + * + * This strategy extends the base QueryStrategy to provide attestation-specific query building. + */ +export class AttestationsQueryStrategy extends QueryStrategy< + CachingDatabase, + "attestations", + GetAttestationsArgs +> { + protected readonly tableName = "attestations" as const; + + /** + * Builds a query to retrieve attestation data with optional filtering. + * Handles complex joins and relationships with other tables. + * + * @param db - Kysely database instance + * @param args - Optional query arguments for filtering + * @returns A query builder for retrieving attestation data + * + * Key features: + * - Joins with supported_schemas when eas_schema filter is present + * - Joins with claims when hypercert filter is present + * - Returns all columns from the attestations table + * + * @example + * ```typescript + * // Basic query without filters + * buildDataQuery(db); + * // SELECT * FROM attestations + * + * // Query with schema filter + * buildDataQuery(db, { where: { eas_schema: { id: { eq: 'schema-id' } } } }); + * // SELECT * FROM attestations WHERE EXISTS (SELECT * FROM supported_schemas ...) 
+ * ``` + */ + buildDataQuery(db: Kysely, args?: GetAttestationsArgs) { + if (!args) { + return db.selectFrom(this.tableName).selectAll(); + } + return db + .selectFrom(this.tableName) + .$if(!isWhereEmpty(args?.where?.eas_schema), (qb) => { + return qb.where(({ exists, selectFrom }) => + exists( + selectFrom("supported_schemas").whereRef( + "supported_schemas.id", + "=", + "attestations.supported_schemas_id", + ), + ), + ); + }) + .$if(!isWhereEmpty(args.where?.hypercert), (qb) => { + return qb.where(({ exists, selectFrom }) => + exists( + selectFrom("claims").whereRef( + "claims.id", + "=", + "attestations.claims_id", + ), + ), + ); + }) + .selectAll(); + } + + /** + * Builds a query to count attestations with optional filtering. + * Uses the same filtering logic as buildDataQuery but returns a count. + * + * @param db - Kysely database instance + * @param args - Optional query arguments for filtering + * @returns A query builder for counting attestations + * + * Key features: + * - Applies the same joins and filters as buildDataQuery + * - Returns a count of matching attestations + * - Optimized for counting by selecting only the count + */ + buildCountQuery(db: Kysely, args?: GetAttestationsArgs) { + if (!args) { + return db.selectFrom(this.tableName).select((eb) => { + return eb.fn.countAll().as("count"); + }); + } + + return db + .selectFrom(this.tableName) + .$if(!isWhereEmpty(args?.where?.eas_schema), (qb) => { + return qb.where(({ exists, selectFrom }) => + exists( + selectFrom("supported_schemas").whereRef( + "supported_schemas.id", + "=", + "attestations.supported_schemas_id", + ), + ), + ); + }) + .$if(!isWhereEmpty(args.where?.hypercert), (qb) => { + return qb.where(({ exists, selectFrom }) => + exists( + selectFrom("claims").whereRef( + "claims.id", + "=", + "attestations.claims_id", + ), + ), + ); + }) + .select((eb) => { + return eb.fn.countAll().as("count"); + }); + } +} diff --git a/src/services/database/strategies/BlueprintsQueryStrategy.ts 
b/src/services/database/strategies/BlueprintsQueryStrategy.ts new file mode 100644 index 00000000..af2ec433 --- /dev/null +++ b/src/services/database/strategies/BlueprintsQueryStrategy.ts @@ -0,0 +1,20 @@ +import { Kysely } from "kysely"; +import { DataDatabase } from "../../../types/kyselySupabaseData.js"; +import { QueryStrategy } from "./QueryStrategy.js"; + +export class BlueprintsQueryStrategy extends QueryStrategy< + DataDatabase, + "blueprints_with_admins" +> { + protected readonly tableName = "blueprints_with_admins" as const; + + buildDataQuery(db: Kysely) { + return db.selectFrom(this.tableName).selectAll(); + } + + buildCountQuery(db: Kysely) { + return db.selectFrom(this.tableName).select((eb) => { + return eb.fn.countAll().as("count"); + }); + } +} diff --git a/src/services/database/strategies/ClaimsQueryStrategy.ts b/src/services/database/strategies/ClaimsQueryStrategy.ts new file mode 100644 index 00000000..eb7b2473 --- /dev/null +++ b/src/services/database/strategies/ClaimsQueryStrategy.ts @@ -0,0 +1,187 @@ +import { Kysely } from "kysely"; +import { GetHypercertsArgs } from "../../../graphql/schemas/args/hypercertsArgs.js"; +import { isWhereEmpty } from "../../../lib/strategies/isWhereEmpty.js"; +import { CachingDatabase } from "../../../types/kyselySupabaseCaching.js"; +import { QueryStrategy } from "./QueryStrategy.js"; + +/** + * Strategy for building database queries for claims. + * Implements query logic for claim retrieval and counting. + * + * This strategy extends the base QueryStrategy to provide claim-specific query building. 
+ * It handles: + * - Basic data retrieval from the claims table + * - Filtering based on relationships with: + * - contracts + * - fractions + * - metadata + * - attestations + * - Counting operations with appropriate joins + * + * @template CachingDatabase - The database type containing the claims table + */ +export class ClaimsQueryStrategy extends QueryStrategy< + CachingDatabase, + "claims_view", + GetHypercertsArgs +> { + protected readonly tableName = "claims_view" as const; + + /** + * Builds a query to retrieve claim data. + * Handles optional filtering through joins with related tables. + * + * @param db - Kysely database instance + * @param args - Optional query arguments for filtering + * @returns A query builder for retrieving claim data + * + * @example + * ```typescript + * // Basic query without filters + * buildDataQuery(db); + * // SELECT * FROM claims + * + * // Query with contract filtering + * buildDataQuery(db, { where: { contract: { ... } } }); + * // SELECT * FROM claims + * // WHERE EXISTS ( + * // SELECT * FROM contracts + * // WHERE contracts.id = claims.contracts_id + * // ) + * ``` + */ + buildDataQuery(db: Kysely, args?: GetHypercertsArgs) { + if (!args) { + return db.selectFrom(this.tableName).selectAll(); + } + + return db + .selectFrom(this.tableName) + .$if(!isWhereEmpty(args.where?.contract), (qb) => { + return qb.where(({ exists, selectFrom }) => + exists( + selectFrom("contracts").whereRef( + "contracts.id", + "=", + "claims_view.contracts_id", + ), + ), + ); + }) + .$if(!isWhereEmpty(args.where?.fractions), (qb) => { + return qb.where(({ exists, selectFrom }) => + exists( + selectFrom("fractions_view").whereRef( + "fractions_view.claims_id", + "=", + "claims_view.id", + ), + ), + ); + }) + .$if(!isWhereEmpty(args.where?.metadata), (qb) => { + return qb.where(({ exists, selectFrom }) => + exists( + selectFrom("metadata").whereRef( + "metadata.uri", + "=", + "claims_view.uri", + ), + ), + ); + }) + 
.$if(!isWhereEmpty(args.where?.attestations), (qb) => { + return qb.where(({ exists, selectFrom }) => + exists( + selectFrom("attestations").whereRef( + "attestations.claims_id", + "=", + "claims_view.id", + ), + ), + ); + }) + .selectAll(this.tableName); + } + + /** + * Builds a query to count claims. + * Handles optional filtering through joins with related tables. + * + * @param db - Kysely database instance + * @param args - Optional query arguments for filtering + * @returns A query builder for counting claims + * + * @example + * ```typescript + * // Count all claims + * buildCountQuery(db); + * // SELECT COUNT(*) as count FROM claims + * + * // Count with metadata filtering + * buildCountQuery(db, { where: { metadata: { ... } } }); + * // SELECT COUNT(*) as count FROM claims + * // WHERE EXISTS ( + * // SELECT * FROM metadata + * // WHERE metadata.uri = claims.uri + * // ) + * ``` + */ + buildCountQuery(db: Kysely, args?: GetHypercertsArgs) { + if (!args) { + return db.selectFrom(this.tableName).select((eb) => { + return eb.fn.countAll().as("count"); + }); + } + + return db + .selectFrom(this.tableName) + .$if(!isWhereEmpty(args.where?.contract), (qb) => + qb.where(({ exists, selectFrom }) => + exists( + selectFrom("contracts").whereRef( + "contracts.id", + "=", + "claims_view.contracts_id", + ), + ), + ), + ) + .$if(!isWhereEmpty(args.where?.fractions), (qb) => + qb.where(({ exists, selectFrom }) => + exists( + selectFrom("fractions_view").whereRef( + "fractions_view.claims_id", + "=", + "claims_view.id", + ), + ), + ), + ) + .$if(!isWhereEmpty(args.where?.metadata), (qb) => + qb.where(({ exists, selectFrom }) => + exists( + selectFrom("metadata").whereRef( + "metadata.uri", + "=", + "claims_view.uri", + ), + ), + ), + ) + .$if(!isWhereEmpty(args.where?.attestations), (qb) => + qb.where(({ exists, selectFrom }) => + exists( + selectFrom("attestations").whereRef( + "attestations.claims_id", + "=", + "claims_view.id", + ), + ), + ), + ) + .select((eb) => { + 
return eb.fn.countAll().as("count"); + }); + } +} diff --git a/src/services/database/strategies/CollectionsQueryStrategy.ts b/src/services/database/strategies/CollectionsQueryStrategy.ts new file mode 100644 index 00000000..443e1aaa --- /dev/null +++ b/src/services/database/strategies/CollectionsQueryStrategy.ts @@ -0,0 +1,151 @@ +import { Kysely } from "kysely"; +import { GetCollectionsArgs } from "../../../graphql/schemas/args/collectionArgs.js"; +import { isWhereEmpty } from "../../../lib/strategies/isWhereEmpty.js"; +import { DataDatabase } from "../../../types/kyselySupabaseData.js"; +import { QueryStrategy } from "./QueryStrategy.js"; + +/** + * Strategy for building database queries for collections. + * Implements query logic for collection retrieval and counting. + * + * This strategy extends the base QueryStrategy to provide collection-specific query building. + * It handles: + * - Basic data retrieval from the collections table + * - Filtering based on relationships with: + * - admins (through collection_admins and users tables) + * - blueprints (through collection_blueprints and blueprints tables) + * - Counting operations with appropriate joins + * + * The strategy supports complex queries involving multiple table relationships + * and ensures proper join conditions are maintained. + */ +export class CollectionsQueryStrategy extends QueryStrategy< + DataDatabase, + "collections", + GetCollectionsArgs +> { + protected readonly tableName = "collections" as const; + + /** + * Builds a query to retrieve collection data. + * Handles optional filtering through joins with related tables. 
+ * + * @param db - Kysely database instance + * @param args - Optional query arguments for filtering + * @returns A query builder for retrieving collection data + * + * @example + * ```typescript + * // Basic query without filters + * buildDataQuery(db); + * // SELECT * FROM collections + * + * // Query with admin filter + * buildDataQuery(db, { where: { admins: {} } }); + * // SELECT * FROM collections + * // WHERE EXISTS ( + * // SELECT * FROM collection_admins + * // INNER JOIN users ON users.id = collection_admins.user_id + * // WHERE collection_admins.collection_id = collections.id + * // ) + * ``` + */ + buildDataQuery(db: Kysely, args?: GetCollectionsArgs) { + if (!args) { + return db.selectFrom(this.tableName).selectAll(); + } + return db + .selectFrom(this.tableName) + .$if(!isWhereEmpty(args.where?.admins), (qb) => { + return qb.where(({ exists, selectFrom }) => + exists( + selectFrom("collections") + .innerJoin( + "collection_admins", + "collection_admins.collection_id", + "collections.id", + ) + .select("collections.id"), + ), + ); + }) + .$if(!isWhereEmpty(args.where?.blueprints), (qb) => { + return qb.where(({ exists, selectFrom }) => + exists( + selectFrom("collections") + .innerJoin( + "collection_blueprints", + "collection_blueprints.collection_id", + "collections.id", + ) + .select("collections.id"), + ), + ); + }) + .selectAll(this.tableName); + } + + /** + * Builds a query to count collections. + * Handles optional filtering through joins with related tables. 
+ * + * @param db - Kysely database instance + * @param args - Optional query arguments for filtering + * @returns A query builder for counting collections + * + * @example + * ```typescript + * // Count all collections + * buildCountQuery(db); + * // SELECT COUNT(*) as count FROM collections + * + * // Count with admin filter + * buildCountQuery(db, { where: { admins: {} } }); + * // SELECT COUNT(*) as count FROM collections + * // WHERE EXISTS ( + * // SELECT * FROM collection_admins + * // INNER JOIN users ON users.id = collection_admins.user_id + * // WHERE collection_admins.collection_id = collections.id + * // ) + * ``` + */ + buildCountQuery(db: Kysely, args?: GetCollectionsArgs) { + if (!args) { + return db.selectFrom(this.tableName).select((eb) => { + return eb.fn.countAll().as("count"); + }); + } + + return db + .selectFrom(this.tableName) + .$if(!isWhereEmpty(args.where?.admins), (qb) => { + return qb.where(({ exists, selectFrom }) => + exists( + selectFrom("collections") + .innerJoin( + "collection_admins", + "collection_admins.collection_id", + "collections.id", + ) + .select("collections.id"), + ), + ); + }) + .$if(!isWhereEmpty(args.where?.blueprints), (qb) => { + return qb.where(({ exists, selectFrom }) => + exists( + selectFrom("collections") + .innerJoin( + "collection_blueprints", + "collection_blueprints.collection_id", + "collections.id", + ) + .select("collections.id"), + ), + ); + }) + .select((eb) => { + return eb.fn.countAll().as("count"); + }); + } +} diff --git a/src/services/database/strategies/ContractsQueryStrategy.ts b/src/services/database/strategies/ContractsQueryStrategy.ts new file mode 100644 index 00000000..321e9b18 --- /dev/null +++ b/src/services/database/strategies/ContractsQueryStrategy.ts @@ -0,0 +1,57 @@ +import { Kysely } from "kysely"; +import { CachingDatabase } from "../../../types/kyselySupabaseCaching.js"; +import { QueryStrategy } from "./QueryStrategy.js"; + +/** + * Strategy for building database queries for 
contracts. + * Implements query logic for contract retrieval and counting. + * + * This strategy extends the base QueryStrategy to provide contract-specific query building. + * It handles basic data retrieval and counting operations for contracts deployed on various chains. + * + * @template CachingDatabase - The database type containing the contracts table + */ +export class ContractsQueryStrategy extends QueryStrategy< + CachingDatabase, + "contracts" +> { + protected readonly tableName = "contracts" as const; + + /** + * Builds a query to retrieve contract data. + * Returns a simple SELECT query that retrieves all columns from the contracts table. + * + * @param db - Kysely database instance + * @returns A query builder for retrieving contract data + * + * @example + * ```typescript + * // Basic query to select all contracts + * buildDataQuery(db); + * // SELECT * FROM contracts + * ``` + */ + buildDataQuery(db: Kysely) { + return db.selectFrom(this.tableName).selectAll(this.tableName); + } + + /** + * Builds a query to count contracts. + * Returns a simple COUNT query for the contracts table. 
+ * + * @param db - Kysely database instance + * @returns A query builder for counting contracts + * + * @example + * ```typescript + * // Count all contracts + * buildCountQuery(db); + * // SELECT COUNT(*) as count FROM contracts + * ``` + */ + buildCountQuery(db: Kysely) { + return db.selectFrom(this.tableName).select((eb) => { + return eb.fn.countAll().as("count"); + }); + } +} diff --git a/src/services/database/strategies/FractionsQueryStrategy.ts b/src/services/database/strategies/FractionsQueryStrategy.ts new file mode 100644 index 00000000..fa755efe --- /dev/null +++ b/src/services/database/strategies/FractionsQueryStrategy.ts @@ -0,0 +1,115 @@ +import { Kysely } from "kysely"; +import { GetFractionsArgs } from "../../../graphql/schemas/args/fractionArgs.js"; +import { CachingDatabase } from "../../../types/kyselySupabaseCaching.js"; +import { QueryStrategy } from "./QueryStrategy.js"; +import { isWhereEmpty } from "../../../lib/strategies/isWhereEmpty.js"; + +/** + * Strategy for building database queries for fractions. + * Implements query logic for fraction retrieval and counting. + * + * This strategy extends the base QueryStrategy to provide fraction-specific query building. + * It handles: + * - Basic data retrieval from the fractions_view + * - Filtering based on metadata relationships + * - Counting operations with appropriate joins + * + * @template CachingDatabase - The database type containing the fractions_view table + */ +export class FractionsQueryStrategy extends QueryStrategy< + CachingDatabase, + "fractions_view", + GetFractionsArgs +> { + protected readonly tableName = "fractions_view" as const; + + /** + * Builds a query to retrieve fraction data. + * Handles optional metadata filtering through joins with claims and metadata tables. 
+ * + * @param db - Kysely database instance + * @param args - Optional query arguments for filtering + * @returns A query builder for retrieving fraction data + * + * @example + * ```typescript + * // Basic query to select all fractions + * buildDataQuery(db); + * // SELECT * FROM fractions_view + * + * // Query with metadata filtering + * buildDataQuery(db, { where: { metadata: { ... } } }); + * // SELECT * FROM fractions_view + * // WHERE EXISTS ( + * // SELECT * FROM claims + * // LEFT JOIN metadata ON metadata.id = fractions_view.claims_id + * // WHERE claims.id = fractions_view.claims_id + * // ) + * ``` + */ + buildDataQuery(db: Kysely, args?: GetFractionsArgs) { + if (!args) { + return db.selectFrom(this.tableName).selectAll(); + } + + return db + .selectFrom(this.tableName) + .$if(!isWhereEmpty(args.where?.metadata), (qb) => { + return qb.where(({ exists, selectFrom }) => + exists( + selectFrom("claims") + .whereRef("claims.id", "=", "fractions_view.claims_id") + .leftJoin("metadata", "metadata.id", "fractions_view.claims_id"), + ), + ); + }) + .selectAll(this.tableName); + } + + /** + * Builds a query to count fractions. + * Handles optional metadata filtering through joins with claims and metadata tables. + * + * @param db - Kysely database instance + * @param args - Optional query arguments for filtering + * @returns A query builder for counting fractions + * + * @example + * ```typescript + * // Count all fractions + * buildCountQuery(db); + * // SELECT COUNT(*) as count FROM fractions_view + * + * // Count with metadata filtering + * buildCountQuery(db, { where: { metadata: { ... 
} } }); + * // SELECT COUNT(*) as count FROM fractions_view + * // WHERE EXISTS ( + * // SELECT * FROM claims + * // LEFT JOIN metadata ON metadata.id = fractions_view.claims_id + * // WHERE claims.id = fractions_view.claims_id + * // ) + * ``` + */ + buildCountQuery(db: Kysely, args?: GetFractionsArgs) { + if (!args) { + return db.selectFrom(this.tableName).select((eb) => { + return eb.fn.countAll().as("count"); + }); + } + + return db + .selectFrom(this.tableName) + .$if(!isWhereEmpty(args.where?.metadata), (qb) => { + return qb.where(({ exists, selectFrom }) => + exists( + selectFrom("claims") + .whereRef("claims.id", "=", "fractions_view.claims_id") + .leftJoin("metadata", "metadata.id", "fractions_view.claims_id"), + ), + ); + }) + .select((eb) => { + return eb.fn.countAll().as("count"); + }); + } +} diff --git a/src/services/database/strategies/HyperboardsQueryStrategy.ts b/src/services/database/strategies/HyperboardsQueryStrategy.ts new file mode 100644 index 00000000..c1840b14 --- /dev/null +++ b/src/services/database/strategies/HyperboardsQueryStrategy.ts @@ -0,0 +1,152 @@ +import { Kysely } from "kysely"; +import { GetHyperboardsArgs } from "../../../graphql/schemas/args/hyperboardArgs.js"; +import { isWhereEmpty } from "../../../lib/strategies/isWhereEmpty.js"; +import { DataDatabase } from "../../../types/kyselySupabaseData.js"; +import { QueryStrategy } from "./QueryStrategy.js"; + +/** + * Strategy for building database queries for hyperboards. + * Implements query logic for hyperboard retrieval and counting. + * + * This strategy extends the base QueryStrategy to provide hyperboard-specific query building. 
+ * It handles: + * - Basic data retrieval from the hyperboards table + * - Filtering based on relationships with: + * - collections (through hyperboard_collections table) + * - admins (through hyperboard_admins table) + * - hypercert metadata (through hyperboard_hypercert_metadata table) + * - blueprint metadata (through hyperboard_blueprint_metadata table) + * - Counting operations with appropriate joins + * + * The strategy supports complex queries involving multiple table relationships + * and ensures proper join conditions are maintained. + */ +export class HyperboardsQueryStrategy extends QueryStrategy< + DataDatabase, + "hyperboards_with_admins", + GetHyperboardsArgs +> { + protected readonly tableName = "hyperboards_with_admins" as const; + + /** + * Builds a query to retrieve hyperboard data. + * Handles optional filtering through joins with related tables. + * + * @param db - Kysely database instance + * @param args - Optional query arguments for filtering + * @returns A query builder for retrieving hyperboard data + * + * @example + * ```typescript + * // Basic query without filters + * buildDataQuery(db); + * // SELECT * FROM hyperboards + * + * // Query with collection filter + * buildDataQuery(db, { where: { collections: {} } }); + * // SELECT * FROM hyperboards + * // WHERE EXISTS ( + * // SELECT * FROM hyperboard_collections + * // WHERE hyperboard_collections.hyperboard_id = hyperboards.id + * // ) + * ``` + */ + buildDataQuery(db: Kysely, args?: GetHyperboardsArgs) { + if (!args) { + return db.selectFrom(this.tableName).selectAll(); + } + + return db + .selectFrom(this.tableName) + .$if(!isWhereEmpty(args.where?.collections), (qb) => { + return qb.where(({ exists, selectFrom }) => + exists( + selectFrom("hyperboards") + .innerJoin( + "hyperboard_collections", + "hyperboard_collections.hyperboard_id", + "hyperboards.id", + ) + .select("hyperboards.id"), + ), + ); + }) + .$if(!isWhereEmpty(args.where?.admins), (qb) => { + return qb.where(({ exists, 
selectFrom }) => + exists( + selectFrom("hyperboards") + .innerJoin( + "hyperboard_admins", + "hyperboard_admins.hyperboard_id", + "hyperboards.id", + ) + .select("hyperboards.id"), + ), + ); + }) + .selectAll(this.tableName); + } + + /** + * Builds a query to count hyperboards. + * Handles optional filtering through joins with related tables. + * + * @param db - Kysely database instance + * @param args - Optional query arguments for filtering + * @returns A query builder for counting hyperboards + * + * @example + * ```typescript + * // Count all hyperboards + * buildCountQuery(db); + * // SELECT COUNT(*) as count FROM hyperboards + * + * // Count with admin filter + * buildCountQuery(db, { where: { admins: {} } }); + * // SELECT COUNT(*) as count FROM hyperboards + * // WHERE EXISTS ( + * // SELECT * FROM hyperboard_admins + * // WHERE hyperboard_admins.hyperboard_id = hyperboards.id + * // ) + * ``` + */ + buildCountQuery(db: Kysely, args?: GetHyperboardsArgs) { + if (!args) { + return db.selectFrom(this.tableName).select((eb) => { + return eb.fn.countAll().as("count"); + }); + } + + return db + .selectFrom(this.tableName) + .$if(!isWhereEmpty(args.where?.collections), (qb) => { + return qb.where(({ exists, selectFrom }) => + exists( + selectFrom("hyperboards") + .innerJoin( + "hyperboard_collections", + "hyperboard_collections.hyperboard_id", + "hyperboards.id", + ) + .select("hyperboards.id"), + ), + ); + }) + .$if(!isWhereEmpty(args.where?.admins), (qb) => { + return qb.where(({ exists, selectFrom }) => + exists( + selectFrom("hyperboards") + .innerJoin( + "hyperboard_admins", + "hyperboard_admins.hyperboard_id", + "hyperboards.id", + ) + .select("hyperboards.id"), + ), + ); + }) + .select((eb) => { + return eb.fn.countAll().as("count"); + }); + } +} diff --git a/src/services/database/strategies/MarketplaceOrdersQueryStrategy.ts b/src/services/database/strategies/MarketplaceOrdersQueryStrategy.ts new file mode 100644 index 00000000..d7d94f9f --- /dev/null +++ 
// ---- file: src/services/database/strategies/MarketplaceOrdersQueryStrategy.ts ----

import { Kysely } from "kysely";
import { DataDatabase } from "../../../types/kyselySupabaseData.js";
import { QueryStrategy } from "./QueryStrategy.js";

/**
 * Strategy for building database queries for marketplace orders.
 * Implements query logic for marketplace order retrieval and counting.
 *
 * This strategy extends the base QueryStrategy to provide
 * marketplace-order-specific query building. It handles:
 * - Basic data retrieval from the marketplace_orders table
 * - Simple counting operations
 *
 * No joins or filtering are performed here; any relationship filtering
 * is handled at the service level.
 */
export class MarketplaceOrdersQueryStrategy extends QueryStrategy<
  DataDatabase,
  "marketplace_orders"
> {
  protected readonly tableName = "marketplace_orders" as const;

  /**
   * Builds a query to retrieve marketplace order data.
   * Returns all records from the marketplace_orders table.
   *
   * @param db - Kysely database instance
   * @returns A query builder equivalent to `SELECT * FROM marketplace_orders`
   */
  // NOTE(review): the generic parameter on Kysely was lost in this paste;
  // restored as Kysely<DataDatabase> to match the class type arguments.
  buildDataQuery(db: Kysely<DataDatabase>) {
    return db.selectFrom(this.tableName).selectAll();
  }

  /**
   * Builds a query to count marketplace orders.
   * Returns the total count of records in the marketplace_orders table.
   *
   * @param db - Kysely database instance
   * @returns A query builder equivalent to
   *          `SELECT COUNT(*) AS count FROM marketplace_orders`
   */
  buildCountQuery(db: Kysely<DataDatabase>) {
    return db
      .selectFrom(this.tableName)
      .select((eb) => eb.fn.countAll().as("count"));
  }
}

// ---- file: src/services/database/strategies/MetadataQueryStrategy.ts ----

import { Kysely } from "kysely";
import { GetMetadataArgs } from "../../../graphql/schemas/args/metadataArgs.js";
import { CachingDatabase } from "../../../types/kyselySupabaseCaching.js";
import { MetadataSelect } from "../entities/MetadataEntityService.js";
import { QueryStrategy } from "./QueryStrategy.js";

// Columns fetched by default. Deliberately excludes large payload columns
// (e.g. "image") so list queries stay cheap.
const supportedColumns = [
  "metadata.id",
  "metadata.name",
  "metadata.description",
  "metadata.external_url",
  "metadata.work_scope",
  "metadata.work_timeframe_from",
  "metadata.work_timeframe_to",
  "metadata.impact_scope",
  "metadata.impact_timeframe_from",
  "metadata.impact_timeframe_to",
  "metadata.contributors",
  "metadata.rights",
  "metadata.uri",
  "metadata.properties",
  "metadata.allow_list_uri",
  "metadata.parsed",
] as const;

// NOTE(review): the type arguments of Omit were lost in this paste;
// reconstructed from the column list above, which omits "image" —
// confirm against the original source.
type MetadataSelection = Omit<MetadataSelect, "image">;

/**
 * Strategy for building database queries for metadata records.
 * Implements query logic for metadata retrieval and counting.
+ * + * This strategy handles: + * - Basic metadata queries without filtering + * - Selective column fetching (excludes large fields like 'image' by default) + * + * The strategy is designed to work with the metadata table schema: + * - id: Unique identifier + * - name: Hypercert name + * - description: Detailed description + * - work_scope, impact_scope: Scope definitions + * - timeframe fields: Work and impact time ranges + * - uri: IPFS or other content identifier + * - properties: Additional custom properties + * + * Note: This strategy provides direct table access only. Any relationship + * filtering (e.g., hypercert relationships) should be handled at the service level. + */ +export class MetadataQueryStrategy extends QueryStrategy< + CachingDatabase, + "metadata", + GetMetadataArgs, + MetadataSelection +> { + protected readonly tableName = "metadata" as const; + + /** + * Builds a query to retrieve metadata records. + * Returns all records with supported columns. + * + * @param db - Kysely database instance + * @returns A query builder for retrieving metadata data + * + * @example + * ```typescript + * buildDataQuery(db); + * // SELECT supported_columns FROM metadata + * ``` + */ + buildDataQuery(db: Kysely) { + return db.selectFrom(this.tableName).select(supportedColumns); + } + + /** + * Builds a query to count metadata records. + * Returns total count of all records. 
+ * + * @param db - Kysely database instance + * @returns A query builder for counting metadata records + * + * @example + * ```typescript + * buildCountQuery(db); + * // SELECT COUNT(*) as count FROM metadata + * ``` + */ + buildCountQuery(db: Kysely) { + return db.selectFrom(this.tableName).select((eb) => { + return eb.fn.countAll().as("count"); + }); + } +} diff --git a/src/services/database/strategies/QueryStrategy.ts b/src/services/database/strategies/QueryStrategy.ts new file mode 100644 index 00000000..15bb4929 --- /dev/null +++ b/src/services/database/strategies/QueryStrategy.ts @@ -0,0 +1,57 @@ +import { Kysely, Selectable, SelectQueryBuilder } from "kysely"; + +import type { CachingDatabase } from "../../../types/kyselySupabaseCaching.js"; +import type { DataDatabase } from "../../../types/kyselySupabaseData.js"; + +export type SupportedDatabase = CachingDatabase | DataDatabase; + +/** + * Abstract base class for building database queries with a consistent interface. + * Provides a template for creating specialized query strategies for different tables. 
+ * + * @template DB - The database type (CachingDatabase | DataDatabase) + * @template T - The table name (must be a key of DB and a string) + * @template Args - Query arguments type + * @template Selection - The selection type for the query + * + * Each concrete strategy implementation should: + * - Define the specific table name as a readonly property + * - Implement buildDataQuery() to construct SELECT queries with optional table joins + * - Implement buildCountQuery() to construct COUNT queries with optional table joins + * + * Example usage: + * ```typescript + * class TableQueryStrategy extends QueryStrategy { + * protected readonly tableName = "table_name"; + * + * buildDataQuery(db: Kysely, args?: BaseQueryArgsType) { + * return db.selectFrom(this.tableName) + * .selectAll() + * .$if(args?.where?.referenceTableId, qb => qb.where(...)); + * } + * + * buildCountQuery(db: Kysely, args?: BaseQueryArgsType) { + * return db.selectFrom(this.tableName) + * .select(({ fn }) => fn.count("id").as("count")) + * .$if(args?.where, qb => qb.where(...)); + * } + * } + */ +export abstract class QueryStrategy< + DB extends SupportedDatabase, + T extends keyof DB & string, + Args = void, + Selection = Selectable, +> { + protected abstract readonly tableName: T; + + abstract buildDataQuery( + db: Kysely, + args?: Args, + ): SelectQueryBuilder; + + abstract buildCountQuery( + db: Kysely, + args?: Args, + ): SelectQueryBuilder; +} diff --git a/src/services/database/strategies/QueryStrategyFactory.ts b/src/services/database/strategies/QueryStrategyFactory.ts new file mode 100644 index 00000000..c26e9ac9 --- /dev/null +++ b/src/services/database/strategies/QueryStrategyFactory.ts @@ -0,0 +1,155 @@ +import { BaseQueryArgsType } from "../../../lib/graphql/BaseQueryArgs.js"; +import { AllowlistQueryStrategy } from "./AllowlistQueryStrategy.js"; +import { AttestationsQueryStrategy } from "./AttestationQueryStrategy.js"; +import { BlueprintsQueryStrategy } from 
"./BlueprintsQueryStrategy.js"; +import { ClaimsQueryStrategy } from "./ClaimsQueryStrategy.js"; +import { CollectionsQueryStrategy } from "./CollectionsQueryStrategy.js"; +import { ContractsQueryStrategy } from "./ContractsQueryStrategy.js"; +import { FractionsQueryStrategy } from "./FractionsQueryStrategy.js"; +import { HyperboardsQueryStrategy } from "./HyperboardsQueryStrategy.js"; +import { MarketplaceOrdersQueryStrategy } from "./MarketplaceOrdersQueryStrategy.js"; +import { MetadataQueryStrategy } from "./MetadataQueryStrategy.js"; +import { QueryStrategy, SupportedDatabase } from "./QueryStrategy.js"; +import { SalesQueryStrategy } from "./SalesQueryStrategy.js"; +import { SignatureRequestsQueryStrategy } from "./SignatureRequestsQueryStrategy.js"; +import { SupportedSchemasQueryStrategy } from "./SupportedSchemasQueryStrategy.js"; +import { UsersQueryStrategy } from "./UsersQueryStrategy.js"; +import { EntityFields } from "../../../lib/graphql/createEntityArgs.js"; +import { SortOptions } from "../../../lib/graphql/createEntitySortArgs.js"; + +/** + * Base type for query arguments used across all strategies + */ +type QueryArgs = BaseQueryArgsType< + Record, + SortOptions +>; + +/** + * Type for strategy constructors to ensure they match the QueryStrategy interface + */ +type QueryStrategyConstructor< + DB extends SupportedDatabase = SupportedDatabase, + T extends keyof DB & string = keyof DB & string, + Args extends QueryArgs = QueryArgs, +> = new () => QueryStrategy; + +/** + * Type for the strategy registry mapping table names to their constructors + */ +type StrategyRegistry = { + [K in keyof SupportedDatabase & string]: QueryStrategyConstructor< + SupportedDatabase, + K + >; +}; + +/** + * Type for the strategy cache mapping table names to their instances + */ +type StrategyCache = { + [K in keyof SupportedDatabase & string]?: QueryStrategy; +}; + +/** + * Factory class for creating query strategies for different tables + * Uses a registry pattern for 
extensibility and a proxy for lazy loading + */ +export class QueryStrategyFactory { + /** + * Registry of strategy constructors + * @private + */ + private static strategyRegistry: Partial = { + attestations: AttestationsQueryStrategy, + allowlist_records: AllowlistQueryStrategy, + claimable_fractions_with_proofs: AllowlistQueryStrategy, + blueprints_with_admins: BlueprintsQueryStrategy, + blueprints: BlueprintsQueryStrategy, + claims: ClaimsQueryStrategy, + claims_view: ClaimsQueryStrategy, + hypercerts: ClaimsQueryStrategy, + collections: CollectionsQueryStrategy, + contracts: ContractsQueryStrategy, + fractions: FractionsQueryStrategy, + fractions_view: FractionsQueryStrategy, + hyperboards: HyperboardsQueryStrategy, + hyperboards_with_admins: HyperboardsQueryStrategy, + metadata: MetadataQueryStrategy, + orders: MarketplaceOrdersQueryStrategy, + marketplace_orders: MarketplaceOrdersQueryStrategy, + sales: SalesQueryStrategy, + signature_requests: SignatureRequestsQueryStrategy, + attestation_schema: SupportedSchemasQueryStrategy, + eas_schema: SupportedSchemasQueryStrategy, + supported_schemas: SupportedSchemasQueryStrategy, + users: UsersQueryStrategy, + }; + + /** + * Cache of strategy instances + * @private + */ + private static strategies = new Proxy( + {}, + { + get( + target: StrategyCache, + prop: K | string | symbol, + ): QueryStrategy | undefined { + if (typeof prop !== "string") { + return undefined; + } + + const key = prop as K; + + // Check if we already have a cached instance + if (key in target && target[key]) { + return target[key] as QueryStrategy; + } + + // Get the constructor from the registry + const Constructor = QueryStrategyFactory.strategyRegistry[key]; + if (!Constructor) { + throw new Error( + `No strategy registered for table "${String(key)}". 
Available tables: ${Object.keys( + QueryStrategyFactory.strategyRegistry, + ).join(", ")}`, + ); + } + + // Create and cache a new instance + const strategy = new Constructor() as QueryStrategy< + SupportedDatabase, + K + >; + (target as Record>)[key] = + strategy; + return strategy; + }, + }, + ); + + /** + * Get a strategy instance for a given table + * Creates and caches the instance if it doesn't exist + * + * @param tableName - The name of the table to get a strategy for + * @returns A query strategy instance for the given table + * @throws Error if no strategy is registered for the table + */ + static getStrategy< + DB extends SupportedDatabase, + T extends keyof DB & string, + Args extends QueryArgs = QueryArgs, + >(tableName: T): QueryStrategy { + const strategy = + this.strategies[tableName as keyof SupportedDatabase & string]; + if (!strategy) { + throw new Error( + `Failed to get strategy for table "${tableName}". This might be a type mismatch or the strategy is not properly registered.`, + ); + } + return strategy as QueryStrategy; + } +} diff --git a/src/services/database/strategies/SalesQueryStrategy.ts b/src/services/database/strategies/SalesQueryStrategy.ts new file mode 100644 index 00000000..24266705 --- /dev/null +++ b/src/services/database/strategies/SalesQueryStrategy.ts @@ -0,0 +1,41 @@ +import { Kysely } from "kysely"; +import { CachingDatabase } from "../../../types/kyselySupabaseCaching.js"; +import { QueryStrategy } from "./QueryStrategy.js"; + +/** + * Query strategy for handling sales-related database operations. + * This strategy provides functionality to: + * 1. Build queries for fetching sales data + * 2. Build queries for counting sales + * + * The strategy is used by the SalesService to construct and execute database queries. + * It extends the base QueryStrategy class to provide sales-specific query building. 
+ */ +export class SalesQueryStrategy extends QueryStrategy< + CachingDatabase, + "sales" +> { + protected readonly tableName = "sales" as const; + + /** + * Builds a query to fetch sales data. + * + * @param db - The Kysely database instance + * @returns A query builder configured to select all fields from the sales table + */ + buildDataQuery(db: Kysely) { + return db.selectFrom(this.tableName).selectAll(); + } + + /** + * Builds a query to count sales. + * + * @param db - The Kysely database instance + * @returns A query builder configured to count all rows in the sales table + */ + buildCountQuery(db: Kysely) { + return db.selectFrom(this.tableName).select((eb) => { + return eb.fn.countAll().as("count"); + }); + } +} diff --git a/src/services/database/strategies/SignatureRequestsQueryStrategy.ts b/src/services/database/strategies/SignatureRequestsQueryStrategy.ts new file mode 100644 index 00000000..97209b3c --- /dev/null +++ b/src/services/database/strategies/SignatureRequestsQueryStrategy.ts @@ -0,0 +1,42 @@ +import { Kysely } from "kysely"; +import { DataDatabase } from "../../../types/kyselySupabaseData.js"; +import { QueryStrategy } from "./QueryStrategy.js"; + +/** + * Query strategy for signature requests. + * Handles building queries for retrieving and counting signature requests. + * + * A signature request represents a request for a user to sign a message, with: + * - Safe address (the address that needs to sign) + * - Message hash (hash of the message to be signed) + * - Status (pending, executed, canceled) + * - Purpose (e.g. update_user_data) + */ +export class SignatureRequestsQueryStrategy extends QueryStrategy< + DataDatabase, + "signature_requests" +> { + protected readonly tableName = "signature_requests" as const; + + /** + * Builds a query to select all signature request data. 
+ * + * @param db - The database connection + * @returns A query builder for selecting signature request data + */ + buildDataQuery(db: Kysely) { + return db.selectFrom(this.tableName).selectAll(); + } + + /** + * Builds a query to count signature requests. + * + * @param db - The database connection + * @returns A query builder for counting signature requests + */ + buildCountQuery(db: Kysely) { + return db.selectFrom(this.tableName).select((eb) => { + return eb.fn.countAll().as("count"); + }); + } +} diff --git a/src/services/database/strategies/SupportedSchemasQueryStrategy.ts b/src/services/database/strategies/SupportedSchemasQueryStrategy.ts new file mode 100644 index 00000000..7f1df596 --- /dev/null +++ b/src/services/database/strategies/SupportedSchemasQueryStrategy.ts @@ -0,0 +1,57 @@ +import { Kysely } from "kysely"; +import { CachingDatabase } from "../../../types/kyselySupabaseCaching.js"; +import { QueryStrategy } from "./QueryStrategy.js"; + +/** + * Strategy for querying supported EAS (Ethereum Attestation Service) schemas. + * Provides a simple query interface for the supported_schemas table. + * + * This strategy extends the base QueryStrategy to provide schema-specific query building. + * It handles basic data retrieval and counting operations without complex joins or filtering. + * + * @template CachingDatabase - The database type containing the supported_schemas table + */ +export class SupportedSchemasQueryStrategy extends QueryStrategy< + CachingDatabase, + "supported_schemas" +> { + protected readonly tableName = "supported_schemas" as const; + + /** + * Builds a query to retrieve supported schema data. + * Returns a simple SELECT query that retrieves all columns from the supported_schemas table. 
+ * + * @param db - Kysely database instance + * @returns A query builder for retrieving supported schema data + * + * @example + * ```typescript + * // Basic query to select all supported schemas + * buildDataQuery(db); + * // SELECT * FROM supported_schemas + * ``` + */ + buildDataQuery(db: Kysely) { + return db.selectFrom(this.tableName).selectAll(); + } + + /** + * Builds a query to count supported schemas. + * Returns a simple COUNT query for the supported_schemas table. + * + * @param db - Kysely database instance + * @returns A query builder for counting supported schemas + * + * @example + * ```typescript + * // Count all supported schemas + * buildCountQuery(db); + * // SELECT COUNT(*) as count FROM supported_schemas + * ``` + */ + buildCountQuery(db: Kysely) { + return db.selectFrom(this.tableName).select((eb) => { + return eb.fn.countAll().as("count"); + }); + } +} diff --git a/src/services/database/strategies/UsersQueryStrategy.ts b/src/services/database/strategies/UsersQueryStrategy.ts new file mode 100644 index 00000000..54098d45 --- /dev/null +++ b/src/services/database/strategies/UsersQueryStrategy.ts @@ -0,0 +1,38 @@ +import { Kysely } from "kysely"; +import { DataDatabase } from "../../../types/kyselySupabaseData.js"; +import { QueryStrategy } from "./QueryStrategy.js"; + +/** + * Strategy for building queries related to user data. + * Implements the QueryStrategy interface for the users table. + * + * This strategy extends the base QueryStrategy to provide user-specific query building. + * It handles: + * - Basic data retrieval from the users table + * - Counting operations with appropriate joins + * + * @template DataDatabase - The database type containing the users table + */ +export class UsersQueryStrategy extends QueryStrategy { + protected readonly tableName = "users" as const; + + /** + * Builds a query to select all user data. 
+ * @param db - Database connection + * @returns Query builder for selecting user data + */ + buildDataQuery(db: Kysely) { + return db.selectFrom(this.tableName).selectAll(); + } + + /** + * Builds a query to count total number of users. + * @param db - Database connection + * @returns Query builder for counting users + */ + buildCountQuery(db: Kysely) { + return db.selectFrom(this.tableName).select((eb) => { + return eb.fn.countAll().as("count"); + }); + } +} diff --git a/src/services/graphql/resolvers/allowlistRecordResolver.ts b/src/services/graphql/resolvers/allowlistRecordResolver.ts new file mode 100644 index 00000000..e24cb0a5 --- /dev/null +++ b/src/services/graphql/resolvers/allowlistRecordResolver.ts @@ -0,0 +1,122 @@ +import { inject, injectable } from "tsyringe"; +import { Args, FieldResolver, Query, Resolver, Root } from "type-graphql"; +import { AllowlistRecordService } from "../../database/entities/AllowListRecordEntityService.js"; +import { HypercertsService } from "../../database/entities/HypercertsEntityService.js"; +import { GetAllowlistRecordsArgs } from "../../../graphql/schemas/args/allowlistRecordArgs.js"; +import { + AllowlistRecord, + GetAllowlistRecordResponse, +} from "../../../graphql/schemas/typeDefs/allowlistRecordTypeDefs.js"; + +/** + * GraphQL resolver for AllowlistRecord operations. + * Handles queries for allowlist records and resolves related fields. + * + * This resolver provides: + * - Query for fetching allowlist records with optional filtering + * - Field resolution for the hypercert field, which loads the associated hypercert data + * + * @injectable Marks the class as injectable for dependency injection with tsyringe + * @resolver Marks the class as a GraphQL resolver for the AllowlistRecord type + */ +@injectable() +@Resolver(() => AllowlistRecord) +class AllowlistRecordResolver { + /** + * Creates a new instance of AllowlistRecordResolver. 
+ * + * @param allowlistRecordService - Service for handling allowlist record operations + * @param hypercertsService - Service for handling hypercert operations + */ + constructor( + @inject(AllowlistRecordService) + private allowlistRecordService: AllowlistRecordService, + @inject(HypercertsService) + private hypercertsService: HypercertsService, + ) {} + + /** + * Queries allowlist records based on provided arguments. + * + * @param args - Query arguments for filtering allowlist records + * @returns A promise that resolves to an object containing: + * - data: Array of allowlist records matching the query + * - count: Total number of records matching the query + * + * @example + * Query: + * ```graphql + * query { + * allowlistRecords(where: { hypercert: { hypercert_id: { eq: "123" } } }) { + * data { + * id + * hypercert_id + * } + * count + * } + * } + * ``` + */ + @Query(() => GetAllowlistRecordResponse) + async allowlistRecords(@Args() args: GetAllowlistRecordsArgs) { + try { + return await this.allowlistRecordService.getAllowlistRecords(args); + } catch (e) { + console.error( + `[AllowlistRecordResolver::allowlistRecords] Error fetching allowlist records: ${(e as Error).message}`, + ); + return null; + } + } + + /** + * Resolves the hypercert field for an allowlist record. + * This field resolver is called automatically when the hypercert field is requested in a query. 
+ * + * @param allowlistRecord - The allowlist record for which to resolve the hypercert + * @returns A promise that resolves to the associated hypercert data or null if not found + * + * @example + * Query with hypercert field: + * ```graphql + * query { + * allowlistRecords { + * data { + * id + * hypercert { + * id + * name + * } + * } + * } + * } + * ``` + */ + @FieldResolver() + async hypercert(@Root() allowlistRecord: AllowlistRecord) { + try { + const [hypercert, metadata] = await Promise.all([ + this.hypercertsService.getHypercert({ + where: { hypercert_id: { eq: allowlistRecord.hypercert_id } }, + }), + this.hypercertsService.getHypercertMetadata({ + hypercert_id: allowlistRecord.hypercert_id, + }), + ]); + if (!hypercert) { + return null; + } + return { + ...hypercert, + metadata: metadata || null, + }; + } catch (e) { + console.error( + `[AllowlistRecordResolver::hypercert] Error fetching hypercert: ${(e as Error).message}`, + ); + return null; + } + } +} + +export { AllowlistRecordResolver }; diff --git a/src/services/graphql/resolvers/attestationResolver.ts b/src/services/graphql/resolvers/attestationResolver.ts new file mode 100644 index 00000000..0f723933 --- /dev/null +++ b/src/services/graphql/resolvers/attestationResolver.ts @@ -0,0 +1,346 @@ +import { inject, injectable } from "tsyringe"; +import { Args, FieldResolver, Query, Resolver, Root } from "type-graphql"; +import { getAddress, isAddress } from "viem"; +import { z } from "zod"; +import { GetAttestationsArgs } from "../../../graphql/schemas/args/attestationArgs.js"; +import { + Attestation, + GetAttestationsResponse, +} from "../../../graphql/schemas/typeDefs/attestationTypeDefs.js"; +import { AttestationService } from "../../database/entities/AttestationEntityService.js"; +import { AttestationSchemaService } from "../../database/entities/AttestationSchemaEntityService.js"; +import { HypercertsService } from "../../database/entities/HypercertsEntityService.js"; +import { MetadataService } 
from "../../database/entities/MetadataEntityService.js";

/**
 * Schema for validating hypercert pointer data in attestations.
 *
 * Validation rules:
 * - chain_id: a string or integer convertible to bigint
 * - contract_address: a valid Ethereum address
 * - token_id: a string or integer convertible to bigint
 */
const HypercertPointer = z.object({
  chain_id: z
    .union([
      z.string().refine(
        (val) => {
          try {
            BigInt(val);
            return true;
          } catch {
            return false;
          }
        },
        { message: "chain_id must be a valid bigint" },
      ),
      z.number().int().transform(String),
    ])
    .transform((val) => BigInt(val)),
  contract_address: z
    .string()
    .refine(isAddress, { message: "Invalid contract address" }),
  token_id: z
    .union([
      z.string().refine(
        (val) => {
          try {
            BigInt(val);
            return true;
          } catch {
            return false;
          }
        },
        { message: "token_id must be a valid bigint" },
      ),
      z.number().int().transform(String),
    ])
    .transform((val) => BigInt(val)),
});

/**
 * GraphQL resolver for Attestation operations.
 * Handles queries for attestations and resolves the related hypercert,
 * EAS schema, and metadata fields.
 *
 * Error handling: every resolver catches, logs, and returns null rather
 * than propagating; invalid attestation data yields null related fields.
 *
 * @injectable Marks the class as injectable for dependency injection with tsyringe
 * @resolver Marks the class as a GraphQL resolver for the Attestation type
 */
@injectable()
@Resolver(() => Attestation)
class AttestationResolver {
  /**
   * @param attestationService - Service for attestation operations
   * @param hypercertService - Service for hypercert operations
   * @param attestationSchemaService - Service for attestation schema operations
   * @param metadataService - Service for metadata operations
   */
  constructor(
    @inject(AttestationService)
    private attestationService: AttestationService,
    @inject(HypercertsService)
    private hypercertService: HypercertsService,
    @inject(AttestationSchemaService)
    private attestationSchemaService: AttestationSchemaService,
    // NOTE(review): metadataService is injected but not referenced in this
    // class as visible here — confirm whether it is still needed.
    @inject(MetadataService)
    private metadataService: MetadataService,
  ) {}

  /**
   * Queries attestations based on the provided arguments.
   *
   * @param args - Query arguments for filtering attestations
   * @returns An object with `data` (matching attestations) and `count`
   *          (total matches), or null if the lookup fails.
   */
  @Query(() => GetAttestationsResponse)
  async attestations(@Args() args: GetAttestationsArgs) {
    try {
      return await this.attestationService.getAttestations(args);
    } catch (e) {
      console.error(
        `[AttestationResolver::attestations] Error fetching attestations: ${(e as Error).message}`,
      );
      return null;
    }
  }

  /**
   * Resolves the hypercert field for an attestation by extracting the
   * hypercert ID from the attestation payload.
   *
   * @param attestation - The attestation being resolved
   * @returns The associated hypercert, or null when the payload is absent,
   *          the ID cannot be extracted, or the lookup fails.
   */
  @FieldResolver()
  async hypercert(@Root() attestation: Attestation) {
    try {
      if (!attestation.data) return null;

      const attested_hypercert_id = this.getHypercertIdFromAttestationData(
        attestation.data,
      );
      if (!attested_hypercert_id) return null;

      return await this.hypercertService.getHypercert({
        where: { hypercert_id: { eq: attested_hypercert_id } },
      });
    } catch (e) {
      console.error(
        `[AttestationResolver::hypercert] Error fetching hypercert: ${(e as Error).message}`,
      );
      return null;
    }
  }

  /**
   * Resolves the EAS schema field for an attestation.
   *
   * @param attestation - The attestation being resolved
   * @returns The associated schema, or null when no schema ID is present or
   *          the lookup fails.
   */
  @FieldResolver()
  async eas_schema(@Root() attestation: Attestation) {
    try {
      if (!attestation.supported_schemas_id) return null;

      return await this.attestationSchemaService.getAttestationSchema({
        where: { id: { eq: attestation.supported_schemas_id } },
      });
    } catch (e) {
      console.error(
        `[AttestationResolver::eas_schema] Error fetching eas_schema: ${(e as Error).message}`,
      );
      return null;
    }
  }

  /**
   * Resolves the metadata field for an attestation by extracting the
   * hypercert ID from the attestation payload and fetching its metadata.
   *
   * @param attestation - The attestation being resolved
   * @returns The associated metadata, or null when the payload is absent,
   *          the ID cannot be extracted, or the lookup fails.
   */
  //TODO: Should this be part of the resolved hypercert data?
  @FieldResolver()
  async metadata(@Root() attestation: Attestation) {
    try {
      if (!attestation.data) return null;

      const attested_hypercert_id = this.getHypercertIdFromAttestationData(
        attestation.data,
      );
      if (!attested_hypercert_id) return null;

      return await this.hypercertService.getHypercertMetadata({
        hypercert_id: attested_hypercert_id,
      });
    } catch (e) {
      console.error(
        `[AttestationResolver::metadata] Error fetching metadata: ${(e as Error).message}`,
      );
      return null;
    }
  }

  /**
   * Extracts and formats the hypercert ID from attestation data.
   * The ID has the shape "{chain_id}-{checksummed contract_address}-{token_id}",
   * e.g. "1-0x1234...5678-123".
   *
   * @param attestationData - The data field from an attestation
   * @returns The formatted hypercert ID, or null when the data is absent or
   *          fails HypercertPointer validation (bad chain_id, address, or
   *          token_id).
   */
  getHypercertIdFromAttestationData(attestationData: unknown): string | null {
    try {
      if (!attestationData) return null;

      const parseResult = HypercertPointer.safeParse(attestationData);
      if (!parseResult.success) return null;

      const { chain_id, contract_address, token_id } = parseResult.data;
      return `${chain_id.toString()}-${getAddress(contract_address)}-${token_id.toString()}`;
    } catch (e) {
      console.error(
        `[AttestationResolver::getHypercertIdFromAttestationData] Error parsing hypercert ID: ${(e as Error).message}`,
      );
      return null;
    }
  }
}

export { AttestationResolver };

// ---- file: src/services/graphql/resolvers/attestationSchemaResolver.ts ----

import { inject, injectable } from "tsyringe";
import { Args, FieldResolver, Query, Resolver, Root } from "type-graphql";
import { GetAttestationSchemasArgs } from "../../../graphql/schemas/args/attestationSchemaArgs.js";
import {
  AttestationSchema,
  GetAttestationsSchemaResponse,
} from "../../../graphql/schemas/typeDefs/attestationSchemaTypeDefs.js";
+import { GetAttestationsResponse } from "../../../graphql/schemas/typeDefs/attestationTypeDefs.js"; +import { AttestationService } from "../../../services/database/entities/AttestationEntityService.js"; +import { AttestationSchemaService } from "../../../services/database/entities/AttestationSchemaEntityService.js"; + +/** + * GraphQL resolver for AttestationSchema operations. + * Handles queries for attestation schemas and resolves related fields. + * + * This resolver provides: + * - Query for fetching attestation schemas with optional filtering + * - Field resolution for attestations associated with a schema + * + * @injectable Marks the class as injectable for dependency injection with tsyringe + * @resolver Marks the class as a GraphQL resolver for the AttestationSchema type + */ +@injectable() +@Resolver(() => AttestationSchema) +class AttestationSchemaResolver { + /** + * Creates a new instance of AttestationSchemaResolver. + * + * @param attestationSchemaService - Service for handling attestation schema operations + * @param attestationService - Service for handling attestation operations + */ + constructor( + @inject(AttestationSchemaService) + private attestationSchemaService: AttestationSchemaService, + @inject(AttestationService) + private attestationService: AttestationService, + ) {} + + /** + * Queries attestation schemas based on provided arguments. + * Returns both the matching schemas and a total count. 
+ * + * @param args - Query arguments for filtering schemas + * @returns A promise that resolves to an object containing: + * - data: Array of attestation schemas matching the query + * - count: Total number of matching schemas + * + * @example + * Query with filtering: + * ```graphql + * query { + * attestationSchemas( + * where: { + * id: { eq: "schema-id" }, + * revocable: { eq: true } + * } + * ) { + * data { + * id + * chain_id + * schema + * resolver + * revocable + * } + * count + * } + * } + * ``` + */ + @Query(() => GetAttestationsSchemaResponse) + async attestationSchemas(@Args() args: GetAttestationSchemasArgs) { + try { + return await this.attestationSchemaService.getAttestationSchemas(args); + } catch (e) { + console.error( + `[AttestationSchemaResolver::attestationSchemas] Error fetching attestation schemas: ${(e as Error).message}`, + ); + return null; + } + } + + /** + * Resolves the attestations field for an attestation schema. + * This field resolver is called automatically when the attestations field is requested in a query. 
+ * + * @param schema - The schema for which to resolve attestations + * @returns A promise that resolves to an object containing: + * - data: Array of attestations using this schema + * - count: Total number of attestations using this schema + * @throws {Error} If the attestation service query fails + * + * @example + * Query with attestations field: + * ```graphql + * query { + * attestationSchemas { + * data { + * id + * schema + * attestations { + * data { + * id + * data + * attester + * recipient + * } + * count + * } + * } + * } + * } + * ``` + */ + @FieldResolver(() => GetAttestationsResponse, { nullable: true }) + async attestations(@Root() schema: Partial<AttestationSchema>) { + try { + return await this.attestationService.getAttestations({ + where: { supported_schemas_id: { eq: schema.id } }, + }); + } catch (e) { + console.error( + `[AttestationSchemaResolver::attestations] Error fetching attestations: ${(e as Error).message}`, + ); + return null; + } + } +} + +export { AttestationSchemaResolver }; diff --git a/src/services/graphql/resolvers/blueprintResolver.ts b/src/services/graphql/resolvers/blueprintResolver.ts new file mode 100644 index 00000000..3d0bfb2f --- /dev/null +++ b/src/services/graphql/resolvers/blueprintResolver.ts @@ -0,0 +1,190 @@ +import { Args, FieldResolver, Query, Resolver, Root } from "type-graphql"; +import { + Blueprint, + GetBlueprintsResponse, +} from "../../../graphql/schemas/typeDefs/blueprintTypeDefs.js"; +import { GetBlueprintsArgs } from "../../../graphql/schemas/args/blueprintArgs.js"; +import { inject, injectable } from "tsyringe"; +import { BlueprintsService } from "../../database/entities/BlueprintsEntityService.js"; +import { HypercertsService } from "../../database/entities/HypercertsEntityService.js"; + +/** + * GraphQL resolver for Blueprint operations. + * Handles queries for blueprints and resolves related fields. 
+ * + * This resolver provides: + * - Query for fetching blueprints with optional filtering + * - Field resolution for admins associated with a blueprint + * - Field resolution for hypercerts associated with a blueprint + * + * Error Handling: + * All resolvers follow the GraphQL best practice of returning partial data instead of throwing errors. + * If an operation fails, it will: + * - Log the error internally for monitoring + * - Return null/empty data to the client + * - Include error information in the GraphQL response errors array + * + * @injectable Marks the class as injectable for dependency injection with tsyringe + * @resolver Marks the class as a GraphQL resolver for the Blueprint type + */ +@injectable() +@Resolver(() => Blueprint) +class BlueprintResolver { + /** + * Creates a new instance of BlueprintResolver. + * + * @param blueprintsService - Service for handling blueprint operations + * @param hypercertsService - Service for handling hypercert operations + */ + constructor( + @inject(BlueprintsService) + private blueprintsService: BlueprintsService, + @inject(HypercertsService) + private hypercertsService: HypercertsService, + ) {} + + /** + * Queries blueprints based on provided arguments. + * Returns both the matching blueprints and a total count. 
+ * + * @param args - Query arguments for filtering blueprints + * @returns A promise that resolves to an object containing: + * - data: Array of blueprints matching the query + * - count: Total number of matching blueprints + * Returns null if an error occurs + * + * @example + * ```graphql + * query { + * blueprints( + * where: { + * id: { eq: "blueprint-1" } + * } + * ) { + * data { + * id + * name + * description + * admins { + * address + * display_name + * } + * } + * count + * } + * } + * ``` + */ + @Query(() => GetBlueprintsResponse) + async blueprints(@Args() args: GetBlueprintsArgs) { + try { + return await this.blueprintsService.getBlueprints(args); + } catch (e) { + console.error( + `[BlueprintResolver::blueprints] Error fetching blueprints: ${(e as Error).message}`, + ); + return null; + } + } + + /** + * Resolves the admins field for a blueprint. + * Retrieves the list of administrators associated with the blueprint. + * + * @param blueprint - The blueprint for which to resolve admins + * @returns A promise resolving to: + * - Array of admin users if found + * - Empty array if: + * - No blueprint ID is available + * - An error occurs during retrieval + * + * @example + * ```graphql + * query { + * blueprints { + * data { + * id + * admins { + * address + * display_name + * avatar + * } + * } + * } + * } + * ``` + */ + @FieldResolver() + async admins(@Root() blueprint: Blueprint) { + if (!blueprint.id) { + console.warn( + `[BlueprintResolver::admins] No blueprint id found for ${blueprint.id}`, + ); + return []; + } + + try { + return await this.blueprintsService.getBlueprintAdmins(blueprint.id); + } catch (e) { + console.error( + `[BlueprintResolver::admins] Error fetching admins for blueprint ${blueprint.id}: ${(e as Error).message}`, + ); + return []; + } + } + + /** + * Resolves the hypercerts field for a blueprint. + * Retrieves the list of hypercerts associated with the blueprint. 
+ * + * @param blueprint - The blueprint for which to resolve hypercerts + * @returns A promise resolving to: + * - Array of hypercerts if found + * - null if: + * - No hypercert IDs are available + * - An error occurs during retrieval + * + * @example + * ```graphql + * query { + * blueprints { + * data { + * id + * hypercerts { + * data { + * id + * hypercert_id + * metadata { + * name + * description + * } + * } + * } + * } + * } + * } + * ``` + */ + @FieldResolver() + async hypercerts(@Root() blueprint: Blueprint) { + if (!blueprint.hypercert_ids?.length) { + console.warn( + `[BlueprintResolver::hypercerts] No hypercert ids found for blueprint ${blueprint.id}`, + ); + return null; + } + + try { + return await this.hypercertsService.getHypercerts({ + where: { hypercert_id: { in: blueprint.hypercert_ids } }, + }); + } catch (e) { + console.error( + `[BlueprintResolver::hypercerts] Error fetching hypercerts for blueprint ${blueprint.id}: ${(e as Error).message}`, + ); + return null; + } + } +} + +export { BlueprintResolver }; diff --git a/src/services/graphql/resolvers/collectionResolver.ts b/src/services/graphql/resolvers/collectionResolver.ts new file mode 100644 index 00000000..184369ff --- /dev/null +++ b/src/services/graphql/resolvers/collectionResolver.ts @@ -0,0 +1,228 @@ +import { Args, FieldResolver, Query, Resolver, Root } from "type-graphql"; + +import { GetCollectionsArgs } from "../../../graphql/schemas/args/collectionArgs.js"; +import { Blueprint } from "../../../graphql/schemas/typeDefs/blueprintTypeDefs.js"; +import { + Collection, + GetCollectionsResponse, +} from "../../../graphql/schemas/typeDefs/collectionTypeDefs.js"; +import { User } from "../../../graphql/schemas/typeDefs/userTypeDefs.js"; + +import { inject, injectable } from "tsyringe"; +import { CollectionService } from "../../database/entities/CollectionEntityService.js"; +import { GetHypercertsResponse } from "../../../graphql/schemas/typeDefs/hypercertTypeDefs.js"; + +/** + * GraphQL 
resolver for Collection operations. + * Handles queries for collections and resolves related fields like hypercerts, admins, and blueprints. + * + * This resolver provides: + * - Query for fetching collections with optional filtering + * - Field resolution for hypercerts within a collection + * - Field resolution for collection admins + * - Field resolution for blueprints associated with a collection + * + * Error Handling: + * If an operation fails, it will: + * - Log the error internally for monitoring + * - Return null/empty data to the client + * - Include error information in the GraphQL response errors array + * + * @injectable Marks the class as injectable for dependency injection with tsyringe + * @resolver Marks the class as a GraphQL resolver for the Collection type + */ +@injectable() +@Resolver(() => Collection) +class CollectionResolver { + /** + * Creates a new instance of CollectionResolver. + * + * @param collectionService - Service for handling collection operations + */ + constructor( + @inject(CollectionService) + private collectionService: CollectionService, + ) {} + + /** + * Queries collections based on provided arguments. + * Returns both the matching collections and a total count. 
+ * + * @param args - Query arguments for filtering collections + * @returns A promise that resolves to an object containing: + * - data: Array of collections matching the query + * - count: Total number of matching collections + * + * @example + * ```graphql + * query { + * collections( + * where: { + * name: { contains: "Research" } + * } + * ) { + * data { + * id + * name + * description + * hypercerts { + * id + * name + * } + * } + * count + * } + * } + * ``` + */ + @Query(() => GetCollectionsResponse) + async collections(@Args() args: GetCollectionsArgs) { + try { + return await this.collectionService.getCollections(args); + } catch (e) { + console.error( + `[CollectionResolver::collections] Error fetching collections: ${(e as Error).message}`, + ); + return null; + } + } + + /** + * Resolves the hypercerts field for a collection. + * Returns all hypercerts that belong to the specified collection. + * + * @param collection - The collection for which to resolve hypercerts + * @returns A promise resolving to: + * - Array of hypercerts if found + * - null if collection ID is undefined or an error occurs + * + * @example + * ```graphql + * query { + * collections { + * data { + * id + * name + * hypercerts { + * id + * name + * description + * } + * } + * } + * } + * ``` + */ + @FieldResolver(() => GetHypercertsResponse) + async hypercerts(@Root() collection: Collection) { + if (!collection.id) { + console.error( + "[CollectionResolver::hypercerts] Collection ID is undefined", + ); + return null; + } + + try { + return await this.collectionService.getCollectionHypercerts( + collection.id, + ); + } catch (e) { + console.error( + `[CollectionResolver::hypercerts] Error fetching hypercerts: ${(e as Error).message}`, + ); + return null; + } + } + + /** + * Resolves the admins field for a collection. + * Returns all users who have admin privileges for the specified collection. 
+ * + * @param collection - The collection for which to resolve admins + * @returns A promise resolving to: + * - Array of users if found + * - null if collection ID is undefined or an error occurs + * + * @example + * ```graphql + * query { + * collections { + * data { + * id + * name + * admins { + * id + * address + * display_name + * } + * } + * } + * } + * ``` + */ + @FieldResolver(() => [User]) + async admins(@Root() collection: Collection) { + if (!collection.id) { + console.error("[CollectionResolver::admins] Collection ID is undefined"); + return null; + } + + try { + return await this.collectionService.getCollectionAdmins(collection.id); + } catch (e) { + console.error( + `[CollectionResolver::admins] Error fetching admins: ${(e as Error).message}`, + ); + return null; + } + } + + /** + * Resolves the blueprints field for a collection. + * Returns all blueprints associated with the specified collection. + * + * @param collection - The collection for which to resolve blueprints + * @returns A promise resolving to: + * - Array of blueprints if found + * - null if collection ID is undefined or an error occurs + * + * @example + * ```graphql + * query { + * collections { + * data { + * id + * name + * blueprints { + * id + * name + * description + * } + * } + * } + * } + * ``` + */ + @FieldResolver(() => [Blueprint]) + async blueprints(@Root() collection: Collection) { + if (!collection.id) { + console.error( + "[CollectionResolver::blueprints] Collection ID is undefined", + ); + return null; + } + + try { + return await this.collectionService.getCollectionBlueprints( + collection.id, + ); + } catch (e) { + console.error( + `[CollectionResolver::blueprints] Error fetching blueprints: ${(e as Error).message}`, + ); + return null; + } + } +} + +export { CollectionResolver }; diff --git a/src/graphql/schemas/resolvers/composed.ts b/src/services/graphql/resolvers/composed.ts similarity index 100% rename from src/graphql/schemas/resolvers/composed.ts rename to 
src/services/graphql/resolvers/composed.ts diff --git a/src/services/graphql/resolvers/contractResolver.ts b/src/services/graphql/resolvers/contractResolver.ts new file mode 100644 index 00000000..c97ea8c6 --- /dev/null +++ b/src/services/graphql/resolvers/contractResolver.ts @@ -0,0 +1,83 @@ +import { inject, injectable } from "tsyringe"; +import { Args, Query, Resolver } from "type-graphql"; +import { GetContractsArgs } from "../../../graphql/schemas/args/contractArgs.js"; +import { + Contract, + GetContractsResponse, +} from "../../../graphql/schemas/typeDefs/contractTypeDefs.js"; +import { ContractService } from "../../database/entities/ContractEntityService.js"; + +/** + * GraphQL resolver for Contract operations. + * Handles queries for contracts deployed on various chains. + * + * This resolver provides: + * - Query for fetching contracts with optional filtering + * - Support for pagination and sorting + * + * Each contract represents a smart contract deployed on a blockchain, + * containing information such as: + * - Chain ID + * - Contract address + * - Deployment block number + * + * @injectable Marks the class as injectable for dependency injection with tsyringe + * @resolver Marks the class as a GraphQL resolver for the Contract type + */ +@injectable() +@Resolver(() => Contract) +class ContractResolver { + /** + * Creates a new instance of ContractResolver. + * + * @param contractService - Service for handling contract operations + */ + constructor( + @inject(ContractService) + private contractService: ContractService, + ) {} + + /** + * Queries contracts based on provided arguments. + * Returns both the matching contracts and a total count. 
+ * + * @param args - Query arguments for filtering contracts + * @returns A promise that resolves to an object containing: + * - data: Array of contracts matching the query + * - count: Total number of matching contracts + * + * @example + * Query with filtering: + * ```graphql + * query { + * contracts( + * where: { + * chain_id: { eq: "1" }, + * contract_address: { eq: "0x..." } + * } + * ) { + * data { + * id + * chain_id + * contract_address + * start_block + * } + * count + * } + * } + * ``` + */ + @Query(() => GetContractsResponse) + async contracts(@Args() args: GetContractsArgs) { + try { + return await this.contractService.getContracts(args); + } catch (e) { + console.error( + `[ContractResolver::contracts] Error fetching contracts: ${(e as Error).message}`, + ); + return null; + } + } +} + +export { ContractResolver }; diff --git a/src/services/graphql/resolvers/fractionResolver.ts b/src/services/graphql/resolvers/fractionResolver.ts new file mode 100644 index 00000000..3baab7f9 --- /dev/null +++ b/src/services/graphql/resolvers/fractionResolver.ts @@ -0,0 +1,276 @@ +import { parseClaimOrFractionId } from "@hypercerts-org/sdk"; +import { inject, injectable } from "tsyringe"; +import { Args, FieldResolver, Query, Resolver, Root } from "type-graphql"; +import { GetFractionsArgs } from "../../../graphql/schemas/args/fractionArgs.js"; +import { + Fraction, + GetFractionsResponse, +} from "../../../graphql/schemas/typeDefs/fractionTypeDefs.js"; +import { FractionService } from "../../database/entities/FractionEntityService.js"; +import { HypercertsService } from "../../database/entities/HypercertsEntityService.js"; +import { MarketplaceOrdersService } from "../../database/entities/MarketplaceOrdersEntityService.js"; +import { SalesService } from "../../database/entities/SalesEntityService.js"; + +/** + * GraphQL resolver for Fraction operations. + * Handles queries for fractions and resolves related fields like metadata, orders, and sales. 
+ * + * This resolver provides: + * - Query for fetching fractions with optional filtering + * - Field resolution for metadata associated with the fraction's claim + * - Field resolution for marketplace orders related to the fraction + * - Field resolution for sales history of the fraction + * + * Each fraction represents a portion of a hypercert, with its own unique identifiers + * and relationships to other entities in the system. + * + * Error Handling: + * All resolvers follow the GraphQL best practice of returning partial data instead of throwing errors. + * If an operation fails, it will: + * - Log the error internally for monitoring + * - Return null/empty data to the client + * - Include error information in the GraphQL response errors array + * + * @injectable Marks the class as injectable for dependency injection with tsyringe + * @resolver Marks the class as a GraphQL resolver for the Fraction type + */ +@injectable() +@Resolver(() => Fraction) +class FractionResolver { + /** + * Creates a new instance of FractionResolver. + * + * @param fractionsService - Service for handling fraction operations + * @param metadataService - Service for handling metadata operations + * @param salesService - Service for handling sales operations + * @param marketplaceOrdersService - Service for handling marketplace orders operations + */ + constructor( + @inject(FractionService) + private fractionsService: FractionService, + @inject(HypercertsService) + private hypercertService: HypercertsService, + @inject(SalesService) + private salesService: SalesService, + @inject(MarketplaceOrdersService) + private marketplaceOrdersService: MarketplaceOrdersService, + ) {} + + /** + * Queries fractions based on provided arguments. + * Returns both the matching fractions and a total count. 
+ * + * @param args - Query arguments for filtering fractions + * @returns A promise that resolves to an object containing: + * - data: Array of fractions matching the query + * - count: Total number of matching fractions + * @throws {Error} If the database query fails + * + * @example + * Query with filtering: + * ```graphql + * query { + * fractions( + * where: { + * hypercert_id: { eq: "1-0x1234...5678-1" }, + * owner_address: { eq: "0xabcd...efgh" } + * } + * ) { + * data { + * id + * units + * owner_address + * } + * count + * } + * } + * ``` + */ + @Query(() => GetFractionsResponse) + async fractions(@Args() args: GetFractionsArgs) { + try { + return await this.fractionsService.getFractions(args); + } catch (e) { + console.error( + `[FractionResolver::fractions] Error fetching fractions: ${(e as Error).message}`, + ); + return null; + } + } + + /** + * Resolves the metadata field for a fraction. + * Retrieves metadata associated with the fraction's claim. + * + * @param fraction - The fraction for which to resolve metadata + * @returns A promise that resolves to the metadata object or undefined if: + * - The fraction has no claims_id + * - No metadata is found for the claim + * @throws {Error} If the metadata service query fails + * + * @example + * Query with metadata field: + * ```graphql + * query { + * fractions { + * data { + * id + * metadata { + * name + * description + * image + * } + * } + * } + * } + * ``` + */ + @FieldResolver() + async metadata(@Root() fraction: Fraction) { + if (!fraction.claims_id) { + return null; + } + + try { + return await this.hypercertService.getHypercertMetadata({ + claims_id: fraction.claims_id, + }); + } catch (e) { + console.error( + `[FractionResolver::metadata] Error fetching metadata for fraction ${fraction.id}: ${(e as Error).message}`, + ); + return null; + } + } + + /** + * Resolves the orders field for a fraction. + * Retrieves marketplace orders associated with the fraction. 
+ * + * @param fraction - The fraction for which to resolve orders + * @returns A promise that resolves to an object containing: + * - data: Array of orders related to the fraction + * - count: Total number of related orders + * Returns undefined if: + * - The fraction has no fraction_id + * - The fraction_id cannot be parsed + * @throws {Error} If the marketplace orders service query fails + * + * @example + * Query with orders field: + * ```graphql + * query { + * fractions { + * data { + * id + * orders { + * data { + * id + * price + * status + * } + * count + * } + * } + * } + * } + * ``` + */ + @FieldResolver() + async orders(@Root() fraction: Fraction) { + if (!fraction.fraction_id) { + return null; + } + + const { id } = parseClaimOrFractionId(fraction.fraction_id); + + if (!id) { + console.warn( + `[FractionResolver::orders] Error parsing fraction_id for fraction ${fraction.id}`, + ); + return null; + } + + try { + return await this.marketplaceOrdersService.getOrders({ + where: { + itemIds: { + arrayContains: [id.toString()], + }, + }, + }); + } catch (e) { + console.error( + `[FractionResolver::orders] Error fetching orders for fraction ${fraction.id}: ${(e as Error).message}`, + ); + // Return empty result instead of throwing + return null; + } + } + + /** + * Resolves the sales field for a fraction. + * Retrieves sales history associated with the fraction. 
+ * + * @param fraction - The fraction for which to resolve sales + * @returns A promise that resolves to an object containing: + * - data: Array of sales related to the fraction + * - count: Total number of related sales + * Returns undefined if: + * - The fraction has no fraction_id + * - The fraction_id cannot be parsed + * @throws {Error} If the sales service query fails + * + * @example + * Query with sales field: + * ```graphql + * query { + * fractions { + * data { + * id + * sales { + * data { + * id + * price + * timestamp + * } + * count + * } + * } + * } + * } + * ``` + */ + @FieldResolver() + async sales(@Root() fraction: Fraction) { + if (!fraction.fraction_id) { + return null; + } + + const { id } = parseClaimOrFractionId(fraction.fraction_id); + + if (!id) { + console.warn( + `[FractionResolver::sales] Error parsing fraction_id for fraction ${fraction.id}`, + ); + return null; + } + + try { + return await this.salesService.getSales({ + where: { + item_ids: { + arrayContains: [id.toString()], + }, + }, + }); + } catch (e) { + console.error( + `[FractionResolver::sales] Error fetching sales for fraction ${fraction.id}: ${(e as Error).message}`, + ); + return null; + } + } +} + +export { FractionResolver }; diff --git a/src/services/graphql/resolvers/hyperboardResolver.ts b/src/services/graphql/resolvers/hyperboardResolver.ts new file mode 100644 index 00000000..4aabf7aa --- /dev/null +++ b/src/services/graphql/resolvers/hyperboardResolver.ts @@ -0,0 +1,399 @@ +import { Selectable } from "kysely"; +import _ from "lodash"; +import { inject, injectable } from "tsyringe"; +import { Args, FieldResolver, Query, Resolver, Root } from "type-graphql"; +import { DataKyselyService } from "../../../client/kysely.js"; +import { GetHyperboardsArgs } from "../../../graphql/schemas/args/hyperboardArgs.js"; +import { + GetHyperboardOwnersResponse, + GetHyperboardsResponse, + GetSectionsResponse, + Hyperboard, +} from 
"../../../graphql/schemas/typeDefs/hyperboardTypeDefs.js"; +import GetUsersResponse from "../../../graphql/schemas/typeDefs/userTypeDefs.js"; +import { CachingDatabase } from "../../../types/kyselySupabaseCaching.js"; +import { DataDatabase } from "../../../types/kyselySupabaseData.js"; +import { processCollectionToSection } from "../../../utils/processCollectionToSection.js"; +import { processSectionsToHyperboardOwnership } from "../../../utils/processSectionsToHyperboardOwnership.js"; +import { AllowlistRecordService } from "../../database/entities/AllowListRecordEntityService.js"; +import { CollectionService } from "../../database/entities/CollectionEntityService.js"; +import { FractionService } from "../../database/entities/FractionEntityService.js"; +import { HyperboardService } from "../../database/entities/HyperboardEntityService.js"; +import { HypercertsService } from "../../database/entities/HypercertsEntityService.js"; +import { MetadataService } from "../../database/entities/MetadataEntityService.js"; +import { UsersService } from "../../database/entities/UsersEntityService.js"; + +/** + * GraphQL resolver for Hyperboard operations. + * Handles queries for hyperboards and resolves related fields like sections, owners, and admins. 
+ * + * This resolver provides: + * - Query for fetching hyperboards with optional filtering + * - Field resolution for sections within a hyperboard + * - Field resolution for hyperboard owners + * - Field resolution for hyperboard admins + * + * Error Handling: + * If an operation fails, it will: + * - Log the error internally for monitoring + * - Return null/empty data to the client + * - Include error information in the GraphQL response errors array + * + * @injectable Marks the class as injectable for dependency injection with tsyringe + * @resolver Marks the class as a GraphQL resolver for the Hyperboard type + */ +@injectable() +@Resolver(() => Hyperboard) +class HyperboardResolver { + /** + * Creates a new instance of HyperboardResolver. + * + * @param hyperboardService - Service for handling hyperboard operations + * @param fractionsService - Service for handling fraction operations + * @param allowlistRecordService - Service for handling allowlist records + * @param hypercertsService - Service for handling hypercerts + * @param metadataService - Service for handling metadata + * @param usersService - Service for handling users + * @param collectionService - Service for handling collections + * @param dbService - Service for database operations + */ + constructor( + @inject(HyperboardService) + private hyperboardService: HyperboardService, + @inject(FractionService) + private fractionsService: FractionService, + @inject(AllowlistRecordService) + private allowlistRecordService: AllowlistRecordService, + @inject(HypercertsService) + private hypercertsService: HypercertsService, + @inject(MetadataService) + private metadataService: MetadataService, + @inject(UsersService) + private usersService: UsersService, + @inject(CollectionService) + private collectionService: CollectionService, + @inject(DataKyselyService) + private dbService: DataKyselyService, + ) {} + + /** + * Queries hyperboards based on provided arguments. 
+ * Returns both the matching hyperboards and a total count. + * + * @param args - Query arguments for filtering hyperboards + * @returns A promise that resolves to an object containing: + * - data: Array of hyperboards matching the query + * - count: Total number of matching hyperboards + * + * @example + * ```graphql + * query { + * hyperboards( + * where: { + * name: { contains: "Research" } + * } + * ) { + * data { + * id + * name + * sections { + * data { + * id + * name + * } + * } + * } + * count + * } + * } + * ``` + */ + @Query(() => GetHyperboardsResponse) + async hyperboards(@Args() args: GetHyperboardsArgs) { + try { + return await this.hyperboardService.getHyperboards(args); + } catch (e) { + console.error( + `[HyperboardResolver::hyperboards] Error fetching hyperboards: ${(e as Error).message}`, + ); + return null; + } + } + + /** + * Resolves the sections field for a hyperboard. + * Returns all sections that belong to the specified hyperboard. + * + * @param hyperboard - The hyperboard for which to resolve sections + * @returns A promise resolving to: + * - Array of sections if found + * - null if hyperboard ID is undefined or an error occurs + * + * @example + * ```graphql + * query { + * hyperboards { + * data { + * id + * name + * sections { + * data { + * id + * name + * collection { + * id + * name + * } + * } + * } + * } + * } + * } + * ``` + */ + @FieldResolver(() => GetSectionsResponse) + async sections(@Root() hyperboard: Hyperboard) { + if (!hyperboard.id) { + console.error( + "[HyperboardResolver::sections] Hyperboard ID is undefined", + ); + return null; + } + + try { + const hyperboardId = hyperboard.id; + + // Get collections for this hyperboard + const { data: collections } = + await this.hyperboardService.getHyperboardCollections(hyperboardId); + + // Process each collection into a section + const sections = await Promise.all( + collections.map(async (collection) => { + if (!collection.id) { + throw new Error( + 
`[HyperboardResolver::sections] Collection has no id`, + ); + } + + // Get all hypercert IDs for the collection + const collectionHypercertIds = + await this.collectionService.getCollectionHypercertIds( + collection.id, + ); + + const hypercertIds = collectionHypercertIds.map( + (hypercertId) => hypercertId.hypercert_id, + ); + + // Fetch all related data in parallel + const [fractions, allowlistEntries, hypercerts, metadata] = + await Promise.all([ + this.fractionsService + .getFractions({ + where: { hypercert_id: { in: hypercertIds } }, + first: Number.MAX_SAFE_INTEGER, + }) + .then((res) => res.data), + this.allowlistRecordService + .getAllowlistRecords({ + where: { + hypercert_id: { in: hypercertIds }, + claimed: { eq: false }, + }, + first: Number.MAX_SAFE_INTEGER, + }) + .then((res) => res.data), + this.hypercertsService + .getHypercerts({ + where: { hypercert_id: { in: hypercertIds } }, + first: Number.MAX_SAFE_INTEGER, + }) + .then((res) => res.data), + this.hypercertsService.getHypercertMetadataSets({ + hypercert_ids: hypercertIds, + }), + ]); + + const metadataByUri = _.keyBy(metadata, "uri"); + + // Get blueprints and metadata + const [ + collectionBlueprints, + hyperboardHypercertMetadata, + blueprintMetadata, + ] = await Promise.all([ + this.collectionService.getCollectionBlueprints(collection.id), + this.hyperboardService.getHyperboardHypercertMetadata(hyperboardId), + this.hyperboardService.getHyperboardBlueprintMetadata(hyperboardId), + ]); + + const blueprints = collectionBlueprints.data || []; + + // Get users for all entities + const users = await this.getUsers( + fractions, + allowlistEntries, + blueprints, + ); + + return processCollectionToSection({ + collection, + hyperboardHypercertMetadata, + blueprints, + fractions: this.filterValidFractions(fractions, hypercertIds), + blueprintMetadata, + allowlistEntries: this.filterValidAllowlistEntries( + allowlistEntries, + hypercertIds, + ), + hypercerts: this.enrichHypercertsWithMetadata( + // 
@ts-expect-error - claim_attestation_count is not in the type + hypercerts, + metadataByUri, + ), + users: users?.filter((x) => !!x) || [], + }); + }), + ); + + return { data: sections, count: sections.length }; + } catch (e) { + console.error( + `[HyperboardResolver::sections] Error fetching sections for hyperboard ${hyperboard.id}: ${(e as Error).message}`, + ); + return null; + } + } + + /** + * Resolves the owners field for a hyperboard. + * Returns all users who own fractions or have allowlist entries in the hyperboard. + * + * @param hyperboard - The hyperboard for which to resolve owners + * @returns A promise resolving to: + * - Array of owners if found + * - null if an error occurs + */ + @FieldResolver(() => GetHyperboardOwnersResponse) + async owners(@Root() hyperboard: Hyperboard) { + try { + const sections = await this.sections(hyperboard); + + if (!sections) { + return []; + } + + return processSectionsToHyperboardOwnership(sections.data); + } catch (e) { + console.error( + `[HyperboardResolver::owners] Error fetching owners for hyperboard ${hyperboard.id}: ${(e as Error).message}`, + ); + return null; + } + } + + /** + * Resolves the admins field for a hyperboard. + * Returns all users who have admin privileges for the specified hyperboard. 
+ * + * @param hyperboard - The hyperboard for which to resolve admins + * @returns A promise resolving to: + * - Array of admins if found + * - null if hyperboard ID is undefined or an error occurs + */ + @FieldResolver(() => GetUsersResponse) + async admins(@Root() hyperboard: Hyperboard) { + if (!hyperboard.id) { + console.error("[HyperboardResolver::admins] Hyperboard ID is undefined"); + return null; + } + + try { + return await this.hyperboardService.getHyperboardAdmins(hyperboard.id); + } catch (e) { + console.error( + `[HyperboardResolver::admins] Error fetching admins for hyperboard ${hyperboard.id}: ${(e as Error).message}`, + ); + return null; + } + } + + /** + * Helper method to fetch users for fractions, allowlist entries, and blueprints. + * Deduplicates user addresses and fetches user data in bulk. + */ + private async getUsers( + fractions: Selectable[], + allowlistEntries: Selectable< + CachingDatabase["claimable_fractions_with_proofs"] + >[], + blueprints: Selectable[], + ) { + try { + const ownerAddresses = _.uniq([ + ...fractions.map((x) => x?.owner_address), + ...allowlistEntries.flatMap((x) => x?.user_address), + ...blueprints.map((blueprint) => blueprint.minter_address), + ]).filter((x): x is string => !!x); + + return await this.usersService + .getUsers({ + where: { address: { in: ownerAddresses } }, + }) + .then((res) => res.data); + } catch (e) { + console.error( + `[HyperboardResolver::getUsers] Error fetching users: ${(e as Error).message}`, + ); + return null; + } + } + + /** + * Helper method to filter valid fractions. + * Ensures fractions have hypercert IDs and belong to the given set of hypercert IDs. + */ + private filterValidFractions( + fractions: Selectable[], + hypercertIds: string[], + ) { + return fractions.filter( + (fraction): fraction is NonNullable => + !!fraction?.hypercert_id && + hypercertIds.includes(fraction.hypercert_id), + ); + } + + /** + * Helper method to filter valid allowlist entries. 
+ * Ensures entries have hypercert IDs and belong to the given set of hypercert IDs. + */ + private filterValidAllowlistEntries( + allowlistEntries: Selectable< + CachingDatabase["claimable_fractions_with_proofs"] + >[], + hypercertIds: string[], + ) { + return allowlistEntries.filter( + (entry): entry is NonNullable => + !!entry?.hypercert_id && hypercertIds.includes(entry.hypercert_id), + ); + } + + /** + * Helper method to enrich hypercerts with their metadata. + * Combines hypercert data with metadata from the metadata URI. + */ + private enrichHypercertsWithMetadata( + hypercerts: Selectable[], + metadataByUri: Record>, + ) { + return hypercerts.map((hypercert) => ({ + ...hypercert, + name: (hypercert.uri && metadataByUri[hypercert.uri]?.name) || "", + })); + } +} + +export { HyperboardResolver }; diff --git a/src/services/graphql/resolvers/hypercertResolver.ts b/src/services/graphql/resolvers/hypercertResolver.ts new file mode 100644 index 00000000..fe08dfc7 --- /dev/null +++ b/src/services/graphql/resolvers/hypercertResolver.ts @@ -0,0 +1,488 @@ +import { parseClaimOrFractionId } from "@hypercerts-org/sdk"; +import _ from "lodash"; +import "reflect-metadata"; +import { inject, injectable } from "tsyringe"; +import { Args, FieldResolver, Query, Resolver, Root } from "type-graphql"; +import { AttestationService } from "../../database/entities/AttestationEntityService.js"; +import { ContractService } from "../../database/entities/ContractEntityService.js"; +import { FractionService } from "../../database/entities/FractionEntityService.js"; +import { HypercertsService } from "../../database/entities/HypercertsEntityService.js"; +import { + MarketplaceOrderSelect, + MarketplaceOrdersService, +} from "../../database/entities/MarketplaceOrdersEntityService.js"; +import { MetadataService } from "../../database/entities/MetadataEntityService.js"; +import { SalesService } from "../../database/entities/SalesEntityService.js"; +import { Database } from 
"../../../types/supabaseData.js"; +import { addPriceInUsdToOrder } from "../../../utils/addPriceInUSDToOrder.js"; +import { getCheapestOrder } from "../../../utils/getCheapestOrder.js"; +import { getMaxUnitsForSaleInOrders } from "../../../utils/getMaxUnitsForSaleInOrders.js"; +import { GetHypercertsArgs } from "../../../graphql/schemas/args/hypercertsArgs.js"; +import { + GetHypercertsResponse, + Hypercert, +} from "../../../graphql/schemas/typeDefs/hypercertTypeDefs.js"; + +/** + * GraphQL resolver for Hypercert operations. + * Handles queries for hypercerts and resolves related fields. + * + * This resolver provides: + * - Query for fetching hypercerts with optional filtering + * - Field resolution for: + * - metadata: Associated metadata from IPFS + * - contract: Contract details + * - attestations: Related attestations + * - fractions: Ownership fractions + * - sales: Sales history + * + * Error Handling: + * All resolvers follow the GraphQL best practice of returning partial data instead of throwing errors. + * If an operation fails, it will: + * - Log the error internally for monitoring + * - Return null/empty data to the client + * - Include error information in the GraphQL response errors array + */ +@injectable() +@Resolver(() => Hypercert) +class HypercertResolver { + constructor( + @inject(HypercertsService) + private hypercertsService: HypercertsService, + @inject(MetadataService) + private metadataService: MetadataService, + @inject(ContractService) + private contractService: ContractService, + @inject(AttestationService) + private attestationService: AttestationService, + @inject(FractionService) + private fractionService: FractionService, + @inject(SalesService) + private salesService: SalesService, + @inject(MarketplaceOrdersService) + private marketplaceOrdersService: MarketplaceOrdersService, + ) {} + + /** + * Resolves hypercerts queries with optional filtering. 
+ * + * @param args - Query arguments for filtering hypercerts + * @returns A promise resolving to: + * - data: Array of hypercerts matching the criteria + * - count: Total number of matching records + * - null if an error occurs + * + * @example + * ```graphql + * query { + * hypercerts(where: { hypercert_id: { eq: "1-0x1234...5678-123" } }) { + * data { + * id + * hypercert_id + * metadata { + * name + * description + * } + * } + * count + * } + * } + * ``` + */ + @Query(() => GetHypercertsResponse) + async hypercerts(@Args() args: GetHypercertsArgs) { + try { + return await this.hypercertsService.getHypercerts(args); + } catch (e) { + console.error( + `[HypercertResolver::hypercerts] Error fetching hypercerts: ${(e as Error).message}`, + ); + return null; + } + } + + /** + * Resolves the metadata field for a hypercert. + * This field resolver is called automatically when the metadata field is requested in a query. + * + * @param hypercert - The hypercert for which to resolve metadata + * @returns A promise resolving to: + * - The associated metadata if found + * - null if: + * - No URI is available + * - No matching metadata is found + * - An error occurs during retrieval + * + * @example + * ```graphql + * query { + * hypercerts { + * data { + * id + * metadata { + * name + * description + * work_scope + * } + * } + * } + * } + * ``` + */ + @FieldResolver({ nullable: true }) + async metadata(@Root() hypercert: Hypercert) { + try { + if (!hypercert.uri) { + console.warn( + `[HypercertResolver::metadata] No uri found for hypercert ${hypercert.id}`, + ); + return null; + } + + return await this.metadataService.getMetadataSingle({ + where: { uri: { eq: hypercert.uri } }, + }); + } catch (e) { + console.error( + `[HypercertResolver::metadata] Error fetching metadata: ${(e as Error).message}`, + ); + return null; + } + } + + /** + * Resolves the contract field for a hypercert. 
+ * This field resolver is called automatically when the contract field is requested in a query. + * + * @param hypercert - The hypercert for which to resolve contract details + * @returns A promise resolving to: + * - The associated contract if found + * - null if: + * - No contracts_id is available + * - No matching contract is found + * - An error occurs during retrieval + * + * @example + * ```graphql + * query { + * hypercerts { + * data { + * id + * contract { + * chain_id + * contract_address + * } + * } + * } + * } + * ``` + */ + @FieldResolver() + async contract(@Root() hypercert: Hypercert) { + try { + if (!hypercert.contracts_id) { + console.warn( + `[HypercertResolver::contract] No contract id found for hypercert ${hypercert.id}`, + ); + return null; + } + + return await this.contractService.getContract({ + where: { id: { eq: hypercert.contracts_id } }, + }); + } catch (e) { + console.error( + `[HypercertResolver::contract] Error fetching contract: ${(e as Error).message}`, + ); + return null; + } + } + + /** + * Resolves the attestations field for a hypercert. + * This field resolver is called automatically when the attestations field is requested in a query. 
+ * + * @param hypercert - The hypercert for which to resolve attestations + * @returns A promise resolving to: + * - Array of attestations if found + * - null if: + * - No hypercert id is available + * - An error occurs during retrieval + * + * @example + * ```graphql + * query { + * hypercerts { + * data { + * id + * attestations { + * data { + * id + * data + * } + * } + * } + * } + * } + * ``` + */ + @FieldResolver() + async attestations(@Root() hypercert: Hypercert) { + try { + if (!hypercert.id) { + console.warn( + `[HypercertResolver::attestations] No id found for hypercert`, + ); + return null; + } + + return await this.attestationService.getAttestations({ + where: { hypercert: { id: { eq: hypercert.id } } }, + }); + } catch (e) { + console.error( + `[HypercertResolver::attestations] Error fetching attestations: ${(e as Error).message}`, + ); + return null; + } + } + + /** + * Resolves the fractions field for a hypercert. + * This field resolver is called automatically when the fractions field is requested in a query. 
+ * + * @param hypercert - The hypercert for which to resolve fractions + * @returns A promise resolving to: + * - Array of fractions if found + * - null if: + * - No hypercert_id is available + * - An error occurs during retrieval + * + * @example + * ```graphql + * query { + * hypercerts { + * data { + * id + * fractions { + * data { + * id + * units + * owner_address + * } + * } + * } + * } + * } + * ``` + */ + @FieldResolver() + async fractions(@Root() hypercert: Hypercert) { + try { + if (!hypercert.hypercert_id) { + console.warn( + `[HypercertResolver::fractions] No hypercert id found for ${hypercert.id}`, + ); + return null; + } + + return await this.fractionService.getFractions({ + where: { hypercert_id: { eq: hypercert.hypercert_id } }, + }); + } catch (e) { + console.error( + `[HypercertResolver::fractions] Error fetching fractions: ${(e as Error).message}`, + ); + return null; + } + } + + /** + * Resolves the orders field for a hypercert. + * This field resolver is called automatically when the orders field is requested in a query. 
+ * + * @param hypercert - The hypercert for which to resolve orders + * @returns A promise resolving to: + * - The associated orders if found + * - null if: + * - No hypercert_id is available + * - An error occurs during retrieval + * + * @example + * ```graphql + * query { + * hypercerts { + * data { + * id + * orders { + * data { + * id + * price + * timestamp + * } + * } + * } + * } + * } + * ``` + */ + @FieldResolver() + async orders(@Root() hypercert: Hypercert) { + if (!hypercert.id || !hypercert.hypercert_id) { + return null; + } + + const defaultValue = { + data: [], + count: 0, + totalUnitsForSale: BigInt(0), + }; + + try { + const [{ data: fractions }, orders] = await Promise.all([ + this.fractionService.getFractions({ + where: { hypercert_id: { eq: hypercert.hypercert_id } }, + }), + this.marketplaceOrdersService.getOrders({ + where: { + hypercert_id: { eq: hypercert.hypercert_id }, + invalidated: { eq: false }, + }, + }), + ]); + + if (!fractions || !orders?.data) { + console.warn( + `[HypercertResolver::orders] Error fetching data for ${hypercert.hypercert_id}`, + ); + return defaultValue; + } + + const { data: ordersData, count: ordersCount } = orders; + + const ordersByFraction = _.groupBy( + ordersData, + (order) => (order.itemIds as unknown as string[])[0], + ); + + const { chainId, contractAddress } = parseClaimOrFractionId( + hypercert.hypercert_id, + ); + + // const ordersWithPrices: (Database["public"]["Tables"]["marketplace_orders"]["Row"] & { + // priceInUSD: string; + // pricePerPercentInUSD: string; + // })[] = []; + + // const ordersByFraction = _.groupBy( + // ordersData, + // (order) => (order.itemIds as unknown as string[])[0], + // ); + + // Process all orders with prices in parallel + const ordersWithPrices = await Promise.all( + ordersData.map(async (order) => { + const orderWithPrice = await addPriceInUsdToOrder( + order as unknown as Database["public"]["Tables"]["marketplace_orders"]["Row"], + hypercert.units as bigint, + ); + 
return { + ...orderWithPrice, + pricePerPercentInUSD: + orderWithPrice.pricePerPercentInUSD.toString(), + }; + }), + ); + + // For each fraction, find all orders and find the max units for sale for that fraction + const totalUnitsForSale = ( + await Promise.all( + Object.entries(ordersByFraction).map(async ([tokenId, orders]) => { + const fractionId = `${chainId}-${contractAddress}-${tokenId}`; + const fraction = fractions.find( + (f) => (f.fraction_id as unknown as string) === fractionId, + ); + + if (!fraction) { + console.error( + `[HypercertResolver::orders] Fraction not found for ${fractionId}`, + ); + return BigInt(0); + } + + return getMaxUnitsForSaleInOrders( + orders as MarketplaceOrderSelect[], + BigInt(fraction.units as unknown as bigint), + ); + }), + ) + ).reduce((acc, val) => acc + val, BigInt(0)); + + const cheapestOrder = getCheapestOrder(ordersWithPrices); + + return { + totalUnitsForSale, + cheapestOrder, + data: ordersWithPrices || [], + count: ordersCount || 0, + }; + } catch (e) { + console.error( + `[HypercertResolver::orders] Error fetching orders for ${hypercert.hypercert_id}: ${(e as Error).toString()}`, + ); + return defaultValue; + } + } + + /** + * Resolves the sales field for a hypercert. + * This field resolver is called automatically when the sales field is requested in a query. 
+ * + * @param hypercert - The hypercert for which to resolve sales history + * @returns A promise resolving to: + * - Array of sales if found + * - null if: + * - No hypercert_id is available + * - An error occurs during retrieval + * + * @example + * ```graphql + * query { + * hypercerts { + * data { + * id + * sales { + * data { + * id + * price + * timestamp + * } + * } + * } + * } + * } + * ``` + */ + @FieldResolver() + async sales(@Root() hypercert: Hypercert) { + try { + if (!hypercert.hypercert_id) { + console.warn( + `[HypercertResolver::sales] No hypercert id found for ${hypercert.id}`, + ); + return null; + } + + return await this.salesService.getSales({ + where: { hypercert_id: { eq: hypercert.hypercert_id } }, + }); + } catch (e) { + console.error( + `[HypercertResolver::sales] Error fetching sales: ${(e as Error).message}`, + ); + return null; + } + } +} + +export { HypercertResolver }; diff --git a/src/services/graphql/resolvers/metadataResolver.ts b/src/services/graphql/resolvers/metadataResolver.ts new file mode 100644 index 00000000..b8e620f1 --- /dev/null +++ b/src/services/graphql/resolvers/metadataResolver.ts @@ -0,0 +1,123 @@ +import { inject, injectable } from "tsyringe"; +import { Args, FieldResolver, Query, Resolver, Root } from "type-graphql"; +import { CachingKyselyService } from "../../../client/kysely.js"; +import { GetMetadataArgs } from "../../../graphql/schemas/args/metadataArgs.js"; +import { + GetMetadataResponse, + Metadata, +} from "../../../graphql/schemas/typeDefs/metadataTypeDefs.js"; +import { MetadataService } from "../../database/entities/MetadataEntityService.js"; + +/** + * GraphQL resolver for Metadata operations. + * Handles queries for metadata records and resolves related fields. 
+ * + * This resolver provides: + * - Query for fetching metadata with optional filtering + * - Field resolution for image data (handled separately for performance) + * + * Error Handling: + * All resolvers follow the GraphQL best practice of returning partial data instead of throwing errors. + * If an operation fails, it will: + * - Log the error internally for monitoring + * - Return null/empty data to the client + * - Include error information in the GraphQL response errors array + */ +@injectable() +@Resolver(() => Metadata) +class MetadataResolver { + constructor( + @inject(MetadataService) + private metadataService: MetadataService, + @inject(CachingKyselyService) + private cachingKyselyService: CachingKyselyService, + ) {} + + /** + * Resolves metadata queries with optional filtering. + * + * @param args - Query arguments for filtering metadata records + * @returns A promise resolving to: + * - data: Array of metadata records matching the criteria + * - count: Total number of matching records + * Returns null if an error occurs + * + * @example + * ```graphql + * query { + * metadata(where: { uri: { eq: "ipfs://..." } }) { + * data { + * id + * name + * description + * image + * } + * count + * } + * } + * ``` + */ + @Query(() => GetMetadataResponse) + async metadata(@Args() args: GetMetadataArgs) { + try { + return await this.metadataService.getMetadata(args); + } catch (e) { + console.error( + `[MetadataResolver::metadata] Error fetching metadata: ${(e as Error).message}`, + ); + return null; + } + } + + /** + * Resolves the image field for a metadata record. + * Handled separately from other fields for performance optimization. 
+ * + * @param metadata - The metadata record for which to resolve the image + * @returns A promise resolving to: + * - The image data if found + * - null if: + * - No URI is available + * - No image data exists + * - An error occurs during retrieval + * + * @example + * ```graphql + * query { + * metadata { + * data { + * id + * image # This field is resolved by this resolver + * } + * } + * } + * ``` + */ + @FieldResolver(() => String) + async image(@Root() metadata: Metadata) { + if (!metadata.uri) { + console.warn( + `[MetadataResolver::image] No URI found for metadata ${metadata.id}`, + ); + return null; + } + + try { + const result = await this.cachingKyselyService + .getConnection() + .selectFrom("metadata") + .where("uri", "=", metadata.uri) + .select("image") + .executeTakeFirst(); + + return result?.image ?? null; + } catch (e) { + console.error( + `[MetadataResolver::image] Error fetching image for metadata ${metadata.id}: ${(e as Error).message}`, + ); + return null; + } + } +} + +export { MetadataResolver }; diff --git a/src/services/graphql/resolvers/orderResolver.ts b/src/services/graphql/resolvers/orderResolver.ts new file mode 100644 index 00000000..5f8425d5 --- /dev/null +++ b/src/services/graphql/resolvers/orderResolver.ts @@ -0,0 +1,218 @@ +import _ from "lodash"; +import { inject, injectable } from "tsyringe"; +import { Args, FieldResolver, Query, Resolver, Root } from "type-graphql"; +import { getAddress } from "viem"; +import { GetOrdersArgs } from "../../../graphql/schemas/args/orderArgs.js"; +import { + GetOrdersResponse, + Order, +} from "../../../graphql/schemas/typeDefs/orderTypeDefs.js"; +import { addPriceInUsdToOrder } from "../../../utils/addPriceInUSDToOrder.js"; +import { getHypercertTokenId } from "../../../utils/tokenIds.js"; +import { HypercertsService } from "../../database/entities/HypercertsEntityService.js"; +import { MarketplaceOrdersService } from "../../database/entities/MarketplaceOrdersEntityService.js"; + +/** + * 
GraphQL resolver for marketplace orders. + * Handles queries for orders and resolves related fields. + * + * This resolver provides: + * - Query for fetching orders with optional filtering + * - Price calculation in USD for each order + * - Field resolution for: + * - hypercert: Associated hypercert details and metadata + * + * Error Handling: + * - Query operations throw errors to be handled by the GraphQL error handler + * - Field resolvers return null on errors to allow partial data resolution + * - All errors are logged for monitoring + * + * @injectable Marks the class as injectable for dependency injection with tsyringe + * @resolver Marks the class as a GraphQL resolver for the Order type + */ +@injectable() +@Resolver(() => Order) +class OrderResolver { + constructor( + @inject(MarketplaceOrdersService) + private readonly marketplaceOrdersService: MarketplaceOrdersService, + @inject(HypercertsService) + private readonly hypercertService: HypercertsService, + ) {} + + /** + * Queries marketplace orders based on provided arguments. + * Fetches associated hypercerts and calculates USD prices. + * + * @param args - Query arguments for filtering orders + * @returns A promise resolving to: + * - data: Array of orders with USD prices + * - count: Total number of matching records + * @throws Error if the operation fails + * + * @example + * ```graphql + * query { + * orders( + * where: { + * seller: { eq: "0x..." 
} + * status: { eq: "active" } + * } + * ) { + * data { + * id + * price + * priceInUsd + * seller + * status + * hypercert { + * id + * metadata { + * name + * description + * } + * } + * } + * count + * } + * } + * ``` + */ + @Query(() => GetOrdersResponse) + async orders(@Args() args: GetOrdersArgs) { + try { + const ordersRes = await this.marketplaceOrdersService.getOrders(args); + + if (!ordersRes || !ordersRes.data || !ordersRes.count) { + return { + data: [], + count: 0, + }; + } + + const { data, count } = ordersRes; + + // Get unique hypercert IDs and convert to lowercase once + const allHypercertIds = _.uniq( + data.map((order) => order.hypercert_id as unknown as string), + ); + + // Fetch hypercerts in parallel with any other async operations + const { data: hypercertsData } = + await this.hypercertService.getHypercerts({ + where: { + hypercert_id: { in: allHypercertIds }, + }, + }); + + // Create lookup map with lowercase keys + const hypercerts = new Map( + hypercertsData.map((h) => [ + (h.hypercert_id as unknown as string)?.toLowerCase(), + h, + ]), + ); + + // Process orders in parallel since addPriceInUsdToOrder is async + const ordersWithPrices = await Promise.all( + data.map(async (order) => { + const hypercert = hypercerts.get( + (order.hypercert_id as unknown as string)?.toLowerCase(), + ); + if (!hypercert?.units) { + console.warn( + `[OrderResolver::orders] No hypercert units found for hypercert_id: ${order.hypercert_id}`, + ); + return order; + } + return addPriceInUsdToOrder( + order, + hypercert.units as unknown as bigint, + ); + }), + ); + + return { + data: ordersWithPrices, + count: count ?? ordersWithPrices.length, + }; + } catch (e) { + throw new Error( + `[OrderResolver::orders] Error fetching orders: ${(e as Error).message}`, + ); + } + } + + /** + * Resolves the hypercert field for an order. + * This field resolver is called automatically when the hypercert field is requested in a query. 
+ * + * @param order - The order for which to resolve the hypercert + * @returns A promise resolving to: + * - The hypercert with its metadata if found + * - null if: + * - Required fields are missing + * - An error occurs during retrieval + * + * @example + * ```graphql + * query { + * orders { + * data { + * id + * hypercert { + * id + * uri + * metadata { + * name + * description + * image + * } + * } + * } + * } + * } + * ``` + */ + @FieldResolver({ nullable: true }) + async hypercert(@Root() order: Order) { + try { + const tokenId = order.itemIds?.[0]; + const collectionId = order.collection; + const chainId = order.chainId; + + if (!tokenId || !collectionId || !chainId) { + console.warn( + `[OrderResolver::hypercert] Missing tokenId or collectionId`, + ); + return null; + } + + const hypercertId = getHypercertTokenId(BigInt(tokenId)); + const formattedHypercertId = `${chainId}-${getAddress(collectionId)}-${hypercertId.toString()}`; + + const [hypercert, metadata] = await Promise.all([ + this.hypercertService.getHypercert({ + where: { + hypercert_id: { eq: formattedHypercertId }, + }, + }), + this.hypercertService.getHypercertMetadata({ + hypercert_id: formattedHypercertId, + }), + ]); + + return { + ...hypercert, + metadata: metadata || null, + }; + } catch (e) { + console.error( + `[OrderResolver::hypercert] Error resolving hypercert: ${(e as Error).message}`, + ); + return null; + } + } +} + +export { OrderResolver }; diff --git a/src/services/graphql/resolvers/salesResolver.ts b/src/services/graphql/resolvers/salesResolver.ts new file mode 100644 index 00000000..30e54ccf --- /dev/null +++ b/src/services/graphql/resolvers/salesResolver.ts @@ -0,0 +1,131 @@ +import { inject, injectable } from "tsyringe"; +import { Args, FieldResolver, Query, Resolver, Root } from "type-graphql"; +import { HypercertsService } from "../../database/entities/HypercertsEntityService.js"; +import { SalesService } from "../../database/entities/SalesEntityService.js"; +import { 
GetSalesArgs } from "../../../graphql/schemas/args/salesArgs.js"; +import { + Sale, + GetSalesResponse, +} from "../../../graphql/schemas/typeDefs/salesTypeDefs.js"; + +/** + * Resolver for handling sales-related GraphQL queries and field resolvers. + * This resolver provides functionality to: + * 1. Query sales with filtering and pagination + * 2. Resolve the associated hypercert for a sale + */ +@injectable() +@Resolver(() => Sale) +class SalesResolver { + constructor( + @inject(SalesService) + private salesService: SalesService, + @inject(HypercertsService) + private hypercertsService: HypercertsService, + ) {} + + /** + * Query resolver for fetching sales with optional filtering and pagination. + * + * @param args - Query arguments including where conditions, sorting, and pagination + * @returns A promise resolving to: + * - Object containing sales data and count if successful + * - null if an error occurs during retrieval + * + * @example + * ```graphql + * query { + * sales( + * where: { hypercert_id: { eq: "123" } } + * first: 10 + * offset: 0 + * ) { + * data { + * id + * buyer + * seller + * hypercert { + * id + * } + * } + * count + * } + * } + * ``` + */ + @Query(() => GetSalesResponse) + async sales(@Args() args: GetSalesArgs) { + try { + return await this.salesService.getSales(args); + } catch (e) { + console.error( + `[SalesResolver::sales] Error fetching sales: ${(e as Error).message}`, + ); + return null; + } + } + + /** + * Field resolver for the hypercert associated with a sale. + * This resolver is called automatically when the hypercert field is requested in a query. 
+ * + * @param sale - The sale for which to resolve the associated hypercert + * @returns A promise resolving to: + * - The associated hypercert if found + * - null if: + * - No hypercert_id is available + * - The hypercert is not found + * - An error occurs during retrieval + * + * @example + * ```graphql + * query { + * sales { + * data { + * id + * hypercert { + * id + * hypercert_id + * } + * } + * } + * } + * ``` + */ + @FieldResolver({ nullable: true }) + async hypercert(@Root() sale: Sale) { + if (!sale.hypercert_id) { + console.warn(`[SalesResolver::hypercert_id] Missing hypercert_id`); + return null; + } + + try { + const [hypercert, metadata] = await Promise.all([ + this.hypercertsService.getHypercert({ + where: { + hypercert_id: { eq: sale.hypercert_id }, + }, + }), + this.hypercertsService.getHypercertMetadata({ + hypercert_id: sale.hypercert_id, + }), + ]); + + if (!hypercert) { + return null; + } + + return { + ...hypercert, + metadata: metadata || null, + }; + } catch (e) { + console.error( + `[SalesResolver::hypercert] Error fetching hypercert: ${(e as Error).message}`, + ); + return null; + } + } +} + +export { SalesResolver }; diff --git a/src/services/graphql/resolvers/signatureRequestResolver.ts b/src/services/graphql/resolvers/signatureRequestResolver.ts new file mode 100644 index 00000000..91f7d733 --- /dev/null +++ b/src/services/graphql/resolvers/signatureRequestResolver.ts @@ -0,0 +1,53 @@ +import { Args, FieldResolver, Query, Resolver, Root } from "type-graphql"; + +import { GetSignatureRequestsArgs } from "../../../graphql/schemas/args/signatureRequestArgs.js"; +import { + GetSignatureRequestResponse, + SignatureRequest, +} from "../../../graphql/schemas/typeDefs/signatureRequestTypeDefs.js"; + +import { inject, injectable } from "tsyringe"; +import { SignatureRequestsService } from "../../database/entities/SignatureRequestsEntityService.js"; + +/** + * GraphQL resolver for signature requests. 
+ * Handles queries for retrieving signature requests and resolves specific fields. + * + * A signature request represents a message that needs to be signed by a Safe wallet, + * typically used for user data updates or other authenticated operations. + */ +@injectable() +@Resolver(() => SignatureRequest) +export class SignatureRequestResolver { + constructor( + @inject(SignatureRequestsService) + private signatureRequestsService: SignatureRequestsService, + ) {} + + /** + * Query resolver for fetching signature requests. + * Can be filtered by safe address and status. + * + * @param args - Query arguments including optional safe_address and status filters + * @returns A paginated response containing signature requests and total count + */ + @Query(() => GetSignatureRequestResponse) + async signatureRequests(@Args() args: GetSignatureRequestsArgs) { + return await this.signatureRequestsService.getSignatureRequests(args); + } + + /** + * Field resolver for the message field. + * Ensures consistent string representation of messages, whether they're + * stored as objects or strings. + * + * @param signatureRequest - The signature request containing the message + * @returns The message as a string, stringified if it's an object + */ + @FieldResolver(() => String) + message(@Root() signatureRequest: SignatureRequest): string { + return typeof signatureRequest.message === "object" + ? 
JSON.stringify(signatureRequest.message) + : signatureRequest.message || "could not parse message"; + } +} diff --git a/src/services/graphql/resolvers/userResolver.ts b/src/services/graphql/resolvers/userResolver.ts new file mode 100644 index 00000000..600249bd --- /dev/null +++ b/src/services/graphql/resolvers/userResolver.ts @@ -0,0 +1,138 @@ +import { Args, FieldResolver, Query, Resolver, Root } from "type-graphql"; + +import { GetUsersArgs } from "../../../graphql/schemas/args/userArgs.js"; +import { SignatureRequest } from "../../../graphql/schemas/typeDefs/signatureRequestTypeDefs.js"; +import GetUsersResponse, { + User, +} from "../../../graphql/schemas/typeDefs/userTypeDefs.js"; + +import { inject, injectable } from "tsyringe"; +import { SignatureRequestsService } from "../../database/entities/SignatureRequestsEntityService.js"; +import { UsersService } from "../../database/entities/UsersEntityService.js"; + +/** + * GraphQL resolver for User operations. + * Handles queries for users and resolves related fields. + * + * This resolver provides: + * - Query for fetching users with optional filtering + * - Field resolution for signature requests associated with a user + * + * Error Handling: + * If an operation fails, it will: + * - Log the error internally for monitoring + * - Return null/empty data to the client + * - Include error information in the GraphQL response errors array + * + * @injectable Marks the class as injectable for dependency injection with tsyringe + * @resolver Marks the class as a GraphQL resolver for the User type + */ +@injectable() +@Resolver(() => User) +class UserResolver { + /** + * Creates a new instance of UserResolver. 
+ * + * @param usersService - Service for handling user operations + * @param signatureRequestsService - Service for handling signature request operations + */ + constructor( + @inject(UsersService) + private usersService: UsersService, + @inject(SignatureRequestsService) + private signatureRequestsService: SignatureRequestsService, + ) {} + + /** + * Queries users based on provided arguments. + * Returns both the matching users and a total count. + * + * @param args - Query arguments for filtering users + * @returns A promise that resolves to an object containing: + * - data: Array of users matching the query + * - count: Total number of matching users + * + * @example + * ```graphql + * query { + * users( + * where: { + * address: { eq: "0x..." }, + * chain_id: { eq: 1 } + * } + * ) { + * data { + * id + * address + * display_name + * avatar + * } + * count + * } + * } + * ``` + */ + @Query(() => GetUsersResponse) + async users(@Args() args: GetUsersArgs) { + try { + return await this.usersService.getUsers(args); + } catch (e) { + console.error( + `[UserResolver::users] Error fetching users: ${(e as Error).message}`, + ); + return null; + } + } + + /** + * Resolves the signature_requests field for a user. + * This field resolver is called automatically when the signature_requests field is requested in a query. 
+ * + * @param user - The user for which to resolve signature requests + * @returns A promise resolving to: + * - Array of signature requests if found + * - null if: + * - No user address is available + * - An error occurs during retrieval + * + * @example + * ```graphql + * query { + * users { + * data { + * id + * address + * signature_requests { + * id + * message + * status + * } + * } + * } + * } + * ``` + */ + @FieldResolver(() => [SignatureRequest]) + async signature_requests(@Root() user: User) { + if (!user.address) { + return null; + } + + try { + return await this.signatureRequestsService.getSignatureRequests({ + where: { + safe_address: { + eq: user.address, + }, + }, + }); + } catch (e) { + console.error( + `[UserResolver::signature_requests] Error fetching signature requests for user ${user.id}: ${(e as Error).message}`, + ); + return null; + } + } +} + +export { UserResolver }; diff --git a/src/types/api.ts b/src/types/api.ts index f3e0eac8..b167c816 100644 --- a/src/types/api.ts +++ b/src/types/api.ts @@ -199,3 +199,7 @@ export interface HyperboardUpdateRequest extends HyperboardCreateRequest { } export interface HyperboardResponse extends DataResponse<{ id: string }> {} + +export type ValidationResult<T> = + | { valid: true; data: T; errors?: Record<string, unknown> } + | { valid: false; data?: T; errors?: Record<string, unknown> }; diff --git a/src/types/argTypes.ts b/src/types/argTypes.ts new file mode 100644 index 00000000..2934c004 --- /dev/null +++ b/src/types/argTypes.ts @@ -0,0 +1,21 @@ +import { + BigIntSearchOptions, + BooleanSearchOptions, + IdSearchOptions, + NumberArraySearchOptions, + NumberSearchOptions, + StringArraySearchOptions, + StringSearchOptions, + SignatureRequestStatusSearchOptions, +} from "../graphql/schemas/inputs/searchOptions.js"; + +export const SearchOptionMap = { + string: StringSearchOptions, + number: NumberSearchOptions, + bigint: BigIntSearchOptions, + id: IdSearchOptions, + boolean: BooleanSearchOptions, + stringArray: StringArraySearchOptions, + 
numberArray: NumberArraySearchOptions, + enum: SignatureRequestStatusSearchOptions, +} as const; diff --git a/src/types/supabaseCaching.ts b/src/types/supabaseCaching.ts index 7ab7c1b6..af205dc9 100644 --- a/src/types/supabaseCaching.ts +++ b/src/types/supabaseCaching.ts @@ -115,6 +115,13 @@ export type Database = { referencedRelation: "claims"; referencedColumns: ["id"]; }, + { + foreignKeyName: "attestations_claims_id_fkey"; + columns: ["claims_id"]; + isOneToOne: false; + referencedRelation: "claims_view"; + referencedColumns: ["id"]; + }, { foreignKeyName: "attestations_claims_id_fkey"; columns: ["claims_id"]; @@ -274,6 +281,7 @@ export type Database = { }; fractions: { Row: { + burned: boolean; claims_id: string; creation_block_number: number; creation_block_timestamp: number; @@ -287,6 +295,7 @@ export type Database = { value: number | null; }; Insert: { + burned?: boolean; claims_id: string; creation_block_number: number; creation_block_timestamp: number; @@ -300,6 +309,7 @@ export type Database = { value?: number | null; }; Update: { + burned?: boolean; claims_id?: string; creation_block_number?: number; creation_block_timestamp?: number; @@ -320,6 +330,13 @@ export type Database = { referencedRelation: "claims"; referencedColumns: ["id"]; }, + { + foreignKeyName: "fractions_claims_id_fkey"; + columns: ["claims_id"]; + isOneToOne: false; + referencedRelation: "claims_view"; + referencedColumns: ["id"]; + }, { foreignKeyName: "fractions_claims_id_fkey"; columns: ["claims_id"]; @@ -389,6 +406,13 @@ export type Database = { referencedRelation: "claims"; referencedColumns: ["id"]; }, + { + foreignKeyName: "hypercert_allow_lists_claims_id_fkey"; + columns: ["claims_id"]; + isOneToOne: false; + referencedRelation: "claims_view"; + referencedColumns: ["id"]; + }, { foreignKeyName: "hypercert_allow_lists_claims_id_fkey"; columns: ["claims_id"]; @@ -559,8 +583,38 @@ export type Database = { }; Relationships: []; }; + claims_view: { + Row: { + attestations_count: 
number | null; + burned: boolean | null; + contracts_id: string | null; + creation_block_number: number | null; + creation_block_timestamp: number | null; + creator_address: string | null; + hypercert_id: string | null; + id: string | null; + last_update_block_number: number | null; + last_update_block_timestamp: number | null; + owner_address: string | null; + sales_count: number | null; + token_id: number | null; + units: number | null; + uri: string | null; + value: number | null; + }; + Relationships: [ + { + foreignKeyName: "claims_contracts_id_fkey"; + columns: ["contracts_id"]; + isOneToOne: false; + referencedRelation: "contracts"; + referencedColumns: ["id"]; + }, + ]; + }; fractions_view: { Row: { + burned: boolean | null; claims_id: string | null; creation_block_number: number | null; creation_block_timestamp: number | null; @@ -582,6 +636,13 @@ export type Database = { referencedRelation: "claims"; referencedColumns: ["id"]; }, + { + foreignKeyName: "fractions_claims_id_fkey"; + columns: ["claims_id"]; + isOneToOne: false; + referencedRelation: "claims_view"; + referencedColumns: ["id"]; + }, { foreignKeyName: "fractions_claims_id_fkey"; columns: ["claims_id"]; diff --git a/src/types/supabaseData.ts b/src/types/supabaseData.ts index 96b16c09..e363ff13 100644 --- a/src/types/supabaseData.ts +++ b/src/types/supabaseData.ts @@ -132,6 +132,13 @@ export type Database = { referencedRelation: "collections"; referencedColumns: ["id"]; }, + { + foreignKeyName: "collection_admins_collection_id_fkey"; + columns: ["collection_id"]; + isOneToOne: false; + referencedRelation: "collections_with_admins"; + referencedColumns: ["id"]; + }, ]; }; collection_blueprints: { @@ -172,6 +179,13 @@ export type Database = { referencedRelation: "collections"; referencedColumns: ["id"]; }, + { + foreignKeyName: "collection_blueprints_collection_id_fkey"; + columns: ["collection_id"]; + isOneToOne: false; + referencedRelation: "collections_with_admins"; + referencedColumns: ["id"]; 
+ }, ]; }; collections: { @@ -307,6 +321,13 @@ export type Database = { referencedRelation: "hyperboards"; referencedColumns: ["id"]; }, + { + foreignKeyName: "hyperboard_admins_hyperboard_id_fkey"; + columns: ["hyperboard_id"]; + isOneToOne: false; + referencedRelation: "hyperboards_with_admins"; + referencedColumns: ["id"]; + }, ]; }; hyperboard_blueprint_metadata: { @@ -346,6 +367,13 @@ export type Database = { referencedRelation: "collections"; referencedColumns: ["id"]; }, + { + foreignKeyName: "hyperboard_blueprint_metadata_collection_id_fkey"; + columns: ["collection_id"]; + isOneToOne: false; + referencedRelation: "collections_with_admins"; + referencedColumns: ["id"]; + }, { foreignKeyName: "hyperboard_blueprint_metadata_hyperboard_id_fkey"; columns: ["hyperboard_id"]; @@ -353,6 +381,13 @@ export type Database = { referencedRelation: "hyperboards"; referencedColumns: ["id"]; }, + { + foreignKeyName: "hyperboard_blueprint_metadata_hyperboard_id_fkey"; + columns: ["hyperboard_id"]; + isOneToOne: false; + referencedRelation: "hyperboards_with_admins"; + referencedColumns: ["id"]; + }, ]; }; hyperboard_collections: { @@ -385,6 +420,13 @@ export type Database = { referencedRelation: "hyperboards"; referencedColumns: ["id"]; }, + { + foreignKeyName: "hyperboard_registries_hyperboard_id_fkey"; + columns: ["hyperboard_id"]; + isOneToOne: false; + referencedRelation: "hyperboards_with_admins"; + referencedColumns: ["id"]; + }, { foreignKeyName: "hyperboard_registries_registries_id_fk"; columns: ["collection_id"]; @@ -392,6 +434,13 @@ export type Database = { referencedRelation: "collections"; referencedColumns: ["id"]; }, + { + foreignKeyName: "hyperboard_registries_registries_id_fk"; + columns: ["collection_id"]; + isOneToOne: false; + referencedRelation: "collections_with_admins"; + referencedColumns: ["id"]; + }, ]; }; hyperboard_hypercert_metadata: { @@ -424,6 +473,13 @@ export type Database = { referencedRelation: "collections"; referencedColumns: ["id"]; }, + 
{ + foreignKeyName: "hyperboard_hypercert_metadata_collection_id_fkey"; + columns: ["collection_id"]; + isOneToOne: false; + referencedRelation: "collections_with_admins"; + referencedColumns: ["id"]; + }, { foreignKeyName: "hyperboard_hypercert_metadata_hyperboard_id_fkey"; columns: ["hyperboard_id"]; @@ -431,6 +487,13 @@ export type Database = { referencedRelation: "hyperboards"; referencedColumns: ["id"]; }, + { + foreignKeyName: "hyperboard_hypercert_metadata_hyperboard_id_fkey"; + columns: ["hyperboard_id"]; + isOneToOne: false; + referencedRelation: "hyperboards_with_admins"; + referencedColumns: ["id"]; + }, { foreignKeyName: "hyperboard_hypercert_metadata_hypercert_id_collection_id_fkey"; columns: ["hypercert_id", "collection_id"]; @@ -494,6 +557,13 @@ export type Database = { referencedRelation: "collections"; referencedColumns: ["id"]; }, + { + foreignKeyName: "claims_registry_id_fkey"; + columns: ["collection_id"]; + isOneToOne: false; + referencedRelation: "collections_with_admins"; + referencedColumns: ["id"]; + }, ]; }; marketplace_order_nonces: { @@ -690,6 +760,37 @@ export type Database = { }; Relationships: []; }; + collections_with_admins: { + Row: { + admin_address: string | null; + admin_chain_id: number | null; + avatar: string | null; + chain_ids: number[] | null; + created_at: string | null; + description: string | null; + display_name: string | null; + hidden: boolean | null; + id: string | null; + name: string | null; + }; + Relationships: []; + }; + hyperboards_with_admins: { + Row: { + admin_address: string | null; + admin_chain_id: number | null; + avatar: string | null; + background_image: string | null; + chain_ids: number[] | null; + created_at: string | null; + display_name: string | null; + grayscale_images: boolean | null; + id: string | null; + name: string | null; + tile_border_color: string | null; + }; + Relationships: []; + }; }; Functions: { default_sponsor_metadata_by_address: { diff --git a/src/utils/addPriceInUSDToOrder.ts 
b/src/utils/addPriceInUSDToOrder.ts index e2ff6751..3994658e 100644 --- a/src/utils/addPriceInUSDToOrder.ts +++ b/src/utils/addPriceInUSDToOrder.ts @@ -1,9 +1,9 @@ -import { Database } from "../types/supabaseData.js"; +import { MarketplaceOrderSelect } from "../services/database/entities/MarketplaceOrdersEntityService.js"; import { getTokenPriceWithCurrencyFromCache } from "./getTokenPriceInUSD.js"; import { formatUnits } from "viem"; export const addPriceInUsdToOrder = async ( - order: Database["public"]["Tables"]["marketplace_orders"]["Row"], + order: MarketplaceOrderSelect, unitsInHypercerts: bigint, ) => { const { price, currency, chainId } = order; @@ -15,6 +15,18 @@ export const addPriceInUsdToOrder = async ( throw new Error(`Token price not found for ${currency}`); } + if (!tokenPrice.decimals) { + throw new Error( + `Token price data incomplete for ${currency}: decimals missing`, + ); + } + + if (!tokenPrice.price) { + throw new Error( + `Token price data incomplete for ${currency}: price missing`, + ); + } + const unitsInPercentage = BigInt(unitsInHypercerts) / BigInt(100); const pricePerPercentInTokenWei = BigInt(price) * unitsInPercentage; const pricePerPercentInToken = formatUnits( diff --git a/src/utils/constants.ts b/src/utils/constants.ts index d5855691..4541838c 100644 --- a/src/utils/constants.ts +++ b/src/utils/constants.ts @@ -16,8 +16,10 @@ export const web3upKey = getRequiredEnvVar("KEY", "WEB3UP Key"); export const web3upProof = getRequiredEnvVar("PROOF", "WEB3UP Proof"); export const indexerEnvironment = getRequiredEnvVar("INDEXER_ENVIRONMENT"); export const alchemyApiKey = getRequiredEnvVar("ALCHEMY_API_KEY"); -export const infuraApiKey = getRequiredEnvVar("INFURA_API_KEY"); export const drpcApiPkey = getRequiredEnvVar("DRPC_API_KEY"); export const cachingDatabaseUrl = getRequiredEnvVar("CACHING_DATABASE_URL"); export const dataDatabaseUrl = getRequiredEnvVar("DATA_DATABASE_URL"); export const filecoinApiKey = 
getRequiredEnvVar("FILECOIN_API_KEY"); + +const ENABLE_CRON_JOBS_ENV = getRequiredEnvVar("ENABLE_CRON_JOBS"); +export const ENABLE_CRON_JOBS = ENABLE_CRON_JOBS_ENV === "true"; diff --git a/src/utils/getCheapestOrder.ts b/src/utils/getCheapestOrder.ts index 2e5615e5..dd9329c7 100644 --- a/src/utils/getCheapestOrder.ts +++ b/src/utils/getCheapestOrder.ts @@ -1,8 +1,8 @@ import _ from "lodash"; -import { Database } from "../types/supabaseData.js"; +import { MarketplaceOrderSelect } from "../services/database/entities/MarketplaceOrdersEntityService.js"; export const getCheapestOrder = ( - orders: (Database["public"]["Tables"]["marketplace_orders"]["Row"] & { + orders: (MarketplaceOrderSelect & { pricePerPercentInUSD: string; })[], ) => diff --git a/src/utils/getFractionsById.ts b/src/utils/getFractionsById.ts index b8f8111e..9f546ce1 100644 --- a/src/utils/getFractionsById.ts +++ b/src/utils/getFractionsById.ts @@ -14,6 +14,7 @@ const fractionsByIdQuery = graphql(` } `); +//TODO: replace with service method as this is the API service calling the graph service export const getFractionsById = async (fractionId: string) => { const { data, error } = await urqlClient .query(fractionsByIdQuery, { diff --git a/src/utils/getTokenPriceInUSD.ts b/src/utils/getTokenPriceInUSD.ts index 47f965d4..5501dce0 100644 --- a/src/utils/getTokenPriceInUSD.ts +++ b/src/utils/getTokenPriceInUSD.ts @@ -127,6 +127,7 @@ type CurrencyFeeds = Record< string >; +// Get pricefeeds from https://docs.chain.link/data-feeds/price-feeds/addresses?page=1&testnetPage=1 const feedsPerChain: Record, Partial> = { [ChainId.BASE_SEPOLIA]: { ETH: "0x4aDC67696bA383F43DD60A9e78F2C97Fbbfc7cb1", @@ -157,6 +158,8 @@ const feedsPerChain: Record, Partial> = { cUSD: "0xe38A27BE4E7d866327e09736F3C570F256FFd048", USDC: "0xc7A353BaE210aed958a1A2928b654938EC59DaB2", USDT: "0x5e37AF40A7A344ec9b03CCD34a250F3dA9a20B02", + // Placeholder for USDGLO, used USDC on Celo for now + USDGLO: 
"0xc7A353BaE210aed958a1A2928b654938EC59DaB2", }, [ChainId.ARBITRUM]: { ETH: "0x639Fe6ab55C921f74e7fac1ee960C0B6293ba612", diff --git a/src/utils/processCollectionToSection.ts b/src/utils/processCollectionToSection.ts index 7356e8dc..71238e68 100644 --- a/src/utils/processCollectionToSection.ts +++ b/src/utils/processCollectionToSection.ts @@ -1,30 +1,40 @@ -import { Database as DataDatabase } from "../types/supabaseData.js"; -import { Database as CachingDatabase } from "../types/supabaseCaching.js"; import { parseUnits } from "viem"; import _ from "lodash"; import { calculateBigIntPercentage } from "./calculateBigIntPercentage.js"; +import { Section } from "../graphql/schemas/typeDefs/hyperboardTypeDefs.js"; +import { DataDatabase } from "../types/kyselySupabaseData.js"; +import { CachingDatabase } from "../types/kyselySupabaseCaching.js"; +import { Selectable } from "kysely"; + +interface ProcessCollectionToSectionArgs { + collection: Selectable; + hyperboardHypercertMetadata: Selectable< + DataDatabase["hyperboard_hypercert_metadata"] + >[]; + blueprints: Selectable[]; + blueprintMetadata: Selectable< + DataDatabase["hyperboard_blueprint_metadata"] + >[]; + fractions: Selectable[]; + allowlistEntries: Selectable< + CachingDatabase["claimable_fractions_with_proofs"] + >[]; + hypercerts: (Selectable & { + name: string; + })[]; + users: Selectable[]; +} export const processCollectionToSection = ({ blueprintMetadata, - hypercert_metadata, + hyperboardHypercertMetadata, blueprints, fractions, allowlistEntries, collection, hypercerts, users, -}: { - collection: DataDatabase["public"]["Tables"]["collections"]["Row"]; - hypercert_metadata: DataDatabase["public"]["Tables"]["hyperboard_hypercert_metadata"]["Row"][]; - blueprints: DataDatabase["public"]["Tables"]["blueprints"]["Row"][]; - blueprintMetadata: DataDatabase["public"]["Tables"]["hyperboard_blueprint_metadata"]["Row"][]; - fractions: CachingDatabase["public"]["Views"]["fractions_view"]["Row"][]; - 
allowlistEntries: CachingDatabase["public"]["Views"]["claimable_fractions_with_proofs"]["Row"][]; - hypercerts: (CachingDatabase["public"]["Tables"]["claims"]["Row"] & { - name: string; - })[]; - users: DataDatabase["public"]["Tables"]["users"]["Row"][]; -}) => { +}: ProcessCollectionToSectionArgs): Section => { const NUMBER_OF_UNITS_IN_HYPERCERT = parseUnits("1", 8); // Calculate the total number of units in all claims and blueprints combined const totalUnitsInBlueprints = @@ -36,7 +46,7 @@ export const processCollectionToSection = ({ const totalUnits = totalUnitsInClaims + totalUnitsInBlueprints; const totalOfAllDisplaySizes = [ - ...hypercert_metadata, + ...hyperboardHypercertMetadata, ...blueprintMetadata, ].reduce((acc, curr) => acc + BigInt(curr?.display_size || 0), 0n); // Calculate the amount of surface per display size unit @@ -45,7 +55,7 @@ export const processCollectionToSection = ({ const hypercertsByHypercertId = _.keyBy(hypercerts, "hypercert_id"); const hypercertMetadataByHypercertId = _.keyBy( - hypercert_metadata, + hyperboardHypercertMetadata, "hypercert_id", ); const fractionsByHypercertId = _.groupBy(fractions, "hypercert_id"); @@ -56,20 +66,20 @@ export const processCollectionToSection = ({ if (!hypercert) { throw new Error( - `[HyperboardResolver::processRegistryForDisplay] Hypercert not found for ${hypercertId}`, + `[HyperboardResolver::processCollectionToSection] Hypercert not found for ${hypercertId}`, ); } if (!metadata) { console.log(hypercertId, hypercertMetadataByHypercertId); throw new Error( - `[HyperboardResolver::processRegistryForDisplay] Metadata not found for ${hypercertId}`, + `[HyperboardResolver::processCollectionToSection] Metadata not found for ${hypercertId}`, ); } if (!metadata.display_size) { throw new Error( - `[HyperboardResolver::processRegistryForDisplay] Metadata display size not found for ${hypercertId}`, + `[HyperboardResolver::processCollectionToSection] Metadata display size not found for ${hypercertId}`, ); } @@ 
-105,7 +115,7 @@ export const processCollectionToSection = ({ .map((entry) => { if (!entry.hypercert_id) { throw new Error( - `[HyperboardResolver::processRegistryForDisplay] Allowlist entry does not have a hypercert_id`, + `[HyperboardResolver::processCollectionToSection] Allowlist entry does not have a hypercert_id`, ); } // Calculate the number of units per display unit @@ -113,13 +123,13 @@ export const processCollectionToSection = ({ if (!hypercert) { throw new Error( - `[HyperboardResolver::processRegistryForDisplay] Hypercert not found for ${entry.hypercert_id}`, + `[HyperboardResolver::processCollectionToSection] Hypercert not found for ${entry.hypercert_id}`, ); } if (!hypercert.units) { throw new Error( - `[HyperboardResolver::processRegistryForDisplay] Hypercert does not have units`, + `[HyperboardResolver::processCollectionToSection] Hypercert does not have units`, ); } @@ -139,11 +149,16 @@ export const processCollectionToSection = ({ "blueprint_id", ); const blueprintResults = blueprints.map((blueprint) => { + if (!blueprint.id) { + throw new Error( + `[HyperboardResolver::processCollectionToSection] Blueprint does not have an id`, + ); + } const blueprintMeta = blueprintMetadataByBlueprintId[blueprint.id]; if (!blueprintMeta) { throw new Error( - `[HyperboardResolver::processRegistryForDisplay] Blueprint metadata not found for ${blueprint.id}`, + `[HyperboardResolver::processCollectionToSection] Blueprint metadata not found for ${blueprint.id}`, ); } @@ -169,7 +184,7 @@ export const processCollectionToSection = ({ const fractionsWithDisplayData = fractionsResults.map((fraction) => { if (!fraction.owner) { throw new Error( - `[HyperboardResolver::processRegistryForDisplay] Fraction does not have an owner address`, + `[HyperboardResolver::processCollectionToSection] Fraction does not have an owner address`, ); } return { @@ -189,7 +204,7 @@ export const processCollectionToSection = ({ ].map((fraction) => { if (!fraction.owner) { throw new Error( - 
`[HyperboardResolver::processRegistryForDisplay] Fraction does not have an owner`, + `[HyperboardResolver::processCollectionToSection] Fraction does not have an owner`, ); } return { @@ -224,13 +239,13 @@ export const processCollectionToSection = ({ if (!hypercert) { throw new Error( - `[HyperboardResolver::processRegistryForDisplay] Hypercert not found for ${id}`, + `[HyperboardResolver::processCollectionToSection] Hypercert not found for ${id}`, ); } if (!hypercert?.units) { throw new Error( - `[HyperboardResolver::processRegistryForDisplay] Hypercert not found for ${id}`, + `[HyperboardResolver::processCollectionToSection] Hypercert not found for ${id}`, ); } @@ -238,7 +253,7 @@ export const processCollectionToSection = ({ if (!hypercert?.name) { throw new Error( - `[HyperboardResolver::processRegistryForDisplay] Hypercert name not found for ${id}`, + `[HyperboardResolver::processCollectionToSection] Hypercert name not found for ${id}`, ); } @@ -247,7 +262,7 @@ export const processCollectionToSection = ({ if (!unitsForHypercert) { throw new Error( - `[HyperboardResolver::processRegistryForDisplay] Units not found for ${id}`, + `[HyperboardResolver::processCollectionToSection] Units not found for ${id}`, ); } @@ -267,8 +282,9 @@ export const processCollectionToSection = ({ return { percentage, chain_id: fractionsPerOwner[0].displayData.chain_id, - avatar: fractionsPerOwner[0].displayData.avatar, - display_name: fractionsPerOwner[0].displayData.display_name, + avatar: fractionsPerOwner[0].displayData.avatar || undefined, + display_name: + fractionsPerOwner[0].displayData.display_name || undefined, address: fractionsPerOwner[0].displayData.address, units: totalUnitsForOwner, }; @@ -282,7 +298,7 @@ export const processCollectionToSection = ({ const display_size = displayMetadata?.display_size; if (!display_size) { throw new Error( - `[HyperboardResolver::processRegistryForDisplay] Display size not found for ${id} while processing section ${collection.id}`, + 
`[HyperboardResolver::processCollectionToSection] Display size not found for ${id} while processing section ${collection.id}`, ); } @@ -297,7 +313,14 @@ export const processCollectionToSection = ({ total_units: unitsForHypercert, name, percentage: 100, - owners, + owners: { + data: owners.map((owner) => ({ + ...owner, + percentage: owner.percentage, + units: owner.units, + })), + count: owners.length, + }, }; }, ); @@ -310,10 +333,10 @@ export const processCollectionToSection = ({ const display_size = metadata?.display_size; if (display_size === null) { throw new Error( - `[HyperboardResolver::processRegistryForDisplay] Display size not found for ${entry.id} while processing section ${collection.id}`, + `[HyperboardResolver::processCollectionToSection] Display size not found for ${entry.id} while processing section ${collection.id}`, ); } - return entry.owners.map((owner) => ({ + return entry.owners.data.map((owner) => ({ ...owner, percentage: (owner.percentage || 0) * display_size, })); @@ -324,8 +347,8 @@ export const processCollectionToSection = ({ owners.reduce((acc, curr) => acc + curr.percentage, 0) / Number(totalOfAllDisplaySizes); return { - avatar: owners[0].avatar, - display_name: owners[0].display_name, + avatar: owners[0].avatar || undefined, + display_name: owners[0].display_name || undefined, address: owners[0].address, chain_id: owners[0].chain_id, percentage_owned, @@ -335,9 +358,12 @@ export const processCollectionToSection = ({ .value(); return { - collection, + collections: [collection], label: collection.name, - entries, - owners, + entries: entries || [], + owners: { + data: owners || [], + count: owners?.length || 0, + }, }; }; diff --git a/src/utils/processSectionsToHyperboardOwnership.ts b/src/utils/processSectionsToHyperboardOwnership.ts index 752b0fdf..cd2169aa 100644 --- a/src/utils/processSectionsToHyperboardOwnership.ts +++ b/src/utils/processSectionsToHyperboardOwnership.ts @@ -5,10 +5,10 @@ import { import _ from "lodash"; export 
const processSectionsToHyperboardOwnership = ( - sections: Pick<Section, "owners">[], + sections: Section[], ): HyperboardOwner[] => { const numberOfSectionsWithOwners = sections.filter( - (section) => !!section.owners?.length, + (section) => !!section.owners?.data?.length, ).length; if (numberOfSectionsWithOwners === 0) { @@ -16,7 +16,7 @@ export const processSectionsToHyperboardOwnership = ( } return _.chain(sections) - .flatMap((section) => section.owners) + .flatMap((section) => section.owners?.data || []) .groupBy((owner) => owner?.address) .mapValues((values) => ({ ...values[0], diff --git a/src/utils/validateMetadataAndClaimdata.ts b/src/utils/validateMetadataAndClaimdata.ts index 0d4faa76..8697cbf9 100644 --- a/src/utils/validateMetadataAndClaimdata.ts +++ b/src/utils/validateMetadataAndClaimdata.ts @@ -1,31 +1,41 @@ -import {validateClaimData, validateMetaData, HypercertMetadata} from "@hypercerts-org/sdk"; -import {isHypercertMetadata} from "./isHypercertsMetadata.js"; -import {ValidationResult} from "../types/api.js"; +import { + HypercertMetadata, + validateClaimData, + validateMetaData, +} from "@hypercerts-org/sdk"; +import { ValidationResult } from "../types/api.js"; +import { isHypercertMetadata } from "./isHypercertsMetadata.js"; -export const validateMetadataAndClaimdata = (data: HypercertMetadata): ValidationResult<HypercertMetadata> => { - // Check if object is hypercert metadata object - if (!isHypercertMetadata(data)) { - return { - data, - valid: false, - errors: {metadata: "Provided metadata is not a valid hypercert metadata object"}, - }; - } +// TODO: replace with validations from SDK +export const validateMetadataAndClaimdata = ( + data: HypercertMetadata, +): ValidationResult<HypercertMetadata> => { + // Check if object is hypercert metadata object + if (!isHypercertMetadata(data)) { + return { + data, + valid: false, + errors: { + metadata: "Provided metadata is not a valid hypercert metadata object", + }, + }; + } - // Check if hypercert claim data is valid - const {valid: claimDataValid, 
errors: claimDataErrors} = - validateClaimData(data.hypercert); + // Check if hypercert claim data is valid + const { valid: claimDataValid, errors: claimDataErrors } = validateClaimData( + data.hypercert, + ); - // Check if hypercert metadata is valid - const {valid: metadataValid, errors: metadataErrors} = - validateMetaData(data); + // Check if hypercert metadata is valid + const { valid: metadataValid, errors: metadataErrors } = + validateMetaData(data); - return { - data, - valid: claimDataValid && metadataValid, - errors: { - ...claimDataErrors, - ...metadataErrors, - }, - }; -} \ No newline at end of file + return { + data, + valid: claimDataValid && metadataValid, + errors: { + ...claimDataErrors, + ...metadataErrors, + }, + }; +}; diff --git a/src/utils/waitForTxThenMintBlueprint.ts b/src/utils/waitForTxThenMintBlueprint.ts index 9de48f1d..e6f69b3d 100644 --- a/src/utils/waitForTxThenMintBlueprint.ts +++ b/src/utils/waitForTxThenMintBlueprint.ts @@ -1,32 +1,50 @@ import { EvmClientFactory } from "../client/evmClient.js"; -import { SupabaseDataService } from "../services/SupabaseDataService.js"; +import { BlueprintsService } from "../services/database/entities/BlueprintsEntityService.js"; import { generateHypercertIdFromReceipt } from "./generateHypercertIdFromReceipt.js"; +import { inject, injectable, container } from "tsyringe"; -export const waitForTxThenMintBlueprint = async ( - tx_hash: string, - chain_id: number, - blueprintId: number, -) => { - const client = EvmClientFactory.createViemClient(chain_id); +@injectable() +export class WaitForTxThenMintBlueprintService { + constructor( + @inject(BlueprintsService) private blueprintsService: BlueprintsService, + ) {} - const receipt = await client.waitForTransactionReceipt({ - hash: tx_hash as `0x${string}`, - }); + async execute(tx_hash: string, chain_id: number, blueprintId: number) { + const client = EvmClientFactory.createViemClient(chain_id); - if (!receipt) { - throw new Error("No receipt found"); - 
} + const receipt = await client.waitForTransactionReceipt({ + hash: tx_hash as `0x${string}`, + }); - if (receipt.status !== "success") { - throw new Error("Transaction failed"); - } + if (!receipt) { + throw new Error("No receipt found"); + } - const hypercertId = generateHypercertIdFromReceipt(receipt, chain_id); + if (receipt.status !== "success") { + throw new Error("Transaction failed"); + } - if (!hypercertId) { - throw new Error("No hypercertId found"); + const hypercertId = generateHypercertIdFromReceipt(receipt, chain_id); + + if (!hypercertId) { + throw new Error("No hypercertId found"); + } + + await this.blueprintsService.mintBlueprintAndSwapInCollections( + blueprintId, + hypercertId, + ); } +} - const dataService = new SupabaseDataService(); - await dataService.mintBlueprintAndSwapInCollections(blueprintId, hypercertId); +// Export a convenience function that creates and executes the service +export const waitForTxThenMintBlueprint = async ( + tx_hash: string, + chain_id: number, + blueprintId: number, +) => { + const service = new WaitForTxThenMintBlueprintService( + container.resolve(BlueprintsService), + ); + return service.execute(tx_hash, chain_id, blueprintId); }; diff --git a/supabase/migrations/20250525202620_hyperboards_with_admins.sql b/supabase/migrations/20250525202620_hyperboards_with_admins.sql new file mode 100644 index 00000000..8c8959f9 --- /dev/null +++ b/supabase/migrations/20250525202620_hyperboards_with_admins.sql @@ -0,0 +1,15 @@ +create view hyperboards_with_admins as +select hyperboards.id, + hyperboards.created_at, + hyperboards.name, + hyperboards.background_image, + hyperboards.grayscale_images, + hyperboards.tile_border_color, + hyperboards.chain_ids, + u.address AS admin_address, + u.chain_id AS admin_chain_id, + u.avatar, + u.display_name +from public.hyperboards + join public.hyperboard_admins ha on hyperboards.id = ha.hyperboard_id + join public.users u on ha.user_id = u.id \ No newline at end of file diff --git 
a/supabase/migrations/20250525203837_collections_with_admins.sql b/supabase/migrations/20250525203837_collections_with_admins.sql new file mode 100644 index 00000000..d12fadfc --- /dev/null +++ b/supabase/migrations/20250525203837_collections_with_admins.sql @@ -0,0 +1,14 @@ +create view collections_with_admins as +select collections.id, + collections.created_at, + collections.name, + collections.description, + collections.hidden, + collections.chain_ids, + u.address AS admin_address, + u.chain_id AS admin_chain_id, + u.avatar, + u.display_name +from public.collections + join public.collection_admins ca on collections.id = ca.collection_id + join public.users u on ca.user_id = u.id \ No newline at end of file diff --git a/test/api/v1/AllowlistController.test.ts b/test/api/v1/AllowlistController.test.ts index 64c3dd7a..2c296415 100644 --- a/test/api/v1/AllowlistController.test.ts +++ b/test/api/v1/AllowlistController.test.ts @@ -20,7 +20,7 @@ vi.mock("../../../src/services/StorageService", async () => { }; }); -describe("Allow list upload at v1/allowlists", async () => { +describe("Allow list upload at v2/allowlists", async () => { const controller = new AllowListController(); const mockStorage = mock(); @@ -77,7 +77,7 @@ describe("Allow list upload at v1/allowlists", async () => { }); }); -describe("Allow list validation at v1/allowlists/validate", async () => { +describe("Allow list validation at v2/allowlists/validate", async () => { const controller = new AllowListController(); test("Validates correctness of allowlist and returns results", async () => { diff --git a/test/api/v1/MetadataController.test.ts b/test/api/v1/MetadataController.test.ts index 147bcc54..cab91e81 100644 --- a/test/api/v1/MetadataController.test.ts +++ b/test/api/v1/MetadataController.test.ts @@ -20,7 +20,7 @@ vi.mock("../../../src/services/StorageService", async () => { }; }); -describe("Metadata upload at v1/metadata", async () => { +describe("Metadata upload at v2/metadata", async () => 
{ const controller = new MetadataController(); const mockStorage = mock(); @@ -44,7 +44,7 @@ describe("Metadata upload at v1/metadata", async () => { expect(response.success).to.be.false; expect(response.data).to.be.undefined; - expect(response.message).to.eq("Errors while validating metadata"); + expect(response.message).to.eq("Metadata validation failed"); expect(response.errors).to.deep.eq({ metadata: "Provided metadata is not a valid hypercert metadata object", }); @@ -65,7 +65,7 @@ describe("Metadata upload at v1/metadata", async () => { }); }); -describe("Metadata validation at v1/metadata/validate", async () => { +describe("Metadata validation at v2/metadata/validate", async () => { const controller = new MetadataController(); test("Validates a metadata set and returns results", async () => { @@ -83,7 +83,7 @@ describe("Metadata validation at v1/metadata/validate", async () => { }); expect(response.success).to.be.true; - expect(response.message).to.eq("Errors while validating metadata"); + expect(response.message).to.eq("Metadata validation failed"); expect(response.errors).to.deep.eq({ metadata: "Provided metadata is not a valid hypercert metadata object", }); diff --git a/test/api/v1/UploadController.test.ts b/test/api/v1/UploadController.test.ts index a2c97eef..cc5f68af 100644 --- a/test/api/v1/UploadController.test.ts +++ b/test/api/v1/UploadController.test.ts @@ -18,7 +18,7 @@ vi.mock("../../../src/services/StorageService", async () => { }; }); -describe("File upload at v1/upload", async () => { +describe("File upload at v2/upload", async () => { const controller = new UploadController(); const mockStorage = mock(); diff --git a/test/client/evmClient.test.ts b/test/client/evmClient.test.ts index 3c6b6af9..eb9e7b97 100644 --- a/test/client/evmClient.test.ts +++ b/test/client/evmClient.test.ts @@ -6,7 +6,6 @@ import { RpcClientFactory } from "../../src/client/rpcClientFactory.js"; vi.mock("@/utils/constants", () => ({ indexerEnvironment: "test", 
alchemyApiKey: "mock-alchemy-key", - infuraApiKey: "mock-infura-key", drpcApiPkey: "mock-drpc-key", filecoinApiKey: "mock-filecoin-key", Environment: { TEST: "test", PROD: "prod" }, @@ -59,10 +58,9 @@ describe("EvmClientFactory", () => { expect(sepoliaUrls[0]).toContain("alchemy.com"); const opUrls = EvmClientFactory.getAllAvailableUrls(10); - expect(opUrls).toHaveLength(3); // Alchemy, Infura, DRPC for Optimism + expect(opUrls).toHaveLength(2); // Alchemy, DRPC for Optimism expect(opUrls[0]).toContain("alchemy.com"); - expect(opUrls[1]).toContain("infura.io"); - expect(opUrls[2]).toContain("drpc.org"); + expect(opUrls[1]).toContain("drpc.org"); }); it("returns empty array for unsupported chain", () => { diff --git a/test/graphql/schemas/args/hypercertsArgs.test.ts b/test/graphql/schemas/args/hypercertsArgs.test.ts new file mode 100644 index 00000000..8fd5ff03 --- /dev/null +++ b/test/graphql/schemas/args/hypercertsArgs.test.ts @@ -0,0 +1,67 @@ +import { describe, it, expect } from "vitest"; +import { + GetHypercertsArgs, + HypercertSortOptions, + HypercertWhereInput, +} from "../../../../src/graphql/schemas/args/hypercertsArgs.js"; + +//TOOD can be removed later, used this more as a smoke test during development +describe("HypercertsArgs", () => { + it("should have correct class names", () => { + expect(GetHypercertsArgs.name).toBe("GetHypercertsArgs"); + expect(HypercertWhereInput.name).toBe("HypercertWhereInput"); + expect(HypercertSortOptions.name).toBe("HypercertSortOptions"); + }); + + it("should have correct structure for GetHypercertsArgs", () => { + const instance = new GetHypercertsArgs(); + expect(instance).toHaveProperty("where"); + expect(instance).toHaveProperty("sortBy"); + expect(instance).toHaveProperty("first"); + expect(instance).toHaveProperty("offset"); + }); + + it("should include all required where fields", () => { + const whereInstance = new HypercertWhereInput(); + const whereFields = Object.keys(whereInstance); + + 
expect(whereFields).toContain("id"); + expect(whereFields).toContain("creation_block_timestamp"); + expect(whereFields).toContain("creation_block_number"); + expect(whereFields).toContain("token_id"); + expect(whereFields).toContain("creator_address"); + expect(whereFields).toContain("uri"); + expect(whereFields).toContain("hypercert_id"); + expect(whereFields).toContain("units"); + }); + + it("should include reference fields in where args", () => { + const whereFields = Object.keys(HypercertWhereInput.prototype); + + expect(whereFields).toContain("contract"); + expect(whereFields).toContain("metadata"); + expect(whereFields).toContain("attestations"); + expect(whereFields).toContain("fractions"); + }); + + it("should include all required sort fields", () => { + const sortInstance = new HypercertSortOptions(); + const sortFields = Object.keys(sortInstance); + + // Basic fields that should be sortable + expect(sortFields).toContain("id"); + expect(sortFields).toContain("creation_block_timestamp"); + expect(sortFields).toContain("creation_block_number"); + expect(sortFields).toContain("token_id"); + expect(sortFields).toContain("creator_address"); + expect(sortFields).toContain("uri"); + expect(sortFields).toContain("hypercert_id"); + expect(sortFields).toContain("units"); + + // Reference fields should NOT be included + expect(sortFields).not.toContain("contract"); + expect(sortFields).not.toContain("metadata"); + expect(sortFields).not.toContain("attestations"); + expect(sortFields).not.toContain("fractions"); + }); +}); diff --git a/test/lib/db/queryModifiers/applyPagination.test.ts b/test/lib/db/queryModifiers/applyPagination.test.ts new file mode 100644 index 00000000..41171879 --- /dev/null +++ b/test/lib/db/queryModifiers/applyPagination.test.ts @@ -0,0 +1,143 @@ +import { describe, it, expect, beforeEach } from "vitest"; +import { Kysely } from "kysely"; +import { IMemoryDb, newDb } from "pg-mem"; +import { applyPagination } from 
"../../../../src/lib/db/queryModifiers/applyPagination.js"; +import { DataDatabase } from "../../../../src/types/kyselySupabaseData.js"; + +interface TestDatabase extends DataDatabase { + test_users: { + id: number; + name: string; + active: boolean; + created_at: Date; + }; +} + +describe("applyPagination", () => { + let db: Kysely; + let mem: IMemoryDb; + + beforeEach(() => { + mem = newDb(); + db = mem.adapters.createKysely(); + + // Create test table + mem.public.none(` + CREATE TABLE test_users ( + id SERIAL PRIMARY KEY, + name TEXT NOT NULL, + active BOOLEAN NOT NULL DEFAULT true, + created_at TIMESTAMP NOT NULL DEFAULT NOW() + ); + `); + }); + + describe("basic functionality", () => { + it("should apply default limit of 100 when first is not provided", () => { + const baseQuery = db.selectFrom("test_users").selectAll() as any; + const result = applyPagination(baseQuery, {}); + + const { sql, parameters } = result.compile(); + expect(sql).toMatch(/limit \$1/); + expect(parameters).toEqual([100]); + }); + + it("should apply the specified limit when first is provided", () => { + const baseQuery = db.selectFrom("test_users").selectAll() as any; + const result = applyPagination(baseQuery, { first: 25 }); + + const { sql, parameters } = result.compile(); + expect(sql).toMatch(/limit \$1/); + expect(parameters).toEqual([25]); + }); + + it("should apply offset when provided", () => { + const baseQuery = db.selectFrom("test_users").selectAll() as any; + const result = applyPagination(baseQuery, { offset: 10 }); + + const { sql, parameters } = result.compile(); + expect(sql).toMatch(/limit \$1 offset \$2/); + expect(parameters).toEqual([100, 10]); // Default limit and offset + }); + + it("should apply both limit and offset when both are provided", () => { + const baseQuery = db.selectFrom("test_users").selectAll() as any; + const result = applyPagination(baseQuery, { first: 20, offset: 40 }); + + const { sql, parameters } = result.compile(); + 
expect(sql).toMatch(/limit \$1 offset \$2/); + expect(parameters).toEqual([20, 40]); + }); + }); + + describe("edge cases", () => { + it("should handle zero values correctly", () => { + const baseQuery = db.selectFrom("test_users").selectAll() as any; + const result = applyPagination(baseQuery, { first: 0, offset: 0 }); + + const { sql, parameters } = result.compile(); + expect(sql).toMatch(/limit \$1/); + expect(sql).not.toMatch(/offset \$2/); + expect(parameters).toEqual([100]); + }); + + it("should handle undefined values correctly", () => { + const baseQuery = db.selectFrom("test_users").selectAll() as any; + const result = applyPagination(baseQuery, { + first: undefined, + offset: undefined, + }); + + const { sql, parameters } = result.compile(); + expect(sql).toMatch(/limit \$1/); + expect(parameters).toEqual([100]); // Should use default limit + expect(sql).not.toMatch(/offset/); + }); + + it("should handle large values correctly", () => { + const baseQuery = db.selectFrom("test_users").selectAll() as any; + const result = applyPagination(baseQuery, { + first: 1000, + offset: Number.MAX_SAFE_INTEGER, + }); + + const { sql, parameters } = result.compile(); + expect(sql).toMatch(/limit \$1 offset \$2/); + expect(parameters).toEqual([1000, Number.MAX_SAFE_INTEGER]); + }); + }); + + describe("query builder integration", () => { + it("should work with complex queries", () => { + const baseQuery = db + .selectFrom("test_users") + .where("active", "=", true) + .orderBy("created_at") as any; + + const result = applyPagination(baseQuery, { first: 10, offset: 20 }); + + const { sql, parameters } = result.compile(); + expect(sql).toContain("where"); + expect(sql).toContain("order by"); + expect(sql).toMatch(/limit \$\d+ offset \$\d+/); + expect(parameters).toContain(10); + expect(parameters).toContain(20); + }); + + it("should preserve existing query modifiers", () => { + const baseQuery = db + .selectFrom("test_users") + .selectAll() + .where("active", "=", true) + 
.orderBy("created_at") as any; + + const result = applyPagination(baseQuery, { first: 10 }); + + const { sql, parameters } = result.compile(); + expect(sql).toContain("where"); + expect(sql).toContain("order by"); + expect(sql).toMatch(/limit \$\d+/); + expect(parameters).toContain(10); + }); + }); +}); diff --git a/test/lib/db/queryModifiers/applySort.test.ts b/test/lib/db/queryModifiers/applySort.test.ts new file mode 100644 index 00000000..0199ebb6 --- /dev/null +++ b/test/lib/db/queryModifiers/applySort.test.ts @@ -0,0 +1,192 @@ +import { describe, it, expect, beforeEach } from "vitest"; +import { Kysely } from "kysely"; +import { IMemoryDb, newDb } from "pg-mem"; +import { applySort } from "../../../../src/lib/db/queryModifiers/applySort.js"; +import { SortOrder } from "../../../../src/graphql/schemas/enums/sortEnums.js"; +import { DataDatabase } from "../../../../src/types/kyselySupabaseData.js"; + +type TestDatabase = DataDatabase & { + test_users: { + id: number; + name: string; + age: number; + active: boolean; + created_at: Date; + score: number; + }; +}; + +describe("applySort", () => { + let db: Kysely; + let mem: IMemoryDb; + + beforeEach(() => { + mem = newDb(); + db = mem.adapters.createKysely(); + + // Create test table + mem.public.none(` + CREATE TABLE test_users ( + id SERIAL PRIMARY KEY, + name TEXT NOT NULL, + age INTEGER NOT NULL, + active BOOLEAN NOT NULL DEFAULT true, + created_at TIMESTAMP NOT NULL DEFAULT NOW(), + score NUMERIC NOT NULL DEFAULT 0 + ); + `); + + // Insert some test data + mem.public.none(` + INSERT INTO test_users (name, age, score, created_at) VALUES + ('Alice', 25, 100, '2024-01-01'), + ('Bob', 30, 85, '2024-01-02'), + ('Charlie', 20, 95, '2024-01-03'); + `); + }); + + describe("basic functionality", () => { + it("should return original query when no sort is provided", () => { + const baseQuery = db.selectFrom("test_users").selectAll() as any; + const result = applySort(baseQuery, {}); + + expect(result).toBe(baseQuery); 
+ + const { sql, parameters } = result.compile(); + expect(sql).not.toContain("order by"); + expect(parameters).toEqual([]); + }); + + it("should apply single ascending sort", () => { + const baseQuery = db.selectFrom("test_users").selectAll() as any; + const result = applySort(baseQuery, { + sortBy: { name: SortOrder.ascending }, + }); + + const { sql } = result.compile(); + expect(sql).toMatch(/order by.*"name".*asc/i); + }); + + it("should apply single descending sort", () => { + const baseQuery = db.selectFrom("test_users").selectAll() as any; + const result = applySort(baseQuery, { + sortBy: { age: SortOrder.descending }, + }); + + const { sql } = result.compile(); + expect(sql).toMatch(/order by.*"age".*desc/i); + }); + }); + + describe("multiple sort conditions", () => { + it("should apply multiple sort conditions in order", () => { + const baseQuery = db.selectFrom("test_users").selectAll() as any; + const result = applySort(baseQuery, { + sortBy: { + score: SortOrder.descending, + name: SortOrder.ascending, + }, + }); + + const { sql } = result.compile(); + expect(sql).toMatch(/order by.*"score".*desc.*"name".*asc/i); + }); + + it("should handle mixed sort directions", () => { + const baseQuery = db.selectFrom("test_users").selectAll() as any; + const result = applySort(baseQuery, { + sortBy: { + age: SortOrder.ascending, + score: SortOrder.descending, + name: SortOrder.ascending, + }, + }); + + const { sql } = result.compile(); + expect(sql).toMatch(/order by.*"age".*asc.*"score".*desc.*"name".*asc/i); + }); + }); + + describe("edge cases", () => { + it("should ignore null and undefined sort values", () => { + const baseQuery = db.selectFrom("test_users").selectAll() as any; + const result = applySort(baseQuery, { + sortBy: { + name: null, + age: undefined, + score: SortOrder.ascending, + }, + }); + + const { sql } = result.compile(); + expect(sql).toMatch(/order by.*"score".*asc/i); + expect(sql).not.toMatch(/"test_users"."name"/); + 
expect(sql).not.toMatch(/"test_users"."age"/); + }); + + it("should return original query when all sort values are null/undefined", () => { + const baseQuery = db.selectFrom("test_users").selectAll() as any; + const result = applySort(baseQuery, { + sortBy: { + name: null, + age: undefined, + }, + }); + + expect(result).toBe(baseQuery); + + const { sql } = result.compile(); + expect(sql).not.toContain("order by"); + }); + }); + + describe("query builder integration", () => { + it("should work with existing where conditions", () => { + const baseQuery = db + .selectFrom("test_users") + .selectAll() + .where("active", "=", true) as any; + + const result = applySort(baseQuery, { + sortBy: { name: SortOrder.ascending }, + }); + + const { sql } = result.compile(); + expect(sql).toContain("where"); + expect(sql).toMatch(/order by.*"name".*asc/i); + }); + + it("should preserve existing order by clauses", () => { + const baseQuery = db + .selectFrom("test_users") + .selectAll() + .orderBy("id", "asc") as any; + + const result = applySort(baseQuery, { + sortBy: { name: SortOrder.ascending }, + }); + + const { sql } = result.compile(); + expect(sql).toMatch(/order by.*"id".*asc.*"name".*asc/i); + }); + + it("should work with limit and offset", () => { + const baseQuery = db + .selectFrom("test_users") + .selectAll() + .limit(10) + .offset(20) as any; + + const result = applySort(baseQuery, { + sortBy: { name: SortOrder.ascending }, + }); + + const { sql, parameters } = result.compile(); + expect(sql).toMatch(/order by.*"name".*asc/i); + expect(sql).toContain("limit"); + expect(sql).toContain("offset"); + expect(parameters).toContain(10); + expect(parameters).toContain(20); + }); + }); +}); diff --git a/test/lib/db/queryModifiers/applyWhere.test.ts b/test/lib/db/queryModifiers/applyWhere.test.ts new file mode 100644 index 00000000..c4fd48de --- /dev/null +++ b/test/lib/db/queryModifiers/applyWhere.test.ts @@ -0,0 +1,222 @@ +import { describe, it, expect, beforeEach } from 
"vitest"; +import { Kysely } from "kysely"; +import { IMemoryDb, newDb } from "pg-mem"; +import { applyWhere } from "../../../../src/lib/db/queryModifiers/applyWhere.js"; +import { DataDatabase } from "../../../../src/types/kyselySupabaseData.js"; + +type TestDatabase = DataDatabase & { + test_users: { + id: number; + name: string; + age: number; + active: boolean; + created_at: Date; + tags: string[]; + }; +}; + +describe("applyWhere", () => { + let db: Kysely; + let mem: IMemoryDb; + + beforeEach(() => { + mem = newDb(); + db = mem.adapters.createKysely(); + + // Create test table + mem.public.none(` + CREATE TABLE test_users ( + id SERIAL PRIMARY KEY, + name TEXT NOT NULL, + age INTEGER NOT NULL, + active BOOLEAN NOT NULL DEFAULT true, + created_at TIMESTAMP NOT NULL DEFAULT NOW(), + tags TEXT[] NOT NULL DEFAULT '{}' + ); + `); + }); + + describe("basic functionality", () => { + it("should return original query when no where clause is provided", () => { + const baseQuery = db.selectFrom("test_users").selectAll() as any; + const result = applyWhere( + "test_users", + baseQuery, + {}, + ); + + expect(result).toBe(baseQuery); + + const { sql, parameters } = result.compile(); + expect(sql).not.toContain("where"); + expect(parameters).toEqual([]); + }); + + it("should apply simple equality condition", () => { + const baseQuery = db.selectFrom("test_users").selectAll() as any; + const result = applyWhere( + "test_users", + baseQuery, + { + where: { name: { eq: "John" } }, + }, + ); + + const { sql, parameters } = result.compile(); + expect(sql).toMatch(/where.*"test_users"."name".*=.*\$1/i); + expect(parameters).toEqual(["John"]); + }); + + it("should apply multiple conditions with AND", () => { + const baseQuery = db.selectFrom("test_users").selectAll() as any; + const result = applyWhere( + "test_users", + baseQuery, + { + where: { + name: { eq: "John" }, + age: { gt: 18 }, + }, + }, + ); + + const { sql, parameters } = result.compile(); + expect(sql).toMatch( + 
/where.*"test_users"."name".*=.*\$1.*and.*"test_users"."age".*>.*\$2/i, + ); + expect(parameters).toEqual(["John", 18]); + }); + }); + + describe("comparison operators", () => { + it("should handle greater than condition", () => { + const baseQuery = db.selectFrom("test_users").selectAll() as any; + const result = applyWhere( + "test_users", + baseQuery, + { + where: { age: { gt: 18 } }, + }, + ); + + const { sql, parameters } = result.compile(); + expect(sql).toMatch(/where.*"test_users"."age".*>.*\$1/i); + expect(parameters).toEqual([18]); + }); + + it("should handle less than or equal condition", () => { + const baseQuery = db.selectFrom("test_users").selectAll() as any; + const result = applyWhere( + "test_users", + baseQuery, + { + where: { age: { lte: 65 } }, + }, + ); + + const { sql, parameters } = result.compile(); + expect(sql).toMatch(/where.*"test_users"."age".*<=.*\$1/i); + expect(parameters).toEqual([65]); + }); + }); + + describe("text search conditions", () => { + it("should handle contains condition", () => { + const baseQuery = db.selectFrom("test_users").selectAll() as any; + const result = applyWhere( + "test_users", + baseQuery, + { + where: { name: { contains: "oh" } }, + }, + ); + + const { sql, parameters } = result.compile(); + expect(sql).toMatch( + /where.*lower.*"test_users"."name".*like.*lower.*\$1/i, + ); + expect(parameters).toEqual(["%oh%"]); + }); + + it("should handle startsWith condition", () => { + const baseQuery = db.selectFrom("test_users").selectAll() as any; + const result = applyWhere( + "test_users", + baseQuery, + { + where: { name: { startsWith: "Jo" } }, + }, + ); + + const { sql, parameters } = result.compile(); + expect(sql).toMatch( + /where.*lower.*"test_users"."name".*like.*lower.*\$1/i, + ); + expect(parameters).toEqual(["Jo%"]); + }); + }); + + describe("array conditions", () => { + it("should handle array contains condition", () => { + const baseQuery = db.selectFrom("test_users").selectAll() as any; + const 
result = applyWhere( + "test_users", + baseQuery, + { + where: { tags: { arrayContains: ["tag1", "tag2"] } }, + }, + ); + + const { sql, parameters } = result.compile(); + expect(sql).toMatch(/where.*"test_users"."tags".*@>.*array\[\$1, \$2\]/i); + expect(parameters).toEqual(["tag1", "tag2"]); + }); + }); + + describe("query builder integration", () => { + it("should work with complex queries", () => { + const baseQuery = db + .selectFrom("test_users") + .selectAll() + .orderBy("created_at") as any; + + const result = applyWhere( + "test_users", + baseQuery, + { + where: { + active: { eq: true }, + age: { gt: 18 }, + }, + }, + ); + + const { sql, parameters } = result.compile(); + expect(sql).toContain("where"); + expect(sql).toContain("order by"); + expect(parameters).toEqual([true, 18]); + }); + + it("should preserve existing query modifiers", () => { + const baseQuery = db + .selectFrom("test_users") + .selectAll() + .orderBy("created_at") + .limit(10) as any; + + const result = applyWhere( + "test_users", + baseQuery, + { + where: { active: { eq: true } }, + }, + ); + + const { sql, parameters } = result.compile(); + expect(sql).toContain("where"); + expect(sql).toContain("order by"); + expect(sql).toContain("limit"); + expect(parameters).toEqual([true, 10]); + }); + }); +}); diff --git a/test/lib/db/queryModifiers/buildWhereCondition.test.ts b/test/lib/db/queryModifiers/buildWhereCondition.test.ts new file mode 100644 index 00000000..46110367 --- /dev/null +++ b/test/lib/db/queryModifiers/buildWhereCondition.test.ts @@ -0,0 +1,353 @@ +import { expressionBuilder, Kysely } from "kysely"; +import { IMemoryDb, newDb } from "pg-mem"; +import { beforeEach, describe, expect, it } from "vitest"; +import { + buildWhereCondition, + WhereFilter, +} from "../../../../src/lib/db/queryModifiers/buildWhereCondition.js"; +import { DataDatabase } from "../../../../src/types/kyselySupabaseData.js"; + +type GeneratedAlways = import("kysely").GeneratedAlways; + +// Mock database 
for testing +interface TestDatabase extends DataDatabase { + test_table: { + id: GeneratedAlways; + name: string; + created_at: Date; + test_reference_table_id: number; + }; + test_reference_table: { + id: GeneratedAlways; + name: string; + }; + claims: { + id: GeneratedAlways; + uri: string; + hypercert_id: string; + }; + fractions_view: { + id: GeneratedAlways; + amount: number; + hypercert_id: string; + }; +} + +const cleanSql = (sql: string) => sql.replace(/\s+/g, " ").trim(); + +describe("buildWhereCondition", () => { + let mem: IMemoryDb; + + let kysely: Kysely; + + beforeEach(() => { + mem = newDb(); + kysely = mem.adapters.createKysely(); + }); + + describe("Basic Filters", () => { + it("should build simple equality condition", () => { + const query = kysely.selectFrom("test_table").selectAll(); + + const where: WhereFilter = { id: { eq: "123" } }; + + const condition = buildWhereCondition( + "test_table", + where, + expressionBuilder(query), + ); + + if (!condition) { + throw new Error("Expected condition to be defined"); + } + + const compiledQuery = kysely + .selectFrom("test_table") + .where(condition) + .compile(); + + const expectedSql = + 'select from "test_table" where "test_table"."id" = $1'; + expect(cleanSql(compiledQuery.sql)).toBe(cleanSql(expectedSql)); + expect(compiledQuery.parameters).toEqual(["123"]); + }); + + it("should build numeric comparison conditions", () => { + const query = kysely.selectFrom("test_table").selectAll(); + const where: WhereFilter = { age: { gt: 18, lte: 65 } }; + + const condition = buildWhereCondition( + "test_table", + where, + expressionBuilder(query), + ); + + if (!condition) { + throw new Error("Expected condition to be defined"); + } + + const compiledQuery = kysely + .selectFrom("test_table") + .where(condition) + .compile(); + + const expectedSql = + 'select from "test_table" where ("test_table"."age" > $1 and "test_table"."age" <= $2)'; + expect(cleanSql(compiledQuery.sql)).toBe(cleanSql(expectedSql)); + 
expect(compiledQuery.parameters).toEqual([18, 65]); + }); + + it("should build string search conditions", () => { + const query = kysely.selectFrom("test_table").selectAll(); + const where: WhereFilter = { name: { contains: "john" } }; + + const condition = buildWhereCondition( + "test_table", + where, + expressionBuilder(query), + ); + + if (!condition) { + throw new Error("Expected condition to be defined"); + } + + const compiledQuery = kysely + .selectFrom("test_table") + .where(condition) + .compile(); + + const expectedSql = + 'select from "test_table" where lower("test_table"."name") like lower($1)'; + expect(cleanSql(compiledQuery.sql)).toBe(cleanSql(expectedSql)); + expect(compiledQuery.parameters).toEqual(["%john%"]); + }); + + it("should build array conditions", () => { + const query = kysely.selectFrom("test_table").selectAll(); + const where: WhereFilter = { + roles: { arrayContains: ["admin", "user"] }, + }; + + const condition = buildWhereCondition( + "test_table", + where, + expressionBuilder(query), + ); + + if (!condition) { + throw new Error("Expected condition to be defined"); + } + + const compiledQuery = kysely + .selectFrom("test_table") + .where(condition) + .compile(); + + const expectedSql = + 'select from "test_table" where "test_table"."roles" @> ARRAY[$1, $2]'; + expect(cleanSql(compiledQuery.sql)).toBe(cleanSql(expectedSql)); + expect(compiledQuery.parameters).toEqual(["admin", "user"]); + }); + }); + + describe("Nested Filters", () => { + it("should build condition for standard foreign key relation", () => { + const query = kysely.selectFrom("test_table").selectAll(); + const where: WhereFilter = { + company: { + name: { eq: "Acme" }, + }, + }; + + const condition = buildWhereCondition( + "test_table", + where, + expressionBuilder(query), + ); + + if (!condition) { + throw new Error("Expected condition to be defined"); + } + + const compiledQuery = kysely + .selectFrom("test_table") + .where(condition) + .compile(); + + const 
expectedSql = + 'select from "test_table" where exists ( select from "company" where "company".id = "test_table".company_id and "company"."name" = $1 )'; + expect(cleanSql(compiledQuery.sql)).toBe(cleanSql(expectedSql)); + expect(compiledQuery.parameters).toEqual(["Acme"]); + }); + + it("should build condition for custom relation from TABLE_RELATIONS", () => { + const query = kysely.selectFrom("claims").selectAll(); + const where: WhereFilter = { + fractions_view: { + amount: { gt: 100 }, + }, + }; + + const condition = buildWhereCondition( + "claims", + where, + expressionBuilder(query), + ); + + if (!condition) { + throw new Error("Expected condition to be defined"); + } + + const compiledQuery = kysely + .selectFrom("claims") + .where(condition) + .compile(); + + // Using the actual relation defined in TABLE_RELATIONS + const expectedSql = + 'select from "claims" where exists ( select from "fractions_view" where claims.hypercert_id = fractions_view.hypercert_id and "fractions_view"."amount" > $1 )'; + expect(cleanSql(compiledQuery.sql)).toBe(cleanSql(expectedSql)); + expect(compiledQuery.parameters).toEqual([100]); + }); + + it("should handle multiple nested conditions", () => { + const query = kysely.selectFrom("test_table").selectAll(); + const where: WhereFilter = { + claims: { + uri: { eq: "test-uri" }, + }, + fractions_view: { + amount: { gt: 100 }, + }, + }; + + const condition = buildWhereCondition( + "test_table", + where, + expressionBuilder(query), + ); + + if (!condition) { + throw new Error("Expected condition to be defined"); + } + + const compiledQuery = kysely + .selectFrom("test_table") + .where(condition) + .compile(); + + const expectedSql = + 'select from "test_table" where (exists ( select from "claims" where "claims".id = "test_table".claims_id and "claims"."uri" = $1 ) and exists ( select from "fractions_view" where "fractions_view".id = "test_table".fractions_view_id and "fractions_view"."amount" > $2 ))'; + 
expect(cleanSql(compiledQuery.sql)).toBe(cleanSql(expectedSql)); + expect(compiledQuery.parameters).toEqual(["test-uri", 100]); + }); + }); + + describe("Edge Cases", () => { + it("should return undefined for empty where clause", () => { + const query = kysely.selectFrom("test_table").selectAll(); + const where = {}; + + const condition = buildWhereCondition( + "test_table", + where, + expressionBuilder(query), + ); + + if (condition) { + throw new Error("Expected condition to be undefined"); + } + }); + + it("should ignore undefined values", () => { + const query = kysely.selectFrom("test_table").selectAll(); + const where: WhereFilter = { + id: { eq: undefined }, + name: { eq: "test" }, + }; + + const condition = buildWhereCondition( + "test_table", + where, + expressionBuilder(query), + ); + + if (!condition) { + throw new Error("Expected condition to be defined"); + } + + const compiledQuery = kysely + .selectFrom("test_table") + .where(condition) + .compile(); + + const expectedSql = + 'select from "test_table" where "test_table"."name" = $1'; + expect(cleanSql(compiledQuery.sql)).toBe(cleanSql(expectedSql)); + expect(compiledQuery.parameters).toEqual(["test"]); + }); + + it("should handle table prefix mapping", () => { + const query = kysely.selectFrom("test_table").selectAll(); + const where: WhereFilter = { + hypercert: { + id: { eq: "123" }, + }, + }; + + const condition = buildWhereCondition( + "test_table", + where, + expressionBuilder(query), + ); + + if (!condition) { + throw new Error("Expected condition to be defined"); + } + + const compiledQuery = kysely + .selectFrom("test_table") + .where(condition) + .compile(); + + const expectedSql = + 'select from "test_table" where exists ( select from "claims" where "claims".id = "test_table".claims_id and "claims"."id" = $1 )'; + expect(cleanSql(compiledQuery.sql)).toBe(cleanSql(expectedSql)); + expect(compiledQuery.parameters).toEqual(["123"]); + }); + }); + + describe("Complex Queries", () => { + 
it("should build complex nested conditions with multiple operators", () => { + const query = kysely.selectFrom("test_table").selectAll(); + const where: WhereFilter = { + age: { gte: 18, lte: 65 }, + name: { contains: "john" }, + company: { + name: { eq: "Acme" }, + size: { gt: 100 }, + }, + }; + + const condition = buildWhereCondition( + "test_table", + where, + expressionBuilder(query), + ); + + if (!condition) { + throw new Error("Expected condition to be defined"); + } + + const compiledQuery = kysely + .selectFrom("test_table") + .where(condition) + .compile(); + + const expectedSql = + 'select from "test_table" where ("test_table"."age" >= $1 and "test_table"."age" <= $2 and lower("test_table"."name") like lower($3) and exists ( select from "company" where "company".id = "test_table".company_id and ("company"."name" = $4 and "company"."size" > $5) ))'; + expect(cleanSql(compiledQuery.sql)).toBe(cleanSql(expectedSql)); + expect(compiledQuery.parameters).toEqual([18, 65, "%john%", "Acme", 100]); + }); + }); +}); diff --git a/test/lib/db/queryModifiers/queryModifiers.test.ts b/test/lib/db/queryModifiers/queryModifiers.test.ts new file mode 100644 index 00000000..9bde31d8 --- /dev/null +++ b/test/lib/db/queryModifiers/queryModifiers.test.ts @@ -0,0 +1,244 @@ +import { Kysely } from "kysely"; +import { IMemoryDb, newDb } from "pg-mem"; +import { beforeEach, describe, expect, it } from "vitest"; +import { SortOrder } from "../../../../src/graphql/schemas/enums/sortEnums.js"; +import { + composeQueryModifiers, + createStandardQueryModifier, + QueryModifier, +} from "../../../../src/lib/db/queryModifiers/queryModifiers.js"; +import { DataDatabase } from "../../../../src/types/kyselySupabaseData.js"; + +// Define test database type +interface TestDatabase extends DataDatabase { + test_users: { + id: number; + name: string; + age: number; + active: boolean; + created_at: Date; + }; +} + +describe("queryModifiers", () => { + let db: Kysely; + let mem: IMemoryDb; + + 
beforeEach(() => { + mem = newDb(); + db = mem.adapters.createKysely(); + + // Create test table + mem.public.none(` + CREATE TABLE test_users ( + id SERIAL PRIMARY KEY, + name TEXT NOT NULL, + age INTEGER NOT NULL, + active BOOLEAN NOT NULL DEFAULT true, + created_at TIMESTAMP NOT NULL DEFAULT NOW() + ); + `); + + // Insert test data + mem.public.none(` + INSERT INTO test_users (name, age, active, created_at) VALUES + ('Alice', 25, true, '2024-01-01'), + ('Bob', 30, false, '2024-01-02'), + ('Charlie', 20, true, '2024-01-03'); + `); + }); + + describe("QueryModifier Type", () => { + it("should allow creation of a valid query modifier", () => { + const modifier: QueryModifier< + TestDatabase, + "test_users", + { age?: number } + > = (query, args) => { + return args.age ? query.where("age", ">=", args.age) : query; + }; + + const result = modifier(db.selectFrom("test_users").selectAll(), { + age: 25, + }); + + const { sql, parameters } = result.compile(); + expect(sql).toMatch(/where.*"age".*>=.*\$1/i); + expect(parameters).toEqual([25]); + }); + }); + + describe("composeQueryModifiers", () => { + it("should compose multiple query modifiers into a single function", () => { + const whereModifier: QueryModifier = ( + query, + _args, + ) => query.where("active", "=", true); + + const sortModifier: QueryModifier = ( + query, + _args, + ) => query.orderBy("name", "asc"); + + const composedModifier = composeQueryModifiers( + whereModifier, + sortModifier, + ); + const result = composedModifier( + db.selectFrom("test_users").selectAll(), + {}, + ); + + const { sql, parameters } = result.compile(); + expect(sql).toMatch(/where.*"active".*=.*\$1.*order by.*"name".*asc/i); + expect(parameters).toEqual([true]); + }); + + it("should apply modifiers in the correct order", async () => { + const results: string[] = []; + + const modifier1: QueryModifier = ( + query, + _args, + ) => { + results.push("where"); + return query.where("age", ">", 20); + }; + + const modifier2: 
QueryModifier = ( + query, + _args, + ) => { + results.push("sort"); + return query.orderBy("name", "asc"); + }; + + const modifier3: QueryModifier = ( + query, + _args, + ) => { + results.push("limit"); + return query.limit(2); + }; + + const composedModifier = composeQueryModifiers( + modifier1, + modifier2, + modifier3, + ); + + const result = await composedModifier( + db.selectFrom("test_users").selectAll(), + {}, + ).execute(); + + expect(results).toEqual(["where", "sort", "limit"]); + expect(result).toHaveLength(2); + expect(result[0].name).toBe("Alice"); + expect(result[1].name).toBe("Bob"); + }); + + it("should handle undefined return values gracefully", () => { + const modifier1: QueryModifier = ( + _query, + _args, + ) => undefined as any; + + const modifier2: QueryModifier = ( + query, + _args, + ) => query.orderBy("name", "asc"); + + const composedModifier = composeQueryModifiers(modifier1, modifier2); + const result = composedModifier( + db.selectFrom("test_users").selectAll(), + {}, + ); + const { sql } = result.compile(); + expect(sql).toMatch(/order by.*"name".*asc/i); + }); + }); + + describe("createStandardQueryModifier", () => { + it("should create a working composed modifier with all components", async () => { + const standardModifier = createStandardQueryModifier< + TestDatabase, + "test_users", + any + >("test_users"); + + const result = await standardModifier( + db.selectFrom("test_users").selectAll(), + { + where: { age: { gt: 20 } }, + sortBy: { name: SortOrder.ascending }, + first: 2, + offset: 0, + }, + ).execute(); + + expect(result).toHaveLength(2); + expect(result[0].name).toBe("Alice"); + expect(result[1].name).toBe("Bob"); + }); + + it("should work with partial arguments", async () => { + const standardModifier = createStandardQueryModifier< + TestDatabase, + "test_users", + any + >("test_users"); + + // Only apply where condition + const result1 = await standardModifier( + db.selectFrom("test_users").selectAll(), + { + where: { 
active: { eq: true } }, + }, + ).execute(); + + expect(result1.length).toBe(2); + expect(result1.every((r) => r.active)).toBe(true); + + // Only apply sort + const result2 = await standardModifier( + db.selectFrom("test_users").selectAll(), + { + sortBy: { age: SortOrder.descending }, + }, + ).execute(); + + expect(result2[0].age).toBe(30); + expect(result2[2].age).toBe(20); + + // Only apply pagination + const result3 = await standardModifier( + db.selectFrom("test_users").selectAll(), + { + first: 2, + }, + ).execute(); + + expect(result3).toHaveLength(2); + }); + + it("should preserve the type safety of the query builder", () => { + const standardModifier = createStandardQueryModifier< + TestDatabase, + "test_users", + any + >("test_users"); + + const query = db.selectFrom("test_users").selectAll(); + + const result = standardModifier(query, { + sortBy: { age: SortOrder.ascending }, + }); + + // This should compile without type errors + const { sql } = result.compile(); + expect(sql).toContain("select"); + expect(sql).toContain("order by"); + }); + }); +}); diff --git a/test/lib/db/queryModifiers/typeRegistry.test.ts b/test/lib/db/queryModifiers/typeRegistry.test.ts new file mode 100644 index 00000000..e42d84ac --- /dev/null +++ b/test/lib/db/queryModifiers/typeRegistry.test.ts @@ -0,0 +1,225 @@ +import { beforeEach, describe, expect, it } from "vitest"; +import { container } from "tsyringe"; +import { TypeRegistry } from "../../../../src/lib/graphql/TypeRegistry.js"; +import { createEntitySortArgs } from "../../../../src/lib/graphql/createEntitySortArgs.js"; +import { createEntityWhereArgs } from "../../../../src/lib/graphql/createEntityWhereArgs.js"; +import { EntityTypeDefs } from "../../../../src/graphql/schemas/typeDefs/typeDefs.js"; + +// Test field definitions +const testFields = { + id: "string", + name: "string", +} as const; + +describe("TypeRegistry", () => { + let registry: TypeRegistry; + + beforeEach(() => { + // Reset the container before each 
test + container.clearInstances(); + registry = container.resolve(TypeRegistry); + }); + + describe("WhereArgs", () => { + it("should create new WhereArgs type when not found", () => { + const creatorCalled = { value: false }; + const whereArgs = registry.getOrCreateWhereInput( + EntityTypeDefs.Hypercert, + () => { + creatorCalled.value = true; + return createEntityWhereArgs(EntityTypeDefs.Hypercert, testFields); + }, + ); + + expect(creatorCalled.value).toBe(true); + expect(whereArgs).toBeDefined(); + expect(whereArgs.name).toBe("HypercertWhereInput"); + }); + + it("should not call creator function when type already exists", () => { + // First call to create the type + const firstCall = registry.getOrCreateWhereInput( + EntityTypeDefs.Hypercert, + () => createEntityWhereArgs(EntityTypeDefs.Hypercert, testFields), + ); + + // Second call should reuse existing type + const creatorCalled = { value: false }; + const secondCall = registry.getOrCreateWhereInput( + EntityTypeDefs.Hypercert, + () => { + creatorCalled.value = true; + throw new Error("Creator should not be called"); + }, + ); + + expect(creatorCalled.value).toBe(false); + expect(secondCall).toBe(firstCall); + }); + + it("should create different WhereArgs types for different entities", () => { + const firstEntity = registry.getOrCreateWhereInput( + EntityTypeDefs.Hypercert, + () => createEntityWhereArgs(EntityTypeDefs.Hypercert, testFields), + ); + const secondEntity = registry.getOrCreateWhereInput( + EntityTypeDefs.Fraction, + () => createEntityWhereArgs(EntityTypeDefs.Fraction, testFields), + ); + + expect(firstEntity).not.toBe(secondEntity); + expect(firstEntity.name).toBe("HypercertWhereInput"); + expect(secondEntity.name).toBe("FractionWhereInput"); + }); + + it("should throw error if type not found after creation attempt", () => { + // Mock Map.get to simulate type not being set + const originalGet = Map.prototype.get; + Map.prototype.get = () => undefined; + + expect(() => + 
registry.getOrCreateWhereInput(EntityTypeDefs.Hypercert, () => + createEntityWhereArgs(EntityTypeDefs.Hypercert, testFields), + ), + ).toThrow("WhereInput not found for type Hypercert"); + + // Restore original Map.get + Map.prototype.get = originalGet; + }); + }); + + describe("SortArgs", () => { + it("should create and store SortArgs type", () => { + const sortArgs = registry.getOrCreateSortOptions( + EntityTypeDefs.Hypercert, + () => createEntitySortArgs(EntityTypeDefs.Hypercert, testFields), + ); + + expect(sortArgs).toBeDefined(); + expect(sortArgs.name).toBe("HypercertSortOptions"); + }); + + it("should return the same SortArgs type for the same entity", () => { + const firstCall = registry.getOrCreateSortOptions( + EntityTypeDefs.Hypercert, + () => createEntitySortArgs(EntityTypeDefs.Hypercert, testFields), + ); + const secondCall = registry.getOrCreateSortOptions( + EntityTypeDefs.Hypercert, + () => createEntitySortArgs(EntityTypeDefs.Hypercert, testFields), + ); + + expect(firstCall).toBe(secondCall); + }); + + it("should create different SortArgs types for different entities", () => { + const firstEntity = registry.getOrCreateSortOptions( + EntityTypeDefs.Hypercert, + () => createEntitySortArgs(EntityTypeDefs.Hypercert, testFields), + ); + const secondEntity = registry.getOrCreateSortOptions( + EntityTypeDefs.Fraction, + () => createEntitySortArgs(EntityTypeDefs.Fraction, testFields), + ); + + expect(firstEntity).not.toBe(secondEntity); + expect(firstEntity.name).toBe("HypercertSortOptions"); + expect(secondEntity.name).toBe("FractionSortOptions"); + }); + + it("should throw error if type not found after creation attempt", () => { + // Mock Map.get to simulate type not being set + const originalGet = Map.prototype.get; + Map.prototype.get = () => undefined; + + expect(() => + registry.getOrCreateSortOptions(EntityTypeDefs.Hypercert, () => + createEntitySortArgs(EntityTypeDefs.Hypercert, testFields), + ), + ).toThrow("SortOptions not found for type 
Hypercert"); + + // Restore original Map.get + Map.prototype.get = originalGet; + }); + }); + + describe("Registry operations", () => { + it("should clear all cached types", () => { + // Create some types + registry.getOrCreateWhereInput(EntityTypeDefs.Hypercert, () => + createEntityWhereArgs(EntityTypeDefs.Hypercert, testFields), + ); + registry.getOrCreateSortOptions(EntityTypeDefs.Hypercert, () => + createEntitySortArgs(EntityTypeDefs.Hypercert, testFields), + ); + + // Clear the registry + registry.clear(); + + // Verify types are recreated (creator is called again) + const whereCreatorCalled = { value: false }; + const sortCreatorCalled = { value: false }; + + registry.getOrCreateWhereInput(EntityTypeDefs.Hypercert, () => { + whereCreatorCalled.value = true; + return createEntityWhereArgs(EntityTypeDefs.Hypercert, testFields); + }); + + registry.getOrCreateSortOptions(EntityTypeDefs.Hypercert, () => { + sortCreatorCalled.value = true; + return createEntitySortArgs(EntityTypeDefs.Hypercert, testFields); + }); + + expect(whereCreatorCalled.value).toBe(true); + expect(sortCreatorCalled.value).toBe(true); + }); + + it("should maintain type safety through generic parameters", () => { + // Create a type that matches the WhereArgsType structure + interface TestWhereType { + id?: { eq?: string }; + name?: { contains?: string }; + } + + // This should compile without type errors + const whereArgs = registry.getOrCreateWhereInput( + EntityTypeDefs.Hypercert, + () => createEntityWhereArgs(EntityTypeDefs.Hypercert, testFields), + ); + + // The returned type should be ClassType + const instance = new whereArgs(); + expect(instance).toHaveProperty("id"); + expect(instance).toHaveProperty("name"); + // Verify the structure matches our expectations + instance.id = { eq: "test" }; + instance.name = { contains: "test" }; + expect(instance.id?.eq).toBe("test"); + expect(instance.name?.contains).toBe("test"); + }); + }); + + describe("Singleton behavior", () => { + it("should 
maintain singleton instance across multiple resolves", () => { + const firstInstance = container.resolve(TypeRegistry); + const secondInstance = container.resolve(TypeRegistry); + expect(firstInstance).toBe(secondInstance); + }); + + it("should maintain state across multiple resolves", () => { + const firstInstance = container.resolve(TypeRegistry); + const whereArgs = firstInstance.getOrCreateWhereInput( + EntityTypeDefs.Hypercert, + () => createEntityWhereArgs(EntityTypeDefs.Hypercert, testFields), + ); + + const secondInstance = container.resolve(TypeRegistry); + const sameWhereArgs = secondInstance.getOrCreateWhereInput( + EntityTypeDefs.Hypercert, + () => createEntityWhereArgs(EntityTypeDefs.Hypercert, testFields), + ); + + expect(whereArgs).toBe(sameWhereArgs); + }); + }); +}); diff --git a/test/lib/graphql/BaseQueryArgs.test.ts b/test/lib/graphql/BaseQueryArgs.test.ts new file mode 100644 index 00000000..5d1f2fcf --- /dev/null +++ b/test/lib/graphql/BaseQueryArgs.test.ts @@ -0,0 +1,73 @@ +import { InputType } from "type-graphql"; +import { describe, expect, it } from "vitest"; +import { SortOrder } from "../../../src/graphql/schemas/enums/sortEnums.js"; +import { BaseQueryArgs } from "../../../src/lib/graphql/BaseQueryArgs.js"; +import { createEntityArgs } from "../../../src/lib/graphql/createEntityArgs.js"; + +const { WhereInput, SortOptions } = createEntityArgs("Contract", { + id: "string", + address: "string", + chain_id: "number", +}); + +describe("BaseQueryArgs", () => { + it("should create a class with all expected fields", () => { + const QueryArgs = BaseQueryArgs(WhereInput, SortOptions); + const instance = new QueryArgs(); + + // Check that the class has all expected properties + expect(instance).toHaveProperty("where"); + expect(instance).toHaveProperty("sortBy"); + expect(instance).toHaveProperty("first"); + expect(instance).toHaveProperty("offset"); + }); + + it("should maintain type information from input args", () => { + const QueryArgs = 
BaseQueryArgs(WhereInput, SortOptions); + const instance = new QueryArgs(); + + // Set valid values + instance.where = { id: { eq: "test" } }; + instance.sortBy = { address: SortOrder.ascending }; + instance.first = 10; + instance.offset = 0; + + // Type checks + expect(typeof instance.where?.id?.eq).toBe("string"); + expect(typeof instance.sortBy?.address).toBe("string"); + expect(typeof instance.first).toBe("number"); + expect(typeof instance.offset).toBe("number"); + }); + + it("should allow nullable fields", () => { + const QueryArgs = BaseQueryArgs(WhereInput, SortOptions); + const instance = new QueryArgs(); + + // All fields should be nullable + expect(instance.sortBy).toBeUndefined(); + expect(instance.first).toBeUndefined(); + expect(instance.offset).toBeUndefined(); + }); + + it("should require where field", () => { + const QueryArgs = BaseQueryArgs(WhereInput, SortOptions); + const instance = new QueryArgs(); + + // TypeScript should enforce this at compile time, but we can check at runtime + expect(instance).toHaveProperty("where"); + }); + + it("should work with empty input types", () => { + @InputType() + class EmptyWhereInput {} + + @InputType() + class EmptySortOptions {} + + const QueryArgs = BaseQueryArgs(EmptyWhereInput, EmptySortOptions); + const instance = new QueryArgs(); + + expect(instance).toHaveProperty("where"); + expect(instance).toHaveProperty("sortBy"); + }); +}); diff --git a/test/lib/graphql/createEntityArgs.test.ts b/test/lib/graphql/createEntityArgs.test.ts new file mode 100644 index 00000000..b71ceaed --- /dev/null +++ b/test/lib/graphql/createEntityArgs.test.ts @@ -0,0 +1,181 @@ +import "reflect-metadata"; +import { beforeEach, describe, expect, it } from "vitest"; +import { registry } from "../../../src/lib/graphql/TypeRegistry.js"; +import { createEntityArgs } from "../../../src/lib/graphql/createEntityArgs.js"; +import { EntityTypeDefs } from "../../../src/graphql/schemas/typeDefs/typeDefs.js"; +import { WhereFieldDefinitions 
} from "../../../src/lib/graphql/whereFieldDefinitions.js"; +import { SortOrder } from "../../../src/graphql/schemas/enums/sortEnums.js"; + +describe("createEntityArgs", () => { + beforeEach(() => { + // Reset the registry before each test + (registry as any).whereArgs = new Map(); + (registry as any).sortOptions = new Map(); + (registry as any).sortArgs = new Map(); + }); + + describe("basic functionality", () => { + it("should create WhereInput and SortOptions classes", () => { + const { WhereInput, SortOptions } = createEntityArgs( + EntityTypeDefs.Contract, + { + address: "string", + chain_id: "number", + }, + ); + + expect(WhereInput).toBeDefined(); + expect(WhereInput.name).toBe("ContractWhereInput"); + expect(SortOptions).toBeDefined(); + expect(SortOptions.name).toBe("ContractSortOptions"); + }); + + it("should create instances with correct field types", () => { + const { WhereInput, SortOptions } = createEntityArgs( + EntityTypeDefs.Contract, + { + address: "string", + chain_id: "number", + }, + ); + + const whereInstance = new WhereInput(); + const sortInstance = new SortOptions(); + + // Check field existence + expect(whereInstance).toHaveProperty("address"); + expect(whereInstance).toHaveProperty("chain_id"); + expect(sortInstance).toHaveProperty("address"); + expect(sortInstance).toHaveProperty("chain_id"); + + // Check initial values + expect(whereInstance.address).toBeUndefined(); + expect(whereInstance.chain_id).toBeUndefined(); + expect(sortInstance.address).toBeNull(); + expect(sortInstance.chain_id).toBeNull(); + }); + + it("should allow setting valid filter and sort values", () => { + const { WhereInput, SortOptions } = createEntityArgs( + EntityTypeDefs.Contract, + { + address: "string", + chain_id: "number", + }, + ); + + const whereInstance = new WhereInput(); + whereInstance.address = { contains: "0x123" }; + whereInstance.chain_id = { eq: 1 }; + + const sortInstance = new SortOptions(); + sortInstance.address = SortOrder.ascending; + 
sortInstance.chain_id = SortOrder.descending; + + // Check filter values + expect(whereInstance.address).toEqual({ contains: "0x123" }); + expect(whereInstance.chain_id).toEqual({ eq: 1 }); + + // Check sort values + expect(sortInstance.address).toBe(SortOrder.ascending); + expect(sortInstance.chain_id).toBe(SortOrder.descending); + }); + }); + + describe("nested reference fields", () => { + it("should handle single-level nested references", () => { + const { WhereInput, SortOptions } = createEntityArgs( + EntityTypeDefs.Hypercert, + { + token_id: "bigint", + metadata: { + type: "id", + references: { + entity: EntityTypeDefs.Metadata, + fields: WhereFieldDefinitions.Metadata.fields, + }, + }, + }, + ); + + const whereInstance = new WhereInput(); + const sortInstance = new SortOptions(); + + // Check primitive fields + expect(whereInstance.token_id).toBeUndefined(); + expect(sortInstance.token_id).toBeNull(); + + // Check nested fields + expect(whereInstance.metadata).toBeDefined(); + expect(whereInstance.metadata?.constructor.name).toBe( + "HypercertMetadataWhereInput", + ); + expect(Object.keys(whereInstance.metadata || {})).toEqual( + Object.keys(WhereFieldDefinitions.Metadata.fields), + ); + + // Sort options should not include reference fields + expect(sortInstance).not.toHaveProperty("metadata"); + }); + + it("should handle deeply nested references", () => { + const { WhereInput } = createEntityArgs(EntityTypeDefs.Attestation, { + uid: "string", + hypercert: { + type: "id", + references: { + entity: EntityTypeDefs.Hypercert, + fields: { + metadata: { + type: "id", + references: { + entity: EntityTypeDefs.Metadata, + fields: WhereFieldDefinitions.Metadata.fields, + }, + }, + }, + }, + }, + }); + + const instance = new WhereInput(); + expect(instance.uid).toBeUndefined(); + expect(instance.hypercert).toBeDefined(); + expect(instance.hypercert?.constructor.name).toBe( + "AttestationHypercertWhereInput", + ); + expect(instance.hypercert?.metadata).toBeDefined(); + 
expect(instance.hypercert?.metadata?.constructor.name).toBe( + "AttestationHypercertMetadataWhereInput", + ); + }); + }); + + describe("type registry", () => { + it("should reuse cached classes for same entity", () => { + const args1 = createEntityArgs(EntityTypeDefs.Contract, { + id: "string", + }); + const args2 = createEntityArgs(EntityTypeDefs.Contract, { + id: "string", + }); + + expect(args1.WhereInput).toBe(args2.WhereInput); + expect(args1.SortOptions).toBe(args2.SortOptions); + }); + + it("should create different classes for different entities", () => { + const args1 = createEntityArgs(EntityTypeDefs.Contract, { + id: "string", + }); + const args2 = createEntityArgs(EntityTypeDefs.Metadata, { + id: "string", + }); + + expect(args1.WhereInput).not.toBe(args2.WhereInput); + expect(args1.SortOptions).not.toBe(args2.SortOptions); + expect(args1.WhereInput.name).toBe("ContractWhereInput"); + expect(args2.WhereInput.name).toBe("MetadataWhereInput"); + }); + }); +}); diff --git a/test/lib/graphql/createEntitySortArgs.test.ts b/test/lib/graphql/createEntitySortArgs.test.ts new file mode 100644 index 00000000..0534fb4a --- /dev/null +++ b/test/lib/graphql/createEntitySortArgs.test.ts @@ -0,0 +1,197 @@ +import "reflect-metadata"; +import { getMetadataStorage } from "type-graphql"; +import { beforeEach, describe, expect, it } from "vitest"; +import { SortOrder } from "../../../src/graphql/schemas/enums/sortEnums.js"; +import { EntityTypeDefs } from "../../../src/graphql/schemas/typeDefs/typeDefs.js"; +import { createEntitySortArgs } from "../../../src/lib/graphql/createEntitySortArgs.js"; + +describe("createEntitySort", () => { + beforeEach(() => { + getMetadataStorage().clear(); + }); + + it("should create classes with correct names", () => { + const SortArgs = createEntitySortArgs("Contract", { + address: "string", + chain_id: "number", + }); + + expect(SortArgs.name).toBe("ContractSortOptions"); + }); + + it("should create fields for each sortable property", () => 
{ + const SortArgs = createEntitySortArgs("Contract", { + address: "string", + chain_id: "number", + }); + + const metadata = getMetadataStorage(); + const fields = metadata.fields.filter( + (field) => field.target.name === "ContractSortOptions", + ); + + expect(fields).toHaveLength(2); + expect(fields[0].name).toBe("address"); + expect(fields[1].name).toBe("chain_id"); + + const sortArgs = new SortArgs(); + expect(Object.keys(sortArgs).length).toBe(2); + expect(Object.keys(sortArgs)).toContain("address"); + expect(Object.keys(sortArgs)).toContain("chain_id"); + expect(sortArgs.address).toBeNull(); + expect(sortArgs.chain_id).toBeNull(); + }); + + it("should initialize with null values", () => { + const SortArgs = createEntitySortArgs("Contract", { + address: "string", + chain_id: "number", + }); + + const instance = new SortArgs(); + expect(instance.address).toBeNull(); + expect(instance.chain_id).toBeNull(); + + // Expect fields to be defined on the object + expect(Object.keys(instance).length).toBe(2); + expect(Object.keys(instance)).toContain("address"); + expect(Object.keys(instance)).toContain("chain_id"); + }); + + it("should create sort options for primitive types only", () => { + const SortArgs = createEntitySortArgs("Contract", { + address: "string", + chain_id: "number", + metadata: { + type: "id", + references: { + entity: "Metadata", + fields: { name: "string" }, + }, + }, + }); + + const instance = new SortArgs(); + + expect("address" in instance).toBe(true); + expect("chain_id" in instance).toBe(true); + expect("metadata" in instance).toBe(false); + }); + + it("should allow setting valid sort orders", () => { + const SortArgs = createEntitySortArgs("Contract", { + address: "string", + chain_id: "number", + }); + + const instance = new SortArgs(); + + instance.address = SortOrder.ascending; + instance.chain_id = SortOrder.descending; + + expect(instance.address).toBe(SortOrder.ascending); + expect(instance.chain_id).toBe(SortOrder.descending); + }); + 
+ it("should handle complex entity definitions", () => { + const SortArgs = createEntitySortArgs(EntityTypeDefs.Hypercert, { + token_id: "bigint", + creation_block_timestamp: "bigint", + units: "bigint", + sales_count: "number", + }); + + const instance = new SortArgs(); + + instance.token_id = SortOrder.descending; + instance.creation_block_timestamp = SortOrder.ascending; + + expect(instance.token_id).toBe(SortOrder.descending); + expect(instance.creation_block_timestamp).toBe(SortOrder.ascending); + }); + + it("should create nullable sort fields", () => { + createEntitySortArgs("Contract", { + address: "string", + }); + + const metadata = getMetadataStorage(); + const fields = metadata.fields.filter( + (field) => field.target.name === "ContractSortOptions", + ); + + expect(fields[0].typeOptions?.nullable).toBe(true); + }); + + it("should handle empty field definitions", () => { + const SortArgs = createEntitySortArgs(EntityTypeDefs.Contract, {}); + const instance = new SortArgs(); + expect(Object.keys(instance).length).toBe(0); + }); + + it("should accept valid sort orders and null", () => { + const SortArgs = createEntitySortArgs(EntityTypeDefs.Contract, { + address: "string", + }); + + const instance = new SortArgs(); + + // Should accept valid sort orders + instance.address = SortOrder.ascending; + expect(instance.address).toBe(SortOrder.ascending); + + instance.address = SortOrder.descending; + expect(instance.address).toBe(SortOrder.descending); + + // Should accept null + instance.address = null; + expect(instance.address).toBeNull(); + }); + + it("should properly apply field decorators", () => { + createEntitySortArgs(EntityTypeDefs.Contract, { + address: "string", + }); + + const metadata = getMetadataStorage(); + const fields = metadata.fields.filter( + (field) => field.target.name === "ContractSortOptions", + ); + + expect(fields[0].typeOptions?.nullable).toBe(true); + expect(fields[0].getType()).toBe(SortOrder); + }); + + it("should handle malformed 
field definitions gracefully", () => { + const SortArgs = createEntitySortArgs(EntityTypeDefs.Contract, { + // @ts-expect-error - Testing invalid field type + invalid: { type: "invalid" }, + valid: "string", + }); + + const instance = new SortArgs(); + expect("valid" in instance).toBe(true); + expect("invalid" in instance).toBe(false); + }); + + it("should not add complex nested field definitions", () => { + const SortArgs = createEntitySortArgs(EntityTypeDefs.Contract, { + simple: "string", + nested: { + type: "id", + references: { + entity: EntityTypeDefs.Metadata, + fields: { + field1: "string", + field2: "number", + }, + }, + }, + }); + + const instance = new SortArgs(); + expect("simple" in instance).toBe(true); + // We don't support nested fields yet in sort args + expect("nested" in instance).toBe(false); + }); +}); diff --git a/test/lib/graphql/createEntityWhereArgs.test.ts b/test/lib/graphql/createEntityWhereArgs.test.ts new file mode 100644 index 00000000..b0ee3c4c --- /dev/null +++ b/test/lib/graphql/createEntityWhereArgs.test.ts @@ -0,0 +1,181 @@ +import "reflect-metadata"; +import { getMetadataStorage } from "type-graphql"; +import { beforeEach, describe, expect, it } from "vitest"; +import { EntityTypeDefs } from "../../../src/graphql/schemas/typeDefs/typeDefs.js"; +import { createEntityWhereArgs } from "../../../src/lib/graphql/createEntityWhereArgs.js"; +import { WhereFieldDefinitions } from "../../../src/lib/graphql/whereFieldDefinitions.js"; +import { SearchOptionMap } from "../../../src/types/argTypes.js"; + +describe("createEntityWhereArgs", () => { + beforeEach(() => { + // Clear type-graphql metadata between tests + getMetadataStorage().clear(); + }); + + describe("basic functionality", () => { + it("should create a class with the correct name", () => { + const WhereArgs = createEntityWhereArgs(EntityTypeDefs.Contract, { + address: "string", + chain_id: "number", + }); + + expect(WhereArgs.name).toBe("ContractWhereInput"); + }); + + it("should 
create fields with correct types for primitive fields", () => { + createEntityWhereArgs(EntityTypeDefs.Contract, { + address: "string", + chain_id: "number", + }); + + const metadata = getMetadataStorage(); + const fields = metadata.fields.filter( + (field) => field.target.name === "ContractWhereInput", + ); + + expect(fields).toHaveLength(2); + expect(fields.map((f) => f.name)).toEqual(["address", "chain_id"]); + expect(fields[0].typeOptions?.nullable).toBe(true); + expect(fields[1].typeOptions?.nullable).toBe(true); + expect(fields[0].getType()).toBe(SearchOptionMap.string); + expect(fields[1].getType()).toBe(SearchOptionMap.number); + }); + + it("should initialize all fields as undefined in constructor", () => { + const WhereArgs = createEntityWhereArgs(EntityTypeDefs.Contract, { + address: "string", + chain_id: "number", + }); + + const instance = new WhereArgs(); + expect(instance.address).toBeUndefined(); + expect(instance.chain_id).toBeUndefined(); + }); + + it("should allow setting filter values for primitive fields", () => { + const WhereArgs = createEntityWhereArgs(EntityTypeDefs.Contract, { + address: "string", + chain_id: "number", + }); + + const instance = new WhereArgs(); + instance.address = { contains: "0x123" }; + instance.chain_id = { eq: 1 }; + + expect(instance.address).toEqual({ contains: "0x123" }); + expect(instance.chain_id).toEqual({ eq: 1 }); + }); + }); + + describe("nested reference fields", () => { + it("should handle single-level nested reference fields", () => { + const WhereArgs = createEntityWhereArgs(EntityTypeDefs.Hypercert, { + token_id: "bigint", + metadata: { + type: "id", + references: { + entity: EntityTypeDefs.Metadata, + fields: WhereFieldDefinitions.Metadata.fields, + }, + }, + }); + + const metadata = getMetadataStorage(); + const fields = metadata.fields.filter( + (field) => field.target.name === "HypercertWhereInput", + ); + + expect(fields).toHaveLength(2); + expect(fields.map((f) => f.name)).toEqual(["token_id", 
"metadata"]); + + const instance = new WhereArgs(); + expect(instance.token_id).toBeUndefined(); + expect(instance.metadata).toBeDefined(); + expect(instance.metadata?.constructor.name).toBe( + "HypercertMetadataWhereInput", + ); + expect(Object.keys(instance.metadata || {})).toEqual( + Object.keys(WhereFieldDefinitions.Metadata.fields), + ); + }); + + it("should handle deeply nested reference fields", () => { + createEntityWhereArgs(EntityTypeDefs.Attestation, { + uid: "string", + token_id: "bigint", + hypercert: { + type: "id", + references: { + entity: EntityTypeDefs.Hypercert, + fields: { + metadata: { + type: "id", + references: { + entity: EntityTypeDefs.Metadata, + fields: WhereFieldDefinitions.Metadata.fields, + }, + }, + }, + }, + }, + }); + + const metadata = getMetadataStorage(); + const allFields = metadata.fields; + + // Check attestation level + const attestationFields = allFields.filter( + (field) => field.target.name === "AttestationWhereInput", + ); + expect(attestationFields).toHaveLength(3); + + // Check hypercert level + const hypercertFields = allFields.filter( + (field) => field.target.name === "AttestationHypercertWhereInput", + ); + expect(hypercertFields).toHaveLength(1); + + // Check metadata level + const metadataFields = allFields.filter( + (field) => + field.target.name === "AttestationHypercertMetadataWhereInput", + ); + expect(metadataFields).toHaveLength( + Object.keys(WhereFieldDefinitions.Metadata.fields).length, + ); + }); + }); + + describe("error handling", () => { + it("should throw error for invalid primitive field type", () => { + expect(() => { + createEntityWhereArgs(EntityTypeDefs.Contract, { + // @ts-expect-error - Testing invalid type + name: "InvalidType", + }); + }).toThrow('Invalid field type "InvalidType" for field "name"'); + }); + }); + + describe("field initialization", () => { + it("should initialize nested reference fields with their own instances", () => { + const WhereArgs = 
createEntityWhereArgs(EntityTypeDefs.Hypercert, { + token_id: "bigint", + metadata: { + type: "id", + references: { + entity: EntityTypeDefs.Metadata, + fields: { name: "string" }, + }, + }, + }); + + const instance = new WhereArgs(); + expect(instance.metadata).toBeDefined(); + expect(instance.metadata?.constructor.name).toBe( + "HypercertMetadataWhereInput", + ); + expect(instance.metadata?.name).toBeUndefined(); + }); + }); +}); diff --git a/test/services/database/entities/AllowListRecordEntityService.test.ts b/test/services/database/entities/AllowListRecordEntityService.test.ts new file mode 100644 index 00000000..91db4530 --- /dev/null +++ b/test/services/database/entities/AllowListRecordEntityService.test.ts @@ -0,0 +1,105 @@ +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { AllowlistRecordService } from "../../../../src/services/database/entities/AllowListRecordEntityService.js"; + +// Create mock outside of describe block to ensure it's available during module mocking +const mockEntityService = { + getMany: vi.fn(), + getSingle: vi.fn(), +}; + +// Mock the module before any tests run +vi.mock( + "../../../../src/services/database/entities/EntityServiceFactory.js", + () => ({ + createEntityService: () => mockEntityService, + }), +); + +describe("AllowlistRecordService", () => { + let service: AllowlistRecordService; + + beforeEach(() => { + // Reset all mocks before each test + vi.clearAllMocks(); + + // Create service instance + service = new AllowlistRecordService(); + }); + + describe("getAllowlistRecords", () => { + it("should call entityService.getMany with provided arguments", async () => { + const args = { + where: { + hypercert: { + hypercert_id: { eq: "test-id" }, + }, + }, + }; + + await service.getAllowlistRecords(args); + + expect(mockEntityService.getMany).toHaveBeenCalledTimes(1); + expect(mockEntityService.getMany).toHaveBeenCalledWith(args); + }); + + it("should return the result from entityService.getMany", async () 
=> { + const expectedResult = { + data: [{ id: "1", hypercert_id: "test-id" }], + count: 1, + }; + mockEntityService.getMany.mockResolvedValue(expectedResult); + + const result = await service.getAllowlistRecords({}); + + expect(result).toEqual(expectedResult); + }); + + it("should handle errors from entityService.getMany", async () => { + const error = new Error("Database error"); + mockEntityService.getMany.mockRejectedValue(error); + + await expect(service.getAllowlistRecords({})).rejects.toThrow(error); + }); + }); + + describe("getAllowlistRecord", () => { + it("should call entityService.getSingle with provided arguments", async () => { + const args = { + where: { + hypercert: { + hypercert_id: { eq: "test-id" }, + }, + }, + }; + + await service.getAllowlistRecord(args); + + expect(mockEntityService.getSingle).toHaveBeenCalledTimes(1); + expect(mockEntityService.getSingle).toHaveBeenCalledWith(args); + }); + + it("should return the result from entityService.getSingle", async () => { + const expectedResult = { id: "1", hypercert_id: "test-id" }; + mockEntityService.getSingle.mockResolvedValue(expectedResult); + + const result = await service.getAllowlistRecord({}); + + expect(result).toEqual(expectedResult); + }); + + it("should handle null result from entityService.getSingle", async () => { + mockEntityService.getSingle.mockResolvedValue(null); + + const result = await service.getAllowlistRecord({}); + + expect(result).toBeNull(); + }); + + it("should handle errors from entityService.getSingle", async () => { + const error = new Error("Database error"); + mockEntityService.getSingle.mockRejectedValue(error); + + await expect(service.getAllowlistRecord({})).rejects.toThrow(error); + }); + }); +}); diff --git a/test/services/database/entities/AttestationEntityService.test.ts b/test/services/database/entities/AttestationEntityService.test.ts new file mode 100644 index 00000000..cdcaeac0 --- /dev/null +++ 
b/test/services/database/entities/AttestationEntityService.test.ts @@ -0,0 +1,262 @@ +import { beforeEach, describe, expect, it, vi } from "vitest"; +import type { GetAttestationsArgs } from "../../../../src/graphql/schemas/args/attestationArgs.js"; +import { AttestationService } from "../../../../src/services/database/entities/AttestationEntityService.js"; +import type { Json } from "../../../../src/types/supabaseCaching.js"; + +type AttestationData = { + id: string; + data: Record; + [key: string]: Json | undefined; +}; + +type ParsedData = { + token_id: string; + other_field: string; + [key: string]: string | undefined; +}; + +const mockEntityService = { + getMany: vi.fn(), + getSingle: vi.fn(), +}; +// Mock the createEntityService function +vi.mock( + "../../../../src/services/database/entities/EntityServiceFactory.js", + () => ({ + createEntityService: () => mockEntityService, + }), +); + +describe("AttestationService", () => { + let service: AttestationService; + + beforeEach(() => { + vi.clearAllMocks(); + service = new AttestationService(); + }); + + describe("getAttestations", () => { + it("should return attestations with parsed data", async () => { + // Arrange + const args: GetAttestationsArgs = { + where: { + id: { eq: "test-id" }, + }, + }; + const mockResponse = { + data: [ + { + id: "1", + data: { + token_id: "123456789", + uid: "0x123456789", + }, + }, + { + id: "2", + data: { + token_id: "987654321", + uid: "0x123456789", + }, + }, + ] as AttestationData[], + count: 2, + }; + mockEntityService.getMany.mockResolvedValue(mockResponse); + + // Act + const result = await service.getAttestations(args); + + // Assert + expect(mockEntityService.getMany).toHaveBeenCalledWith(args); + expect(result.count).toBe(2); + expect(result.data).toHaveLength(2); + const data0 = result.data[0].data as Record; + const data1 = result.data[1].data as Record; + expect(data0.token_id).toBe("123456789"); + expect(data1.token_id).toBe("987654321"); + }); + + it("should 
handle attestations without token_id in data", async () => { + // Arrange + const mockResponse = { + data: [ + { + id: "1", + data: { + other_field: "value", + }, + other_field: "value", + }, + ] as AttestationData[], + count: 1, + }; + mockEntityService.getMany.mockResolvedValue(mockResponse); + + // Act + const result = await service.getAttestations({}); + + // Assert + expect(mockEntityService.getMany).toHaveBeenCalledWith({}); + expect(result.count).toBe(1); + const data = result.data[0].data as Record; + expect(data.other_field).toBe("value"); + expect(data.token_id).toBeUndefined(); + }); + + it("should handle empty result set", async () => { + // Arrange + const mockResponse = { + data: [], + count: 0, + }; + mockEntityService.getMany.mockResolvedValue(mockResponse); + + // Act + const result = await service.getAttestations({}); + + // Assert + expect(result.count).toBe(0); + expect(result.data).toHaveLength(0); + }); + + it("should handle errors from entityService.getMany", async () => { + // Arrange + const error = new Error("Database error"); + mockEntityService.getMany.mockRejectedValue(error); + + // Act & Assert + await expect(service.getAttestations({})).rejects.toThrow(error); + }); + }); + + describe("getAttestation", () => { + it("should return a single attestation", async () => { + // Arrange + const args: GetAttestationsArgs = { + where: { + id: { eq: "test-id" }, + }, + }; + const mockResponse = { + id: "1", + data: { + token_id: "123456789", + uid: "0x123456789", + }, + } as AttestationData; + mockEntityService.getSingle.mockResolvedValue(mockResponse); + + // Act + const result = await service.getAttestation(args); + + // Assert + expect(mockEntityService.getSingle).toHaveBeenCalledWith(args); + expect(result).toEqual(mockResponse); + }); + + it("should return undefined when attestation is not found", async () => { + // Arrange + mockEntityService.getSingle.mockResolvedValue(undefined); + + // Act + const result = await 
service.getAttestation({}); + + // Assert + expect(result).toBeUndefined(); + expect(mockEntityService.getSingle).toHaveBeenCalledWith({}); + }); + + it("should handle errors from entityService.getSingle", async () => { + // Arrange + const error = new Error("Database error"); + mockEntityService.getSingle.mockRejectedValue(error); + + // Act & Assert + await expect(service.getAttestation({})).rejects.toThrow(error); + }); + }); + + describe("parseAttestation", () => { + it("should convert token_id to string", () => { + // Arrange + const data = { + token_id: 123456789n, + other_field: "value", + }; + + // Act + const result = service.parseAttestation(data as unknown as Json); + + // Assert + expect(result).not.toBeNull(); + if (result && typeof result === "object" && !Array.isArray(result)) { + const parsed = result as ParsedData; + expect(parsed.token_id).toBe("123456789"); + expect(parsed.other_field).toBe("value"); + } + }); + + it("should handle string token_id", () => { + // Arrange + const data = { + token_id: "123456789", + other_field: "value", + }; + + // Act + const result = service.parseAttestation(data as unknown as Json); + + // Assert + expect(result).not.toBeNull(); + if (result && typeof result === "object" && !Array.isArray(result)) { + const parsed = result as ParsedData; + expect(parsed.token_id).toBe("123456789"); + expect(parsed.other_field).toBe("value"); + } + }); + + it("should handle null data", () => { + // Act + const result = service.parseAttestation(null); + + // Assert + expect(result).toBeNull(); + }); + + it("should handle data without token_id", () => { + // Arrange + const data = { + other_field: "value", + }; + + // Act + const result = service.parseAttestation(data as unknown as Json); + + // Assert + expect(result).toEqual(data); + }); + + it("should handle empty object", () => { + // Act + const result = service.parseAttestation({} as Json); + + // Assert + expect(result).toEqual({}); + }); + + it("should handle token_id with 
null value", () => { + // Arrange + const data = { + token_id: null, + other_field: "value", + }; + + // Act + const result = service.parseAttestation(data as unknown as Json); + + // Assert + expect(result).toEqual(data); + }); + }); +}); diff --git a/test/services/database/entities/AttestationSchemaEntityService.test.ts b/test/services/database/entities/AttestationSchemaEntityService.test.ts new file mode 100644 index 00000000..ced933ab --- /dev/null +++ b/test/services/database/entities/AttestationSchemaEntityService.test.ts @@ -0,0 +1,140 @@ +import { beforeEach, describe, expect, it, vi } from "vitest"; +import type { GetAttestationSchemasArgs } from "../../../../src/graphql/schemas/args/attestationSchemaArgs.js"; +import { AttestationSchemaService } from "../../../../src/services/database/entities/AttestationSchemaEntityService.js"; + +const ZERO_ADDRESS = "0x0000000000000000000000000000000000000000"; + +const mockEntityService = { + getMany: vi.fn(), + getSingle: vi.fn(), +}; + +// Mock the createEntityService function +vi.mock( + "../../../../src/services/database/entities/EntityServiceFactory.js", + () => ({ + createEntityService: () => mockEntityService, + }), +); + +describe("AttestationSchemaService", () => { + let service: AttestationSchemaService; + + beforeEach(() => { + vi.clearAllMocks(); + service = new AttestationSchemaService(); + }); + + describe("getAttestationSchemas", () => { + it("should return attestation schemas", async () => { + // Arrange + const args: GetAttestationSchemasArgs = { + where: { + id: { eq: "test-id" }, + }, + }; + const mockResponse = { + data: [ + { + id: "1", + chain_id: 1, + schema: { type: "test" }, + resolver: ZERO_ADDRESS, + revocable: true, + }, + { + id: "2", + chain_id: 1, + schema: { type: "test2" }, + resolver: ZERO_ADDRESS, + revocable: false, + }, + ], + count: 2, + }; + mockEntityService.getMany.mockResolvedValue(mockResponse); + + // Act + const result = await service.getAttestationSchemas(args); + + // 
Assert + expect(mockEntityService.getMany).toHaveBeenCalledWith(args); + expect(result.count).toBe(2); + expect(result.data).toHaveLength(2); + expect(result.data[0].id).toBe("1"); + expect(result.data[1].id).toBe("2"); + }); + + it("should handle empty result set", async () => { + // Arrange + const mockResponse = { + data: [], + count: 0, + }; + mockEntityService.getMany.mockResolvedValue(mockResponse); + + // Act + const result = await service.getAttestationSchemas({}); + + // Assert + expect(result.count).toBe(0); + expect(result.data).toHaveLength(0); + }); + + it("should handle errors from entityService.getMany", async () => { + // Arrange + const error = new Error("Database error"); + mockEntityService.getMany.mockRejectedValue(error); + + // Act & Assert + await expect(service.getAttestationSchemas({})).rejects.toThrow(error); + }); + }); + + describe("getAttestationSchema", () => { + it("should return a single attestation schema", async () => { + // Arrange + const args: GetAttestationSchemasArgs = { + where: { + id: { eq: "test-id" }, + }, + }; + const mockResponse = { + id: "1", + chain_id: 1, + schema: { type: "test" }, + resolver: ZERO_ADDRESS, + revocable: true, + }; + mockEntityService.getSingle.mockResolvedValue(mockResponse); + + // Act + const result = await service.getAttestationSchema(args); + + // Assert + expect(mockEntityService.getSingle).toHaveBeenCalledWith(args); + expect(result).toEqual(mockResponse); + }); + + it("should return undefined when schema is not found", async () => { + // Arrange + mockEntityService.getSingle.mockResolvedValue(undefined); + + // Act + const result = await service.getAttestationSchema({}); + + // Assert + expect(result).toBeUndefined(); + expect(mockEntityService.getSingle).toHaveBeenCalledWith({}); + }); + + it("should handle errors from entityService.getSingle", async () => { + // Arrange + const error = new Error("Database error"); + mockEntityService.getSingle.mockRejectedValue(error); + + // Act & Assert 
+ await expect(service.getAttestationSchema({})).rejects.toThrow(error); + }); + }); +}); diff --git a/test/services/database/entities/BlueprintsEntityService.test.ts b/test/services/database/entities/BlueprintsEntityService.test.ts new file mode 100644 index 00000000..997a3d25 --- /dev/null +++ b/test/services/database/entities/BlueprintsEntityService.test.ts @@ -0,0 +1,353 @@ +import { faker } from "@faker-js/faker"; +import { Kysely } from "kysely"; +import { container } from "tsyringe"; +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { DataKyselyService } from "../../../../src/client/kysely.js"; +import { GetBlueprintsArgs } from "../../../../src/graphql/schemas/args/blueprintArgs.js"; +import { BlueprintsService } from "../../../../src/services/database/entities/BlueprintsEntityService.js"; +import { UsersService } from "../../../../src/services/database/entities/UsersEntityService.js"; +import type { DataDatabase } from "../../../../src/types/kyselySupabaseData.js"; +import { + createTestDataDatabase, + generateHypercertId, + generateMockAddress, + generateMockBlueprint, + generateMockCollection, + generateMockUser, +} from "../../../utils/testUtils.js"; + +const mockDb = vi.fn(); + +//TODO introduce this in-memory kysely service in the test utils and other tests +vi.mock("../../../../src/client/kysely.js", () => ({ + get DataKyselyService() { + return class MockDataKyselyService { + getConnection() { + return mockDb(); + } + get db() { + return mockDb(); + } + }; + }, + get kyselyData() { + return mockDb(); + }, +})); + +describe("BlueprintsService", () => { + let blueprintsService: BlueprintsService; + let usersService: UsersService; + let db: Kysely; + + beforeEach(async () => { + vi.clearAllMocks(); + + ({ db } = await createTestDataDatabase()); + + mockDb.mockReturnValue(db); + + usersService = new UsersService(container.resolve(DataKyselyService)); + + blueprintsService = new BlueprintsService( + 
container.resolve(DataKyselyService), + usersService, + ); + }); + + describe("getBlueprints", () => { + it.skip("should return blueprints with correct data", async () => { + // TODO: Reenable this test when pg-mem supports views + // Arrange + const mockBlueprint = generateMockBlueprint(); + await db.insertInto("blueprints").values(mockBlueprint).execute(); + const args: GetBlueprintsArgs = { + where: { + id: { eq: mockBlueprint.id }, + }, + }; + + // Act + const result = await blueprintsService.getBlueprints(args); + + // Assert + expect(result.count).toBe(1); + expect(result.data).toHaveLength(1); + expect(result.data[0].id).toBe(mockBlueprint.id); + expect(result.data[0].form_values).toEqual(mockBlueprint.form_values); + expect(result.data[0].minter_address).toBe(mockBlueprint.minter_address); + expect(result.data[0].minted).toBe(mockBlueprint.minted); + expect(result.data[0].hypercert_ids).toEqual(mockBlueprint.hypercert_ids); + }); + + it("should handle empty result set", async () => { + // TODO: Reenable this test when pg-mem supports views + // Arrange + const args: GetBlueprintsArgs = {}; + + // Act + const result = await blueprintsService.getBlueprints(args); + + // Assert + expect(result.count).toBe(0); + expect(result.data).toHaveLength(0); + }); + + it("should handle errors from entityService.getMany", async () => { + // Arrange + // Mock the database to throw an error + vi.spyOn(db, "selectFrom").mockImplementation(() => { + throw new Error("Database error"); + }); + + // Act & Assert + await expect(blueprintsService.getBlueprints({})).rejects.toThrow( + "Database error", + ); + }); + }); + + describe("getBlueprint", () => { + it.skip("should return a single blueprint", async () => { + // TODO: Reenable this test when pg-mem supports views + const mockBlueprint = generateMockBlueprint(); + + // Insert test data into pg-mem + await db.insertInto("blueprints").values(mockBlueprint).execute(); + + // Arrange + const args: GetBlueprintsArgs = { + where: { 
+ id: { eq: mockBlueprint.id }, + }, + }; + + // Act + const result = await blueprintsService.getBlueprint(args); + + // Assert + expect(result).toBeDefined(); + expect(result?.id).toBe(mockBlueprint.id); + expect(result?.form_values).toEqual(mockBlueprint.form_values); + expect(result?.minter_address).toBe(mockBlueprint.minter_address); + expect(result?.minted).toBe(mockBlueprint.minted); + expect(result?.hypercert_ids).toEqual(mockBlueprint.hypercert_ids); + }); + + it.skip("should return undefined when blueprint not found", async () => { + // TODO: Reenable this test when pg-mem supports views + // Arrange + const args: GetBlueprintsArgs = { + where: { id: { eq: 999 } }, + }; + + // Act + const result = await blueprintsService.getBlueprint(args); + + // Assert + expect(result).toBeUndefined(); + }); + }); + + describe("getBlueprintAdmins", () => { + it("should return blueprint admins", async () => { + // Arrange + const mockBlueprint = generateMockBlueprint(); + await db.insertInto("blueprints").values(mockBlueprint).execute(); + + const mockUser = generateMockUser(); + await db.insertInto("users").values(mockUser).execute(); + + await db + .insertInto("blueprint_admins") + .values({ + blueprint_id: mockBlueprint.id, + user_id: mockUser.id, + created_at: new Date().toISOString(), + }) + .execute(); + + // Act + const result = await blueprintsService.getBlueprintAdmins( + mockBlueprint.id, + ); + + // Assert + expect(result).toHaveLength(1); + expect(result[0].id).toBe(mockUser.id); + expect(result[0].display_name).toBe(mockUser.display_name); + }); + }); + + describe("deleteBlueprint", () => { + it("should delete a blueprint", async () => { + // Arrange + const mockBlueprint = generateMockBlueprint(); + await db.insertInto("blueprints").values(mockBlueprint).execute(); + + // Act + await blueprintsService.deleteBlueprint(mockBlueprint.id); + + // Assert + const deletedBlueprint = await db + .selectFrom("blueprints") + .where("id", "=", mockBlueprint.id) + 
.executeTakeFirst(); + expect(deletedBlueprint).toBeUndefined(); + }); + }); + + describe("upsertBlueprints", () => { + it("should create or update blueprints", async () => { + // Arrange + const mockBlueprint = generateMockBlueprint(); + const [insertedBlueprint] = await db + .insertInto("blueprints") + .values(mockBlueprint) + .returning("id") + .execute(); + + const updatedBlueprint = { + ...mockBlueprint, + form_values: { ...mockBlueprint.form_values, name: "Updated Name" }, + }; + + // Act + const result = await blueprintsService.upsertBlueprints([ + updatedBlueprint, + ]); + + // Assert + expect(result).toHaveLength(1); + expect(result[0].id).toBe(insertedBlueprint.id); + + const updatedRecord = await db + .selectFrom("blueprints") + .where("id", "=", insertedBlueprint.id) + .selectAll() + .executeTakeFirst(); + // casting because form_values is a jsonb column + expect((updatedRecord?.form_values as { name: string }).name).toBe( + "Updated Name", + ); + }); + }); + + describe("addAdminToBlueprint", () => { + it("should add an admin to a blueprint", async () => { + // Arrange + const mockBlueprint = generateMockBlueprint(); + await db.insertInto("blueprints").values(mockBlueprint).execute(); + + const adminAddress = generateMockAddress(); + const chainId = 1; + + // Act + const result = await blueprintsService.addAdminToBlueprint( + mockBlueprint.id, + adminAddress, + chainId, + ); + + // Assert + expect(result).toBeDefined(); + expect(result?.blueprint_id).toBe(mockBlueprint.id); + + const adminUser = await db + .selectFrom("users") + .where("address", "=", adminAddress) + .selectAll() + .executeTakeFirst(); + expect(adminUser).toBeDefined(); + expect(adminUser?.chain_id).toBe(chainId); + }); + }); + + describe("mintBlueprintAndSwapInCollections", () => { + it("should mint blueprint and update collections", async () => { + // Arrange + const mockBlueprint = generateMockBlueprint(); + await db.insertInto("blueprints").values(mockBlueprint).execute(); + + const 
mockCollection = generateMockCollection(); + await db + .insertInto("collections") + .values({ + id: mockCollection.id, + name: mockCollection.name, + description: mockCollection.description, + chain_ids: mockCollection.chain_ids.map((id) => Number(id)), + hidden: mockCollection.hidden, + created_at: new Date().toISOString(), + }) + .execute(); + + const hyperboardId = faker.string.uuid(); + const displaySize = 100; + await db + .insertInto("hyperboard_blueprint_metadata") + .values({ + blueprint_id: mockBlueprint.id, + hyperboard_id: hyperboardId, + collection_id: mockCollection.id, + display_size: displaySize, + created_at: new Date().toISOString(), + }) + .execute(); + + await db + .insertInto("collection_blueprints") + .values({ + blueprint_id: mockBlueprint.id, + collection_id: mockCollection.id, + created_at: new Date().toISOString(), + }) + .execute(); + + const hypercertId = generateHypercertId(); + + // Act + // Note that we intercept the array_append query in the test utils + // All other calls actually interact with the database + await blueprintsService.mintBlueprintAndSwapInCollections( + mockBlueprint.id, + hypercertId, + ); + + // Assert + const updatedBlueprint = await db + .selectFrom("blueprints") + .where("id", "=", mockBlueprint.id) + .selectAll() + .executeTakeFirst(); + + console.log(updatedBlueprint); + expect(updatedBlueprint?.minted).toBe(true); + // expect(updatedBlueprint?.hypercert_ids).toContain(hypercertId); + + const hypercert = await db + .selectFrom("hypercerts") + .where("hypercert_id", "=", hypercertId) + .where("collection_id", "=", mockCollection.id) + .executeTakeFirst(); + expect(hypercert).toBeDefined(); + + const hypercertMetadata = await db + .selectFrom("hyperboard_hypercert_metadata") + .where("hypercert_id", "=", hypercertId) + .where("collection_id", "=", mockCollection.id) + .where("hyperboard_id", "=", hyperboardId) + .selectAll() + .executeTakeFirst(); + expect(hypercertMetadata).toBeDefined(); + 
expect(hypercertMetadata?.display_size).toBe(displaySize); + + const collectionBlueprint = await db + .selectFrom("collection_blueprints") + .where("blueprint_id", "=", mockBlueprint.id) + .selectAll() + .executeTakeFirst(); + expect(collectionBlueprint).toBeUndefined(); + }); + }); +}); diff --git a/test/services/database/entities/CollectionEntityService.test.ts b/test/services/database/entities/CollectionEntityService.test.ts new file mode 100644 index 00000000..ffce86ea --- /dev/null +++ b/test/services/database/entities/CollectionEntityService.test.ts @@ -0,0 +1,540 @@ +import { Kysely } from "kysely"; +import { container } from "tsyringe"; +import { beforeEach, describe, expect, it, vi, type Mock } from "vitest"; +import { + CachingKyselyService, + DataKyselyService, +} from "../../../../src/client/kysely.js"; +import { GetCollectionsArgs } from "../../../../src/graphql/schemas/args/collectionArgs.js"; +import { BlueprintsService } from "../../../../src/services/database/entities/BlueprintsEntityService.js"; +import { CollectionService } from "../../../../src/services/database/entities/CollectionEntityService.js"; +import { HypercertsService } from "../../../../src/services/database/entities/HypercertsEntityService.js"; +import { UsersService } from "../../../../src/services/database/entities/UsersEntityService.js"; +import { CachingDatabase } from "../../../../src/types/kyselySupabaseCaching.js"; +import type { DataDatabase } from "../../../../src/types/kyselySupabaseData.js"; +import { + createTestCachingDatabase, + createTestDataDatabase, + generateMockBlueprint, + generateMockCollection, + generateMockUser, +} from "../../../utils/testUtils.js"; + +const mockDataDb = vi.fn(); +const mockCachingDb = vi.fn(); +vi.mock("../../../../src/client/kysely.js", () => ({ + get CachingKyselyService() { + return class MockCachingKyselyService { + getConnection() { + return mockCachingDb(); + } + get db() { + return mockCachingDb(); + } + }; + }, + + get 
DataKyselyService() { + return class MockDataKyselyService { + getConnection() { + return mockDataDb(); + } + get db() { + return mockDataDb(); + } + }; + }, + get kyselyData() { + return mockDataDb(); + }, +})); + +describe("CollectionService", () => { + let collectionService: CollectionService; + let dataDb: Kysely; + let cachingDb: Kysely; + let mockHypercertsService: HypercertsService; + let mockBlueprintsService: BlueprintsService; + let mockUsersService: UsersService; + + beforeEach(async () => { + vi.clearAllMocks(); + + ({ db: dataDb } = await createTestDataDatabase()); + ({ db: cachingDb } = await createTestCachingDatabase()); + + mockDataDb.mockReturnValue(dataDb); + mockCachingDb.mockReturnValue(cachingDb); + + // Create mock services + mockHypercertsService = { + getHypercerts: vi.fn(), + getHypercert: vi.fn(), + entityService: {}, + cachingKyselyService: container.resolve(CachingKyselyService), + } as unknown as HypercertsService; + + const getOrCreateUser = vi.fn(); + mockUsersService = { + getOrCreateUser, + entityService: {}, + dataKyselyService: container.resolve(DataKyselyService), + } as unknown as UsersService; + + mockBlueprintsService = { + getBlueprints: vi.fn(), + getBlueprint: vi.fn(), + entityService: {}, + usersService: mockUsersService, + dataKyselyService: container.resolve(DataKyselyService), + } as unknown as BlueprintsService; + + collectionService = new CollectionService( + mockHypercertsService, + container.resolve(DataKyselyService), + mockBlueprintsService, + mockUsersService, + ); + }); + + describe("getCollections", () => { + it("should return collections with correct data", async () => { + // Arrange + const mockCollection = generateMockCollection(); + + const [collection] = await dataDb + .insertInto("collections") + .values({ + name: mockCollection.name, + description: mockCollection.description, + chain_ids: mockCollection.chain_ids.map(Number), + hidden: mockCollection.hidden, + created_at: mockCollection.created_at, + }) 
+ .returning("id") + .execute(); + + // Insert mock admin + const admin = generateMockUser(); + await dataDb + .insertInto("users") + .values({ + id: admin.id, + address: admin.address, + chain_id: admin.chain_id, + }) + .execute(); + + await dataDb + .insertInto("collection_admins") + .values({ + collection_id: collection.id, + user_id: admin.id, + }) + .execute(); + + // Insert mock blueprint + const blueprint = generateMockBlueprint(); + await dataDb + .insertInto("blueprints") + .values({ + id: blueprint.id, + form_values: blueprint.form_values, + minter_address: blueprint.minter_address, + minted: blueprint.minted, + hypercert_ids: blueprint.hypercert_ids, + }) + .execute(); + + await dataDb + .insertInto("collection_blueprints") + .values({ + collection_id: collection.id, + blueprint_id: blueprint.id, + }) + .execute(); + + const args: GetCollectionsArgs = { + where: { + id: { eq: collection.id }, + }, + }; + + // Act + const result = await collectionService.getCollections(args); + + // Assert + expect(result.count).toBe(1); + expect(result.data).toHaveLength(1); + expect(result.data[0].id).toBe(collection.id); + expect(result.data[0].name).toBe(mockCollection.name); + expect(result.data[0].description).toBe(mockCollection.description); + expect(result.data[0].chain_ids).toEqual( + mockCollection.chain_ids.map(Number), + ); + expect(result.data[0].hidden).toBe(mockCollection.hidden); + }); + + it("should handle empty result set", async () => { + // Arrange + const args: GetCollectionsArgs = {}; + + // Act + const result = await collectionService.getCollections(args); + + // Assert + expect(result.count).toBe(0); + expect(result.data).toHaveLength(0); + }); + + it("should handle errors from entityService.getMany", async () => { + // Arrange + // Mock the database to throw an error + vi.spyOn(dataDb, "selectFrom").mockImplementation(() => { + throw new Error("Database error"); + }); + + // Act & Assert + await 
expect(collectionService.getCollections({})).rejects.toThrow( + "Database error", + ); + }); + + it("should filter collections by admin address", async () => { + // Arrange + const mockCollection = generateMockCollection(); + const [collection] = await dataDb + .insertInto("collections") + .values({ + name: mockCollection.name, + description: mockCollection.description, + chain_ids: mockCollection.chain_ids.map(Number), + hidden: mockCollection.hidden, + created_at: mockCollection.created_at, + }) + .returning("id") + .execute(); + + const admin = generateMockUser(); + await dataDb + .insertInto("users") + .values({ + id: admin.id, + address: admin.address, + chain_id: admin.chain_id, + }) + .execute(); + + await dataDb + .insertInto("collection_admins") + .values({ + collection_id: collection.id, + user_id: admin.id, + }) + .execute(); + + const args: GetCollectionsArgs = { + where: { + admins: { address: { eq: admin.address } }, + }, + }; + + // Act + const result = await collectionService.getCollections(args); + + // Assert + expect(result.count).toBe(1); + expect(result.data).toHaveLength(1); + expect(result.data[0].id).toBe(collection.id); + }); + + it("should filter collections by blueprint id", async () => { + // Arrange + const mockCollection = generateMockCollection(); + const [collection] = await dataDb + .insertInto("collections") + .values({ + name: mockCollection.name, + description: mockCollection.description, + chain_ids: mockCollection.chain_ids.map(Number), + hidden: mockCollection.hidden, + created_at: mockCollection.created_at, + }) + .returning("id") + .execute(); + + const blueprint = generateMockBlueprint(); + await dataDb + .insertInto("blueprints") + .values({ + id: blueprint.id, + form_values: blueprint.form_values, + minter_address: blueprint.minter_address, + minted: blueprint.minted, + hypercert_ids: blueprint.hypercert_ids, + }) + .execute(); + + await dataDb + .insertInto("collection_blueprints") + .values({ + collection_id: 
collection.id, + blueprint_id: blueprint.id, + }) + .execute(); + + const args: GetCollectionsArgs = { + where: { + blueprints: { id: { eq: blueprint.id } }, + }, + }; + + console.log(args); + + // Act + // TODO: Fix this test + // const result = await collectionService.getCollections(args); + + // // Assert + // expect(result.count).toBe(1); + // expect(result.data).toHaveLength(1); + // expect(result.data[0].id).toBe(collection.id); + }); + }); + + describe("upsertCollections", () => { + it("should upsert collections with correct values", async () => { + // Arrange + const mockCollection = generateMockCollection(); + const collections = [ + { + name: mockCollection.name, + description: mockCollection.description, + chain_ids: mockCollection.chain_ids.map(Number), + hidden: mockCollection.hidden, + created_at: mockCollection.created_at, + }, + ]; + + // Act + const result = await collectionService.upsertCollections(collections); + + // Assert + expect(result).toHaveLength(1); + const insertedCollection = result[0]; + expect(insertedCollection).toMatchObject({ + name: mockCollection.name, + description: mockCollection.description, + chain_ids: mockCollection.chain_ids.map(Number), + hidden: mockCollection.hidden, + }); + }); + + it("should handle errors during collection upsert", async () => { + // Arrange + vi.spyOn(dataDb, "insertInto").mockImplementation(() => { + throw new Error("Database error"); + }); + + // Act & Assert + await expect(collectionService.upsertCollections([])).rejects.toThrow( + "Database error", + ); + }); + }); + + describe("getCollectionAdmins", () => { + it("should return admins for a collection", async () => { + // Arrange + const mockCollection = generateMockCollection(); + const [collection] = await dataDb + .insertInto("collections") + .values({ + name: mockCollection.name, + description: mockCollection.description, + chain_ids: mockCollection.chain_ids.map(Number), + hidden: mockCollection.hidden, + created_at: 
mockCollection.created_at, + }) + .returning("id") + .execute(); + + const admin = generateMockUser(); + await dataDb + .insertInto("users") + .values({ + id: admin.id, + address: admin.address, + chain_id: admin.chain_id, + }) + .execute(); + + await dataDb + .insertInto("collection_admins") + .values({ + collection_id: collection.id, + user_id: admin.id, + }) + .execute(); + + // Act + const result = await collectionService.getCollectionAdmins(collection.id); + + // Assert + expect(result).toHaveLength(1); + expect(result[0].address).toBe(admin.address); + expect(result[0].chain_id).toBe(admin.chain_id); + }); + }); + + describe("addBlueprintsToCollection", () => { + it("should add blueprints to a collection", async () => { + // Arrange + const mockCollection = generateMockCollection(); + const [collection] = await dataDb + .insertInto("collections") + .values({ + name: mockCollection.name, + description: mockCollection.description, + chain_ids: mockCollection.chain_ids.map(Number), + hidden: mockCollection.hidden, + created_at: mockCollection.created_at, + }) + .returning("id") + .execute(); + + const blueprint = generateMockBlueprint(); + await dataDb + .insertInto("blueprints") + .values({ + id: blueprint.id, + form_values: blueprint.form_values, + minter_address: blueprint.minter_address, + minted: blueprint.minted, + hypercert_ids: blueprint.hypercert_ids, + }) + .execute(); + + // Act + await collectionService.addBlueprintsToCollection([ + { + collection_id: collection.id, + blueprint_id: blueprint.id, + }, + ]); + + // Assert + const blueprintResult = await dataDb + .selectFrom("collection_blueprints") + .where("collection_id", "=", collection.id) + .selectAll() + .execute(); + expect(blueprintResult).toHaveLength(1); + expect(blueprintResult[0].blueprint_id).toBe(blueprint.id); + }); + + it("should handle errors when adding blueprints", async () => { + // Arrange + vi.spyOn(dataDb, "insertInto").mockImplementation(() => { + throw new Error("Database 
error"); + }); + + // Act & Assert + await expect( + collectionService.addBlueprintsToCollection([ + { + collection_id: "test-id", + blueprint_id: 1, + }, + ]), + ).rejects.toThrow("Database error"); + }); + }); + + describe("getCollectionBlueprints", () => { + it("should return blueprints for a collection", async () => { + // Arrange + const mockCollection = generateMockCollection(); + const [collection] = await dataDb + .insertInto("collections") + .values({ + name: mockCollection.name, + description: mockCollection.description, + chain_ids: mockCollection.chain_ids.map(Number), + hidden: mockCollection.hidden, + created_at: mockCollection.created_at, + }) + .returning("id") + .execute(); + + const blueprint = generateMockBlueprint(); + await dataDb + .insertInto("blueprints") + .values({ + id: blueprint.id, + form_values: blueprint.form_values, + minter_address: blueprint.minter_address, + minted: blueprint.minted, + hypercert_ids: blueprint.hypercert_ids, + }) + .execute(); + + await dataDb + .insertInto("collection_blueprints") + .values({ + collection_id: collection.id, + blueprint_id: blueprint.id, + }) + .execute(); + + (mockBlueprintsService.getBlueprints as Mock).mockResolvedValue({ + data: [blueprint], + count: 1, + }); + + // Act + const result = await collectionService.getCollectionBlueprints( + collection.id, + ); + + // Assert + expect(result.data).toHaveLength(1); + expect(result.data[0]).toBe(blueprint); + expect(mockBlueprintsService.getBlueprints).toHaveBeenCalledWith({ + where: { id: { in: [blueprint.id] } }, + }); + }); + }); + + describe("getCollectionHypercerts", () => { + it("should return hypercerts for a collection", async () => { + // Arrange + const mockCollection = generateMockCollection(); + const [collection] = await dataDb + .insertInto("collections") + .values({ + name: mockCollection.name, + description: mockCollection.description, + chain_ids: mockCollection.chain_ids.map(Number), + hidden: mockCollection.hidden, + created_at: 
mockCollection.created_at, + }) + .returning("id") + .execute(); + + (mockHypercertsService.getHypercerts as Mock).mockResolvedValue({ + data: [], + count: 0, + }); + + // Act + const result = await collectionService.getCollectionHypercerts( + collection.id, + ); + + // Assert + expect(result.data).toHaveLength(0); + expect(mockHypercertsService.getHypercerts).toHaveBeenCalled(); + }); + }); +}); diff --git a/test/services/database/entities/ContractEntityService.test.ts b/test/services/database/entities/ContractEntityService.test.ts new file mode 100644 index 00000000..17f0d0ac --- /dev/null +++ b/test/services/database/entities/ContractEntityService.test.ts @@ -0,0 +1,141 @@ +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { faker } from "@faker-js/faker"; +import { getAddress } from "viem"; +import { ContractService } from "../../../../src/services/database/entities/ContractEntityService.js"; +import type { GetContractsArgs } from "../../../../src/graphql/schemas/args/contractArgs.js"; + +const mockEntityService = { + getMany: vi.fn(), + getSingle: vi.fn(), +}; + +// Mock the createEntityService function +vi.mock( + "../../../../src/services/database/entities/EntityServiceFactory.js", + () => ({ + createEntityService: () => mockEntityService, + }), +); + +describe("ContractService", () => { + let service: ContractService; + const mockContractAddress = getAddress(faker.finance.ethereumAddress()); + + beforeEach(() => { + vi.clearAllMocks(); + service = new ContractService(); + }); + + describe("getContracts", () => { + it("should return contracts with correct data", async () => { + // Arrange + const args: GetContractsArgs = { + where: { + chain_id: { eq: 1n }, + contract_address: { eq: mockContractAddress }, + }, + }; + const mockResponse = { + data: [ + { + id: "1", + chain_id: 1n, + contract_address: mockContractAddress, + start_block: 1000000n, + }, + { + id: "2", + chain_id: 1n, + contract_address: mockContractAddress, + start_block: 
2000000n, + }, + ], + count: 2, + }; + mockEntityService.getMany.mockResolvedValue(mockResponse); + + // Act + const result = await service.getContracts(args); + + // Assert + expect(mockEntityService.getMany).toHaveBeenCalledWith(args); + expect(result.count).toBe(2); + expect(result.data).toHaveLength(2); + expect(result.data[0].contract_address).toBe(mockContractAddress); + expect(result.data[1].contract_address).toBe(mockContractAddress); + }); + + it("should handle empty result set", async () => { + // Arrange + const mockResponse = { + data: [], + count: 0, + }; + mockEntityService.getMany.mockResolvedValue(mockResponse); + + // Act + const result = await service.getContracts({}); + + // Assert + expect(result.count).toBe(0); + expect(result.data).toHaveLength(0); + }); + + it("should handle errors from entityService.getMany", async () => { + // Arrange + const error = new Error("Database error"); + mockEntityService.getMany.mockRejectedValue(error); + + // Act & Assert + await expect(service.getContracts({})).rejects.toThrow(error); + }); + }); + + describe("getContract", () => { + it("should return a single contract", async () => { + // Arrange + const args: GetContractsArgs = { + where: { + chain_id: { eq: 1n }, + contract_address: { eq: mockContractAddress }, + }, + }; + const mockResponse = { + id: "1", + chain_id: 1n, + contract_address: mockContractAddress, + start_block: 1000000n, + }; + mockEntityService.getSingle.mockResolvedValue(mockResponse); + + // Act + const result = await service.getContract(args); + + // Assert + expect(mockEntityService.getSingle).toHaveBeenCalledWith(args); + expect(result).toEqual(mockResponse); + expect(result?.contract_address).toBe(mockContractAddress); + }); + + it("should return undefined when contract is not found", async () => { + // Arrange + mockEntityService.getSingle.mockResolvedValue(undefined); + + // Act + const result = await service.getContract({}); + + // Assert + expect(result).toBeUndefined(); + 
expect(mockEntityService.getSingle).toHaveBeenCalledWith({}); + }); + + it("should handle errors from entityService.getSingle", async () => { + // Arrange + const error = new Error("Database error"); + mockEntityService.getSingle.mockRejectedValue(error); + + // Act & Assert + await expect(service.getContract({})).rejects.toThrow(error); + }); + }); +}); diff --git a/test/services/database/entities/EntityServiceFactory.test.ts b/test/services/database/entities/EntityServiceFactory.test.ts new file mode 100644 index 00000000..9230942d --- /dev/null +++ b/test/services/database/entities/EntityServiceFactory.test.ts @@ -0,0 +1,77 @@ +import { Kysely } from "kysely"; +import { IMemoryDb, newDb } from "pg-mem"; +import { beforeEach, describe, expect, it } from "vitest"; +import { GetUsersArgs } from "../../../../src/graphql/schemas/args/userArgs.js"; +import { + createEntityService, + EntityService, +} from "../../../../src/services/database/entities/EntityServiceFactory.js"; +import { DataDatabase } from "../../../../src/types/kyselySupabaseData.js"; + +type TestDatabase = DataDatabase; + +describe("EntityServiceFactory", () => { + let db: Kysely; + let mem: IMemoryDb; + let entityService: EntityService; + + beforeEach(() => { + mem = newDb(); + db = mem.adapters.createKysely(); + + // Create test table + mem.public.none(` + CREATE TABLE users ( + id SERIAL PRIMARY KEY, + display_name TEXT NOT NULL, + avatar TEXT NOT NULL + ); + `); + + // Insert some test data + mem.public.none(` + INSERT INTO users (display_name, avatar) VALUES + ('Alice', 'https://example.com/alice.jpg'), + ('Bob', 'https://example.com/bob.jpg'), + ('Charlie', 'https://example.com/charlie.jpg'); + `); + + entityService = createEntityService("users", "TestEntityService", db); + }); + + describe("Basic Functionality", () => { + it("should retrieve a single entity", async () => { + const result = await entityService.getSingle({ + where: { id: { eq: "1" } }, + }); + expect(result).toBeDefined(); + 
expect(result?.id).toBe(1); + expect(result?.display_name).toBe("Alice"); + expect(result?.avatar).toBe("https://example.com/alice.jpg"); + }); + + it("should retrieve multiple entities", async () => { + const result = await entityService.getMany({}); + expect(result).toBeDefined(); + expect(Array.isArray(result.data)).toBe(true); + expect(result.count).toBe(3); // Alice, Bob, and Charlie + }); + }); + + describe("Error Handling", () => { + it("should return undefined for non-existent entity", async () => { + const result = await entityService.getSingle({ + where: { id: { eq: "999" } }, + }); + expect(result).toBeUndefined(); + }); + }); + + describe("Instance Uniqueness", () => { + it("should return unique instances for each service", () => { + const service1 = createEntityService("users", "TestEntityService1", db); + const service2 = createEntityService("users", "TestEntityService2", db); + expect(service1).not.toBe(service2); // Should be different instances + }); + }); +}); diff --git a/test/services/database/entities/FractionEntityService.test.ts b/test/services/database/entities/FractionEntityService.test.ts new file mode 100644 index 00000000..e6856335 --- /dev/null +++ b/test/services/database/entities/FractionEntityService.test.ts @@ -0,0 +1,119 @@ +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { FractionService } from "../../../../src/services/database/entities/FractionEntityService.js"; +import { generateMockFraction } from "../../../utils/testUtils.js"; +import type { GetFractionsArgs } from "../../../../src/graphql/schemas/args/fractionArgs.js"; + +const mockEntityService = { + getMany: vi.fn(), + getSingle: vi.fn(), +}; + +// Mock the createEntityService function +vi.mock( + "../../../../src/services/database/entities/EntityServiceFactory.js", + () => ({ + createEntityService: () => mockEntityService, + }), +); + +describe("FractionService", () => { + let service: FractionService; + let mockFraction: ReturnType; + + 
beforeEach(() => { + vi.clearAllMocks(); + service = new FractionService(); + mockFraction = generateMockFraction(); + }); + + describe("getFractions", () => { + it("should return fractions with correct data", async () => { + // Arrange + const args: GetFractionsArgs = {}; + const mockResponse = { + data: [mockFraction], + count: 1, + }; + mockEntityService.getMany.mockResolvedValue(mockResponse); + + // Act + const result = await service.getFractions(args); + + // Assert + expect(mockEntityService.getMany).toHaveBeenCalledWith(args); + expect(result.data).toHaveLength(1); + expect(result.data[0]).toEqual(mockFraction); + expect(result.count).toBe(1); + }); + + it("should return empty array when no fractions match criteria", async () => { + // Arrange + const args: GetFractionsArgs = { + where: { hypercert_id: { eq: "non-existent-id" } }, + }; + const mockResponse = { + data: [], + count: 0, + }; + mockEntityService.getMany.mockResolvedValue(mockResponse); + + // Act + const result = await service.getFractions(args); + + // Assert + expect(mockEntityService.getMany).toHaveBeenCalledWith(args); + expect(result.data).toHaveLength(0); + expect(result.count).toBe(0); + }); + + it("should handle errors from entityService.getMany", async () => { + // Arrange + const error = new Error("Database error"); + mockEntityService.getMany.mockRejectedValue(error); + + // Act & Assert + await expect(service.getFractions({})).rejects.toThrow(error); + }); + }); + + describe("getFraction", () => { + it("should return a single fraction by id", async () => { + // Arrange + const args: GetFractionsArgs = { + where: { id: { eq: mockFraction.id } }, + }; + mockEntityService.getSingle.mockResolvedValue(mockFraction); + + // Act + const result = await service.getFraction(args); + + // Assert + expect(mockEntityService.getSingle).toHaveBeenCalledWith(args); + expect(result).toEqual(mockFraction); + }); + + it("should return undefined when fraction not found", async () => { + // Arrange + 
const args: GetFractionsArgs = { + where: { id: { eq: "non-existent-id" } }, + }; + mockEntityService.getSingle.mockResolvedValue(undefined); + + // Act + const result = await service.getFraction(args); + + // Assert + expect(mockEntityService.getSingle).toHaveBeenCalledWith(args); + expect(result).toBeUndefined(); + }); + + it("should handle errors from entityService.getSingle", async () => { + // Arrange + const error = new Error("Database error"); + mockEntityService.getSingle.mockRejectedValue(error); + + // Act & Assert + await expect(service.getFraction({})).rejects.toThrow(error); + }); + }); +}); diff --git a/test/services/database/entities/HyperboardEntityService.test.ts b/test/services/database/entities/HyperboardEntityService.test.ts new file mode 100644 index 00000000..69fe123b --- /dev/null +++ b/test/services/database/entities/HyperboardEntityService.test.ts @@ -0,0 +1,385 @@ +import { faker } from "@faker-js/faker"; +import { Kysely } from "kysely"; +import { container } from "tsyringe"; +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { + CachingKyselyService, + DataKyselyService, +} from "../../../../src/client/kysely.js"; +import { GetHyperboardsArgs } from "../../../../src/graphql/schemas/args/hyperboardArgs.js"; +import { BlueprintsService } from "../../../../src/services/database/entities/BlueprintsEntityService.js"; +import { CollectionService } from "../../../../src/services/database/entities/CollectionEntityService.js"; +import { HyperboardService } from "../../../../src/services/database/entities/HyperboardEntityService.js"; +import { HypercertsService } from "../../../../src/services/database/entities/HypercertsEntityService.js"; +import { UsersService } from "../../../../src/services/database/entities/UsersEntityService.js"; +import { CachingDatabase } from "../../../../src/types/kyselySupabaseCaching.js"; +import type { DataDatabase } from "../../../../src/types/kyselySupabaseData.js"; +import { + checkSimilarity, 
+ createTestCachingDatabase, + createTestDataDatabase, + generateMockAddress, + generateMockCollection, + generateMockHyperboard, + generateMockUser, +} from "../../../utils/testUtils.js"; + +const mockDataDb = vi.fn(); +const mockCachingDb = vi.fn(); + +vi.mock("../../../../src/client/kysely.js", () => ({ + get CachingKyselyService() { + return class MockCachingKyselyService { + getConnection() { + return mockCachingDb(); + } + get db() { + return mockCachingDb(); + } + }; + }, + get DataKyselyService() { + return class MockDataKyselyService { + getConnection() { + return mockDataDb(); + } + get db() { + return mockDataDb(); + } + }; + }, + get kyselyCaching() { + return mockCachingDb(); + }, + get kyselyData() { + return mockDataDb(); + }, +})); + +describe("HyperboardService", () => { + let hyperboardService: HyperboardService; + let dataDb: Kysely; + let cachingDb: Kysely; + let blueprintsService: BlueprintsService; + let hypercertsService: HypercertsService; + let usersService: UsersService; + let collectionService: CollectionService; + + beforeEach(async () => { + vi.clearAllMocks(); + + ({ db: dataDb } = await createTestDataDatabase()); + ({ db: cachingDb } = await createTestCachingDatabase()); + + mockDataDb.mockReturnValue(dataDb); + mockCachingDb.mockReturnValue(cachingDb); + + // Create mock services + hypercertsService = new HypercertsService( + container.resolve(CachingKyselyService), + ); + usersService = new UsersService(container.resolve(DataKyselyService)); + blueprintsService = new BlueprintsService( + container.resolve(DataKyselyService), + usersService, + ); + collectionService = new CollectionService( + hypercertsService, + container.resolve(DataKyselyService), + blueprintsService, + usersService, + ); + + hyperboardService = new HyperboardService( + container.resolve(DataKyselyService), + collectionService, + usersService, + ); + }); + + describe("getHyperboards", () => { + it.skip("should return hyperboards with correct data", async () => { + 
// TODO: Reenable this test when pg-mem supports views + // Arrange + const mockHyperboard = generateMockHyperboard(); + + const [hyperboard] = await dataDb + .insertInto("hyperboards") + .values({ + id: mockHyperboard.id, + name: mockHyperboard.name, + chain_ids: mockHyperboard.chain_ids.map((id) => Number(id)), + background_image: mockHyperboard.background_image, + grayscale_images: mockHyperboard.grayscale_images, + tile_border_color: mockHyperboard.tile_border_color, + }) + .returningAll() + .execute(); + + const args: GetHyperboardsArgs = { + where: { + id: { eq: hyperboard.id }, + }, + }; + + // Act + const result = await hyperboardService.getHyperboards(args); + + // Assert + expect(result.count).toBe(1); + expect(result.data).toHaveLength(1); + expect(result.data[0]).not.toBeNull(); + expect(result.data[0].id).toBe(hyperboard.id); + expect(result.data[0].name).toBe(mockHyperboard.name); + expect(result.data[0].chain_ids.map(BigInt)).toEqual( + mockHyperboard.chain_ids, + ); + expect(result.data[0].background_image).toBe( + mockHyperboard.background_image, + ); + expect(result.data[0].grayscale_images).toBe( + mockHyperboard.grayscale_images, + ); + expect(result.data[0].tile_border_color).toBe( + mockHyperboard.tile_border_color, + ); + }); + + it.skip("should handle empty result set", async () => { + // TODO: Reenable this test when pg-mem supports views + // Arrange + const args: GetHyperboardsArgs = {}; + + // Act + const result = await hyperboardService.getHyperboards(args); + + // Assert + expect(result.count).toBe(0); + expect(result.data).toHaveLength(0); + }); + + it("should handle errors from entityService.getMany", async () => { + // Arrange + vi.spyOn(dataDb, "selectFrom").mockImplementation(() => { + throw new Error("Database error"); + }); + + // Act & Assert + await expect(hyperboardService.getHyperboards({})).rejects.toThrow( + "Database error", + ); + }); + }); + + describe("getHyperboardCollections", () => { + it("should fetch collections 
for a hyperboard", async () => { + // Arrange + const mockHyperboard = generateMockHyperboard(); + const mockCollection = generateMockCollection(); + + // Insert the hyperboard + const [hyperboard] = await dataDb + .insertInto("hyperboards") + .values({ + id: mockHyperboard.id, + name: mockHyperboard.name, + chain_ids: mockHyperboard.chain_ids.map((id) => Number(id)), + }) + .returningAll() + .execute(); + + // Insert the collection first + await dataDb + .insertInto("collections") + .values({ + id: mockCollection.id, + name: mockCollection.name, + description: mockCollection.description, + chain_ids: mockCollection.chain_ids.map((id) => Number(id)), + hidden: mockCollection.hidden, + created_at: mockCollection.created_at, + }) + .execute(); + + // Then create the relationship + await dataDb + .insertInto("hyperboard_collections") + .values({ + hyperboard_id: hyperboard.id, + collection_id: mockCollection.id, + }) + .execute(); + + // Act + const result = await hyperboardService.getHyperboardCollections( + hyperboard.id, + ); + + // Assert + + result.data.map((collection) => + checkSimilarity(collection, mockCollection), + ); + }); + + it("should handle errors when fetching collections", async () => { + // Arrange + const error = new Error("Fetching collections failed"); + vi.spyOn(collectionService, "getCollections").mockImplementation(() => + Promise.reject(error), + ); + + // Act & Assert + await expect( + hyperboardService.getHyperboardCollections(faker.string.uuid()), + ).rejects.toThrow("Fetching collections failed"); + }); + }); + + describe("getHyperboardAdmins", () => { + it("should fetch admin users for a hyperboard", async () => { + // Arrange + const mockHyperboard = generateMockHyperboard(); + const mockUser = generateMockUser(); + + // First create the hyperboard + const [hyperboard] = await dataDb + .insertInto("hyperboards") + .values({ + id: mockHyperboard.id, + name: mockHyperboard.name, + chain_ids: mockHyperboard.chain_ids.map((id) => 
Number(id)), + }) + .returningAll() + .execute(); + + // Create the user first + const [user] = await dataDb + .insertInto("users") + .values({ + id: mockUser.id, + address: mockUser.address, + chain_id: mockUser.chain_id, + display_name: mockUser.display_name, + avatar: mockUser.avatar, + created_at: new Date().toISOString(), + }) + .returningAll() + .execute(); + + // Then create the admin relationship + await dataDb + .insertInto("hyperboard_admins") + .values({ + hyperboard_id: hyperboard.id, + user_id: user.id, + }) + .execute(); + + vi.spyOn(usersService, "getUsers").mockImplementation(() => + Promise.resolve({ + data: [user], + count: 1, + }), + ); + + // Act + const result = await hyperboardService.getHyperboardAdmins(hyperboard.id); + + // Assert + expect(usersService.getUsers).toHaveBeenCalledWith({ + where: { + id: { + in: [user.id], + }, + }, + }); + expect(result).toEqual({ data: [user], count: 1 }); + }); + + it("should handle errors when fetching admins", async () => { + // Arrange + const error = new Error("Failed to get hyperboard admins"); + vi.spyOn(usersService, "getUsers").mockImplementation(() => + Promise.reject(error), + ); + + // Act & Assert + await expect( + hyperboardService.getHyperboardAdmins(faker.string.uuid()), + ).rejects.toThrow("Failed to get hyperboard admins"); + }); + }); + + describe("addAdminToHyperboard", () => { + it("should add an admin to a hyperboard", async () => { + // Arrange + const mockHyperboard = generateMockHyperboard(); + const mockUser = generateMockUser(); + + // First create the hyperboard + const [hyperboard] = await dataDb + .insertInto("hyperboards") + .values({ + id: mockHyperboard.id, + name: mockHyperboard.name, + chain_ids: mockHyperboard.chain_ids.map((id) => Number(id)), + }) + .returningAll() + .execute(); + + // Create the user first + const [user] = await dataDb + .insertInto("users") + .values({ + id: mockUser.id, + address: mockUser.address, + chain_id: mockUser.chain_id, + display_name: 
mockUser.display_name, + avatar: mockUser.avatar, + created_at: new Date().toISOString(), + }) + .returningAll() + .execute(); + + vi.spyOn(usersService, "getOrCreateUser").mockImplementation(() => + Promise.resolve(user), + ); + + // Act + const result = await hyperboardService.addAdminToHyperboard( + hyperboard.id, + { + address: mockUser.address, + chain_id: mockUser.chain_id, + }, + ); + + // Assert + expect(usersService.getOrCreateUser).toHaveBeenCalledWith({ + address: mockUser.address, + chain_id: mockUser.chain_id, + }); + expect(result).toMatchObject({ + hyperboard_id: hyperboard.id, + user_id: user.id, + }); + }); + + it("should handle errors when adding an admin", async () => { + // Arrange + const error = new Error("Failed to add admin to hyperboard"); + vi.spyOn(usersService, "getOrCreateUser").mockImplementation(() => + Promise.reject(error), + ); + + // Act & Assert + await expect( + hyperboardService.addAdminToHyperboard("test-id", { + address: generateMockAddress(), + chain_id: faker.number.int({ min: 1, max: 100000 }), + }), + ).rejects.toThrow("Failed to add admin to hyperboard"); + }); + }); +}); diff --git a/test/services/database/entities/HypercertsEntityService.test.ts b/test/services/database/entities/HypercertsEntityService.test.ts new file mode 100644 index 00000000..f600025d --- /dev/null +++ b/test/services/database/entities/HypercertsEntityService.test.ts @@ -0,0 +1,339 @@ +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { container } from "tsyringe"; +import { HypercertsService } from "../../../../src/services/database/entities/HypercertsEntityService.js"; +import { CachingKyselyService } from "../../../../src/client/kysely.js"; +import type { Mock } from "vitest"; +import type { GetHypercertsArgs } from "../../../../src/graphql/schemas/args/hypercertsArgs.js"; +import { faker } from "@faker-js/faker"; +import { + generateHypercertId, + generateMockMetadata, +} from "../../../utils/testUtils.js"; + +// Create 
mock entity service +const mockEntityService = { + getMany: vi.fn(), + getSingle: vi.fn(), +}; + +let mockConnection: { + selectFrom: Mock; +}; +let mockQuery: { + leftJoin: Mock; + selectAll: Mock; + where: Mock; + execute: Mock; + executeTakeFirst: Mock; +}; + +// Mock the createEntityService function +vi.mock( + "../../../../src/services/database/entities/EntityServiceFactory.js", + () => ({ + createEntityService: () => mockEntityService, + }), +); + +describe("HypercertsService", () => { + let service: HypercertsService; + + beforeEach(() => { + // Mock console methods + vi.spyOn(console, "warn").mockImplementation(() => {}); + vi.spyOn(console, "error").mockImplementation(() => {}); + + // Create mock query builder + mockQuery = { + leftJoin: vi.fn().mockReturnThis(), + selectAll: vi.fn().mockReturnThis(), + where: vi.fn().mockReturnThis(), + execute: vi.fn(), + executeTakeFirst: vi.fn(), + }; + + // Create mock connection + mockConnection = { + selectFrom: vi.fn().mockReturnValue(mockQuery), + }; + + // Create mock caching service + const mockCachingKyselyService = { + getConnection: vi.fn().mockReturnValue(mockConnection), + }; + + // Register mocks with the DI container + container.registerInstance( + CachingKyselyService, + mockCachingKyselyService as unknown as CachingKyselyService, + ); + + // Create a new instance for each test + service = container.resolve(HypercertsService); + }); + + describe("getHypercerts", () => { + it("should return hypercerts for given arguments", async () => { + // Arrange + const args: GetHypercertsArgs = { + where: { + hypercert_id: { eq: generateHypercertId() }, + }, + }; + const expectedResult = { + data: [ + { id: faker.string.uuid(), hypercert_id: generateHypercertId() }, + { id: faker.string.uuid(), hypercert_id: generateHypercertId() }, + ], + count: 2, + }; + mockEntityService.getMany.mockResolvedValue(expectedResult); + + // Act + const result = await service.getHypercerts(args); + + // Assert + 
expect(mockEntityService.getMany).toHaveBeenCalledWith(args); + expect(result).toEqual(expectedResult); + }); + + it("should handle errors from entity service", async () => { + // Arrange + const args: GetHypercertsArgs = {}; + const error = new Error("Database error"); + mockEntityService.getMany.mockRejectedValue(error); + + // Act & Assert + await expect(service.getHypercerts(args)).rejects.toThrow(error); + }); + }); + + describe("getHypercert", () => { + it("should return a single hypercert for given arguments", async () => { + // Arrange + const hypercertId = generateHypercertId(); + const args: GetHypercertsArgs = { + where: { + hypercert_id: { eq: hypercertId }, + }, + }; + const expectedResult = { + id: faker.string.uuid(), + hypercert_id: hypercertId, + }; + mockEntityService.getSingle.mockResolvedValue(expectedResult); + + // Act + const result = await service.getHypercert(args); + + // Assert + expect(mockEntityService.getSingle).toHaveBeenCalledWith(args); + expect(result).toEqual(expectedResult); + }); + + it("should return undefined when no record is found", async () => { + // Arrange + const args: GetHypercertsArgs = { + where: { + hypercert_id: { eq: generateHypercertId() }, + }, + }; + mockEntityService.getSingle.mockResolvedValue(undefined); + + // Act + const result = await service.getHypercert(args); + + // Assert + expect(result).toBeUndefined(); + expect(mockEntityService.getSingle).toHaveBeenCalledWith(args); + }); + + it("should handle errors from entity service", async () => { + // Arrange + const args: GetHypercertsArgs = {}; + const error = new Error("Database error"); + mockEntityService.getSingle.mockRejectedValue(error); + + // Act & Assert + await expect(service.getHypercert(args)).rejects.toThrow(error); + }); + }); + + describe("getHypercertMetadata", () => { + it("should return metadata when searching by claims_id", async () => { + // Arrange + const claimsId = faker.string.uuid(); + const expectedMetadata = 
generateMockMetadata(); + mockQuery.executeTakeFirst.mockResolvedValue(expectedMetadata); + + // Act + const result = await service.getHypercertMetadata({ + claims_id: claimsId, + }); + + // Assert + expect(mockConnection.selectFrom).toHaveBeenCalledWith("metadata"); + expect(mockQuery.leftJoin).toHaveBeenCalledWith( + "claims", + "metadata.uri", + "claims.uri", + ); + expect(mockQuery.selectAll).toHaveBeenCalledWith("metadata"); + expect(mockQuery.where).toHaveBeenCalledWith(expect.any(Function)); + expect(result).toEqual(expectedMetadata); + }); + + it("should return metadata when searching by hypercert_id", async () => { + // Arrange + const hypercertId = generateHypercertId(); + const expectedMetadata = generateMockMetadata(); + mockQuery.executeTakeFirst.mockResolvedValue(expectedMetadata); + + // Act + const result = await service.getHypercertMetadata({ + hypercert_id: hypercertId, + }); + + // Assert + expect(mockConnection.selectFrom).toHaveBeenCalledWith("metadata"); + expect(mockQuery.leftJoin).toHaveBeenCalledWith( + "claims", + "metadata.uri", + "claims.uri", + ); + expect(mockQuery.selectAll).toHaveBeenCalledWith("metadata"); + expect(mockQuery.where).toHaveBeenCalledWith(expect.any(Function)); + expect(result).toEqual(expectedMetadata); + }); + + it("should return undefined when no record is found", async () => { + // Arrange + const hypercertId = generateHypercertId(); + mockQuery.executeTakeFirst.mockResolvedValue(undefined); + + // Act + const result = await service.getHypercertMetadata({ + hypercert_id: hypercertId, + }); + + // Assert + expect(result).toBeUndefined(); + expect(mockConnection.selectFrom).toHaveBeenCalledWith("metadata"); + }); + + it("should return null when no arguments are provided", async () => { + // Act + const result = await service.getHypercertMetadata({}); + + // Assert + expect(result).toBeNull(); + expect(mockConnection.selectFrom).not.toHaveBeenCalled(); + expect(console.warn).toHaveBeenCalledWith( + 
expect.stringContaining( + "[HypercertsService::getHypercertMetadata] No claims_id or hypercert_id provided", + ), + ); + }); + + it("should handle database errors", async () => { + // Arrange + const claimsId = faker.string.uuid(); + const error = new Error("Database error"); + mockQuery.executeTakeFirst.mockRejectedValue(error); + + // Act & Assert + await expect( + service.getHypercertMetadata({ claims_id: claimsId }), + ).rejects.toThrow(); + }); + }); + + describe("getHypercertMetadataSets", () => { + it("should return metadata sets when searching by claims_ids", async () => { + // Arrange + const claimsIds = [faker.string.uuid(), faker.string.uuid()]; + const expectedMetadata = [generateMockMetadata(), generateMockMetadata()]; + mockQuery.execute.mockResolvedValue(expectedMetadata); + + // Act + const result = await service.getHypercertMetadataSets({ + claims_ids: claimsIds, + }); + + // Assert + expect(mockConnection.selectFrom).toHaveBeenCalledWith("metadata"); + expect(mockQuery.leftJoin).toHaveBeenCalledWith( + "claims", + "metadata.uri", + "claims.uri", + ); + expect(mockQuery.selectAll).toHaveBeenCalledWith("metadata"); + expect(mockQuery.where).toHaveBeenCalledWith(expect.any(Function)); + expect(result).toEqual(expectedMetadata); + }); + + it("should return metadata sets when searching by hypercert_ids", async () => { + // Arrange + const hypercertIds = [generateHypercertId(), generateHypercertId()]; + const expectedMetadata = [generateMockMetadata(), generateMockMetadata()]; + mockQuery.execute.mockResolvedValue(expectedMetadata); + + // Act + const result = await service.getHypercertMetadataSets({ + hypercert_ids: hypercertIds, + }); + + // Assert + expect(mockConnection.selectFrom).toHaveBeenCalledWith("metadata"); + expect(mockQuery.leftJoin).toHaveBeenCalledWith( + "claims", + "metadata.uri", + "claims.uri", + ); + expect(mockQuery.selectAll).toHaveBeenCalledWith("metadata"); + expect(mockQuery.where).toHaveBeenCalledWith(expect.any(Function)); + 
expect(result).toEqual(expectedMetadata); + }); + + it("should return empty array when no records are found", async () => { + // Arrange + const hypercertIds = [generateHypercertId()]; + mockQuery.execute.mockResolvedValue([]); + + // Act + const result = await service.getHypercertMetadataSets({ + hypercert_ids: hypercertIds, + }); + + // Assert + expect(result).toEqual([]); + expect(mockConnection.selectFrom).toHaveBeenCalledWith("metadata"); + }); + + it("should return null when no arguments are provided", async () => { + // Act + const result = await service.getHypercertMetadataSets({}); + + // Assert + expect(result).toBeNull(); + expect(mockConnection.selectFrom).not.toHaveBeenCalled(); + expect(console.warn).toHaveBeenCalledWith( + expect.stringContaining( + "[HypercertsService::getHypercertMetadataSets] No claims_ids or hypercert_ids provided", + ), + ); + }); + + it("should handle database errors", async () => { + // Arrange + const claimsIds = [faker.string.uuid(), faker.string.uuid()]; + const error = new Error("Database error"); + mockQuery.execute.mockRejectedValue(error); + + // Act & Assert + await expect( + service.getHypercertMetadataSets({ claims_ids: claimsIds }), + ).rejects.toThrow(); + }); + }); +}); diff --git a/test/services/database/entities/MarketplaceOrdersEntityService.test.ts b/test/services/database/entities/MarketplaceOrdersEntityService.test.ts new file mode 100644 index 00000000..28d19c88 --- /dev/null +++ b/test/services/database/entities/MarketplaceOrdersEntityService.test.ts @@ -0,0 +1,262 @@ +import { Kysely } from "kysely"; +import { container } from "tsyringe"; +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { DataKyselyService } from "../../../../src/client/kysely.js"; +import type { GetOrdersArgs } from "../../../../src/graphql/schemas/args/orderArgs.js"; +import { MarketplaceOrdersService } from "../../../../src/services/database/entities/MarketplaceOrdersEntityService.js"; +import type { DataDatabase 
} from "../../../../src/types/kyselySupabaseData.js"; +import { + checkSimilarity, + createTestDataDatabase, + generateMockOrder, +} from "../../../utils/testUtils.js"; +import { faker } from "@faker-js/faker"; + +const mockDb = vi.fn(); + +vi.mock("../../../../src/client/kysely.js", () => ({ + get DataKyselyService() { + return class MockDataKyselyService { + getConnection() { + return mockDb(); + } + get db() { + return mockDb(); + } + }; + }, + get kyselyData() { + return mockDb(); + }, +})); + +describe("MarketplaceOrdersService", () => { + let service: MarketplaceOrdersService; + let db: Kysely; + let mockOrder: ReturnType; + + beforeEach(async () => { + vi.clearAllMocks(); + + // Setup test database + ({ db } = await createTestDataDatabase()); + + mockDb.mockReturnValue(db); + service = new MarketplaceOrdersService( + container.resolve(DataKyselyService), + ); + mockOrder = generateMockOrder(); + }); + + describe("getOrders", () => { + it("should return all orders", async () => { + // Arrange + await db.insertInto("marketplace_orders").values(mockOrder).execute(); + + // Act + const result = await service.getOrders({}); + + // Assert + expect(result.data).toHaveLength(1); + checkSimilarity(result.data[0], mockOrder); + }); + + it("should return empty array when no orders match criteria", async () => { + // Arrange + const args: GetOrdersArgs = { + where: { id: { eq: faker.string.uuid() } }, + }; + + // Act + const result = await service.getOrders(args); + + // Assert + expect(result.count).toBe(0); + expect(result.data).toHaveLength(0); + }); + + it("should handle errors from database", async () => { + // Arrange + vi.spyOn(db, "selectFrom").mockImplementation(() => { + throw new Error("Database error"); + }); + + // Act & Assert + await expect(service.getOrders({})).rejects.toThrow("Database error"); + }); + }); + + describe("getOrder", () => { + it("should return a specific order by ID", async () => { + // Arrange + await 
db.insertInto("marketplace_orders").values(mockOrder).execute(); + + // Act + const result = await service.getOrder({ + where: { id: { eq: mockOrder.id } }, + }); + + // Assert + checkSimilarity(result, mockOrder); + }); + + it("should return undefined if order not found", async () => { + // Act + const result = await service.getOrder({ + where: { id: { eq: faker.string.uuid() } }, + }); + + // Assert + expect(result).toBeUndefined(); + }); + }); + + describe("storeOrder", () => { + it("should store a new order", async () => { + // Arrange + await service.storeOrder(mockOrder); + + // Assert + const storedOrder = await db + .selectFrom("marketplace_orders") + .selectAll() + .where("id", "=", mockOrder.id) + .executeTakeFirst(); + checkSimilarity(storedOrder, mockOrder); + }); + }); + + describe("updateOrder", () => { + it("should update an existing order", async () => { + // Arrange + await db.insertInto("marketplace_orders").values(mockOrder).execute(); + + const updatedOrder = { + ...mockOrder, + invalidated: true, + validator_codes: [42], + }; + + // Act + await service.updateOrder(updatedOrder); + + // Assert + const storedOrder = await db + .selectFrom("marketplace_orders") + .selectAll() + .where("id", "=", mockOrder.id) + .executeTakeFirst(); + checkSimilarity(storedOrder, updatedOrder); + }); + + it("should throw error when updating order without ID", async () => { + // Act & Assert + await expect(service.updateOrder({ chainId: 1 })).rejects.toThrow( + "Order ID is required", + ); + }); + }); + + describe("deleteOrder", () => { + it("should delete an existing order", async () => { + // Arrange + await db.insertInto("marketplace_orders").values(mockOrder).execute(); + + // Act + await service.deleteOrder(mockOrder.id); + + // Assert + const storedOrder = await db + .selectFrom("marketplace_orders") + .selectAll() + .where("id", "=", mockOrder.id) + .executeTakeFirst(); + expect(storedOrder).toBeUndefined(); + }); + }); + + describe("updateOrders", () => { + 
it("should update multiple orders", async () => { + // Arrange + const mockOrders = [generateMockOrder(), generateMockOrder()]; + await db.insertInto("marketplace_orders").values(mockOrders).execute(); + + const updatedOrders = mockOrders.map((order) => ({ + ...order, + invalidated: true, + validator_codes: [42], + })); + + // Act + const result = await service.updateOrders(updatedOrders); + + // Assert + expect(result).toHaveLength(2); + result.forEach((stored, i) => { + checkSimilarity(stored, updatedOrders[i]); + }); + }); + }); + + describe("nonce operations", () => { + it("should create and retrieve a nonce", async () => { + // Arrange + const mockNonce = { + address: mockOrder.signer, + chain_id: Number(mockOrder.chainId), + nonce_counter: 1, + }; + + // Act + await service.createNonce(mockNonce); + const result = await service.getNonce({ + address: mockNonce.address, + chain_id: mockNonce.chain_id, + }); + + // Assert + expect(result).toEqual(mockNonce); + }); + + it("should update a nonce", async () => { + // Arrange + const mockNonce = { + address: mockOrder.signer, + chain_id: Number(mockOrder.chainId), + nonce_counter: 1, + }; + await service.createNonce(mockNonce); + + // Act + const updatedNonce = { ...mockNonce, nonce_counter: 2 }; + const result = await service.updateNonce(updatedNonce); + + // Assert + expect(result.nonce_counter).toBe(2); + checkSimilarity(result, updatedNonce); + }); + + it("should throw error when getting nonce without required fields", async () => { + // Act & Assert + await expect( + service.getNonce({ address: "", chain_id: 0 }), + ).rejects.toThrow("Address and chain ID are required"); + }); + }); + + describe("batch operations", () => { + it("should upsert multiple orders", async () => { + // Arrange + const orderData1 = mockOrder; + const orderData2 = generateMockOrder(); + + // Act + const result = await service.upsertOrders([orderData1, orderData2]); + + // Assert + expect(result).toHaveLength(2); + 
checkSimilarity(result[0], orderData1); + checkSimilarity(result[1], orderData2); + }); + }); +}); diff --git a/test/services/database/entities/MetadataEntityService.test.ts b/test/services/database/entities/MetadataEntityService.test.ts new file mode 100644 index 00000000..9ad6fd9e --- /dev/null +++ b/test/services/database/entities/MetadataEntityService.test.ts @@ -0,0 +1,134 @@ +import { container } from "tsyringe"; +import { beforeEach, describe, expect, it, vi } from "vitest"; +import type { GetMetadataArgs } from "../../../../src/graphql/schemas/args/metadataArgs.js"; +import { MetadataService } from "../../../../src/services/database/entities/MetadataEntityService.js"; + +const mockEntityService = { + getMany: vi.fn(), + getSingle: vi.fn(), +}; + +// Mock the createEntityService function +vi.mock( + "../../../../src/services/database/entities/EntityServiceFactory.js", + () => ({ + createEntityService: () => mockEntityService, + }), +); + +describe("MetadataService", () => { + let service: MetadataService; + + beforeEach(() => { + // Create a new instance for each test + service = container.resolve(MetadataService); + }); + + describe("getMetadata", () => { + it("should return metadata records for given arguments", async () => { + // Arrange + const args: GetMetadataArgs = { + where: { + uri: { eq: "ipfs://test" }, + }, + }; + const expectedResult = { + data: [ + { id: "1", name: "Test 1", uri: "ipfs://test" }, + { id: "2", name: "Test 2", uri: "ipfs://test" }, + ], + count: 2, + }; + mockEntityService.getMany.mockResolvedValue(expectedResult); + + // Act + const result = await service.getMetadata(args); + + // Assert + expect(mockEntityService.getMany).toHaveBeenCalledWith(args); + expect(result).toEqual(expectedResult); + }); + + it("should handle empty result set", async () => { + // Arrange + const args: GetMetadataArgs = { + where: { + uri: { eq: "non-existent" }, + }, + }; + const expectedResult = { + data: [], + count: 0, + }; + 
mockEntityService.getMany.mockResolvedValue(expectedResult); + + // Act + const result = await service.getMetadata(args); + + // Assert + expect(mockEntityService.getMany).toHaveBeenCalledWith(args); + expect(result).toEqual(expectedResult); + }); + + it("should handle errors from entity service", async () => { + // Arrange + const args: GetMetadataArgs = {}; + const error = new Error("Database error"); + mockEntityService.getMany.mockRejectedValue(error); + + // Act & Assert + await expect(service.getMetadata(args)).rejects.toThrow(error); + }); + }); + + describe("getMetadataSingle", () => { + it("should return a single metadata record for given arguments", async () => { + // Arrange + const args: GetMetadataArgs = { + where: { + uri: { eq: "ipfs://test" }, + }, + }; + const expectedResult = { + id: "1", + name: "Test", + uri: "ipfs://test", + }; + mockEntityService.getSingle.mockResolvedValue(expectedResult); + + // Act + const result = await service.getMetadataSingle(args); + + // Assert + expect(mockEntityService.getSingle).toHaveBeenCalledWith(args); + expect(result).toEqual(expectedResult); + }); + + it("should return undefined when no record is found", async () => { + // Arrange + const args: GetMetadataArgs = { + where: { + uri: { eq: "non-existent" }, + }, + }; + mockEntityService.getSingle.mockResolvedValue(undefined); + + // Act + const result = await service.getMetadataSingle(args); + + // Assert + expect(mockEntityService.getSingle).toHaveBeenCalledWith(args); + expect(result).toBeUndefined(); + }); + + it("should handle errors from entity service", async () => { + // Arrange + const args: GetMetadataArgs = {}; + const error = new Error("Database error"); + mockEntityService.getSingle.mockRejectedValue(error); + + // Act & Assert + await expect(service.getMetadataSingle(args)).rejects.toThrow(error); + }); + }); +}); diff --git a/test/services/database/entities/SalesEntityService.test.ts b/test/services/database/entities/SalesEntityService.test.ts new 
file mode 100644 index 00000000..72fa67c3 --- /dev/null +++ b/test/services/database/entities/SalesEntityService.test.ts @@ -0,0 +1,108 @@ +import { faker } from "@faker-js/faker"; +import { beforeEach, describe, expect, it, vi } from "vitest"; +import type { GetSalesArgs } from "../../../../src/graphql/schemas/args/salesArgs.js"; +import type { Sale } from "../../../../src/graphql/schemas/typeDefs/salesTypeDefs.js"; +import { SalesService } from "../../../../src/services/database/entities/SalesEntityService.js"; +import { generateHypercertId } from "../../../utils/testUtils.js"; + +const mockEntityService = { + getMany: vi.fn(), + getSingle: vi.fn(), +}; + +// Mock the createEntityService function +vi.mock( + "../../../../src/services/database/entities/EntityServiceFactory.js", + () => ({ + createEntityService: () => mockEntityService, + }), +); + +describe("SalesService", () => { + let service: SalesService; + + beforeEach(() => { + service = new SalesService(); + }); + + describe("getSales", () => { + it("should return sales for given arguments", async () => { + // Arrange + const args: GetSalesArgs = { + where: { + hypercert_id: { eq: generateHypercertId() }, + }, + }; + const expectedResult = { + data: [ + { + id: faker.string.uuid(), + hypercert_id: generateHypercertId(), + buyer: faker.string.alphanumeric(42), + seller: faker.string.alphanumeric(42), + currency: faker.string.alphanumeric(42), + collection: faker.string.alphanumeric(42), + transaction_hash: faker.string.alphanumeric(66), + } as Sale, + ], + count: 1, + }; + mockEntityService.getMany.mockResolvedValue(expectedResult); + + // Act + const result = await service.getSales(args); + + // Assert + expect(mockEntityService.getMany).toHaveBeenCalledWith(args); + expect(result).toEqual(expectedResult); + }); + + it("should handle errors from entity service", async () => { + // Arrange + const args: GetSalesArgs = {}; + const error = new Error("Entity service error"); + 
mockEntityService.getMany.mockRejectedValue(error); + + // Act & Assert + await expect(service.getSales(args)).rejects.toThrow(error); + }); + }); + + describe("getSale", () => { + it("should return a single sale for given arguments", async () => { + // Arrange + const args: GetSalesArgs = { + where: { + id: { eq: faker.string.uuid() }, + }, + }; + const expectedResult = { + id: faker.string.uuid(), + hypercert_id: generateHypercertId(), + buyer: faker.string.alphanumeric(42), + seller: faker.string.alphanumeric(42), + currency: faker.string.alphanumeric(42), + collection: faker.string.alphanumeric(42), + transaction_hash: faker.string.alphanumeric(66), + } as Sale; + mockEntityService.getSingle.mockResolvedValue(expectedResult); + + // Act + const result = await service.getSale(args); + + // Assert + expect(mockEntityService.getSingle).toHaveBeenCalledWith(args); + expect(result).toEqual(expectedResult); + }); + + it("should handle errors from entity service", async () => { + // Arrange + const args: GetSalesArgs = {}; + const error = new Error("Entity service error"); + mockEntityService.getSingle.mockRejectedValue(error); + + // Act & Assert + await expect(service.getSale(args)).rejects.toThrow(error); + }); + }); +}); diff --git a/test/services/database/entities/SignatureRequestsEntityService.test.ts b/test/services/database/entities/SignatureRequestsEntityService.test.ts new file mode 100644 index 00000000..b18fc6fb --- /dev/null +++ b/test/services/database/entities/SignatureRequestsEntityService.test.ts @@ -0,0 +1,224 @@ +import { Kysely } from "kysely"; +import { container } from "tsyringe"; +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { DataKyselyService } from "../../../../src/client/kysely.js"; +import { GetSignatureRequestsArgs } from "../../../../src/graphql/schemas/args/signatureRequestArgs.js"; +import { SignatureRequestsService } from "../../../../src/services/database/entities/SignatureRequestsEntityService.js"; +import 
type { DataDatabase } from "../../../../src/types/kyselySupabaseData.js"; +import { + createTestDataDatabase, + generateMockAddress, +} from "../../../utils/testUtils.js"; + +const mockDb = vi.fn(); + +vi.mock("../../../../src/client/kysely.js", () => ({ + get DataKyselyService() { + return class MockDataKyselyService { + getConnection() { + return mockDb(); + } + get db() { + return mockDb(); + } + }; + }, + get kyselyData() { + return mockDb(); + }, +})); + +// Helper function to generate mock signature request data +function generateMockSignatureRequest() { + return { + safe_address: generateMockAddress(), + message_hash: `0x${Array.from({ length: 64 }, () => Math.floor(Math.random() * 16).toString(16)).join("")}`, + status: "pending" as const, + purpose: "update_user_data" as const, + message: JSON.stringify({ test: "data" }), + timestamp: Math.floor(Date.now() / 1000), + chain_id: 1, + }; +} + +describe("SignatureRequestsService", () => { + let signatureRequestsService: SignatureRequestsService; + let db: Kysely; + + beforeEach(async () => { + vi.clearAllMocks(); + + // Create test database with signature_requests table + ({ db } = await createTestDataDatabase()); + + mockDb.mockReturnValue(db); + + signatureRequestsService = new SignatureRequestsService( + container.resolve(DataKyselyService), + ); + }); + + describe("getSignatureRequests", () => { + it("should return signature requests with correct data", async () => { + // Arrange + const mockRequest = generateMockSignatureRequest(); + await db.insertInto("signature_requests").values(mockRequest).execute(); + + const args: GetSignatureRequestsArgs = { + where: { + safe_address: { eq: mockRequest.safe_address }, + }, + }; + + // Act + const result = await signatureRequestsService.getSignatureRequests(args); + + // Assert + expect(result.count).toBe(1); + expect(result.data).toHaveLength(1); + expect(result.data[0].safe_address).toBe(mockRequest.safe_address); + 
expect(result.data[0].message_hash).toBe(mockRequest.message_hash); + expect(result.data[0].status).toBe(mockRequest.status); + expect(result.data[0].purpose).toBe(mockRequest.purpose); + }); + + it("should handle empty result set", async () => { + // Arrange + const args: GetSignatureRequestsArgs = {}; + + // Act + const result = await signatureRequestsService.getSignatureRequests(args); + + // Assert + expect(result.count).toBe(0); + expect(result.data).toHaveLength(0); + }); + }); + + describe("getSignatureRequest", () => { + it("should return a single signature request", async () => { + // Arrange + const mockRequest = generateMockSignatureRequest(); + await db.insertInto("signature_requests").values(mockRequest).execute(); + + const args: GetSignatureRequestsArgs = { + where: { + safe_address: { eq: mockRequest.safe_address }, + message_hash: { eq: mockRequest.message_hash }, + }, + }; + + // Act + const result = await signatureRequestsService.getSignatureRequest(args); + + // Assert + expect(result).toBeDefined(); + expect(result?.safe_address).toBe(mockRequest.safe_address); + expect(result?.message_hash).toBe(mockRequest.message_hash); + expect(result?.status).toBe(mockRequest.status); + expect(result?.purpose).toBe(mockRequest.purpose); + }); + + it("should return undefined when request not found", async () => { + // Arrange + const args: GetSignatureRequestsArgs = { + where: { + safe_address: { eq: generateMockAddress() }, + }, + }; + + // Act + const result = await signatureRequestsService.getSignatureRequest(args); + + // Assert + expect(result).toBeUndefined(); + }); + }); + + describe("addSignatureRequest", () => { + it("should create a new signature request", async () => { + // Arrange + const mockRequest = generateMockSignatureRequest(); + + // Act + const result = + await signatureRequestsService.addSignatureRequest(mockRequest); + + // Assert + expect(result).toBeDefined(); + expect(result?.safe_address).toBe(mockRequest.safe_address); + 
expect(result?.message_hash).toBe(mockRequest.message_hash); + + // Verify in database + const dbResult = await db + .selectFrom("signature_requests") + .selectAll() + .where("safe_address", "=", mockRequest.safe_address) + .where("message_hash", "=", mockRequest.message_hash) + .executeTakeFirst(); + + expect(dbResult).toBeDefined(); + expect(dbResult?.status).toBe(mockRequest.status); + expect(dbResult?.purpose).toBe(mockRequest.purpose); + }); + + it("should handle duplicate requests", async () => { + // Arrange + const mockRequest = generateMockSignatureRequest(); + await db.insertInto("signature_requests").values(mockRequest).execute(); + + // Act & Assert + await expect( + signatureRequestsService.addSignatureRequest(mockRequest), + ).rejects.toThrow(); + }); + }); + + describe("updateSignatureRequestStatus", () => { + it("should update status of existing request", async () => { + // Arrange + const mockRequest = generateMockSignatureRequest(); + await db.insertInto("signature_requests").values(mockRequest).execute(); + + // Act + await signatureRequestsService.updateSignatureRequestStatus( + mockRequest.safe_address, + mockRequest.message_hash, + "executed", + ); + + // Assert + const result = await db + .selectFrom("signature_requests") + .selectAll() + .where("safe_address", "=", mockRequest.safe_address) + .where("message_hash", "=", mockRequest.message_hash) + .executeTakeFirst(); + + expect(result).toBeDefined(); + expect(result?.status).toBe("executed"); + }); + + it("should handle non-existent request", async () => { + // Arrange + const mockRequest = generateMockSignatureRequest(); + + // Act + await signatureRequestsService.updateSignatureRequestStatus( + mockRequest.safe_address, + mockRequest.message_hash, + "executed", + ); + + // Assert - Should not throw error, but also not update anything + const result = await db + .selectFrom("signature_requests") + .selectAll() + .where("safe_address", "=", mockRequest.safe_address) + .where("message_hash", 
"=", mockRequest.message_hash) + .executeTakeFirst(); + + expect(result).toBeUndefined(); + }); + }); +}); diff --git a/test/services/database/entities/UsersEntityService.test.ts b/test/services/database/entities/UsersEntityService.test.ts new file mode 100644 index 00000000..147c3dab --- /dev/null +++ b/test/services/database/entities/UsersEntityService.test.ts @@ -0,0 +1,201 @@ +import { Kysely } from "kysely"; +import { container } from "tsyringe"; +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { DataKyselyService } from "../../../../src/client/kysely.js"; +import { GetUsersArgs } from "../../../../src/graphql/schemas/args/userArgs.js"; +import { UsersService } from "../../../../src/services/database/entities/UsersEntityService.js"; +import type { DataDatabase } from "../../../../src/types/kyselySupabaseData.js"; +import { + createTestDataDatabase, + generateMockAddress, + generateMockUser, +} from "../../../utils/testUtils.js"; + +const mockDb = vi.fn(); + +vi.mock("../../../../src/client/kysely.js", () => ({ + get DataKyselyService() { + return class MockDataKyselyService { + getConnection() { + return mockDb(); + } + get db() { + return mockDb(); + } + }; + }, + get kyselyData() { + return mockDb(); + }, +})); + +describe("UsersService", () => { + let usersService: UsersService; + let db: Kysely; + + beforeEach(async () => { + vi.clearAllMocks(); + + ({ db } = await createTestDataDatabase()); + + mockDb.mockReturnValue(db); + + usersService = new UsersService(container.resolve(DataKyselyService)); + }); + + describe("getUsers", () => { + it("should return users with correct data", async () => { + // Arrange + const mockUser = generateMockUser(); + await db.insertInto("users").values(mockUser).execute(); + + const args: GetUsersArgs = { + where: { + address: { eq: mockUser.address }, + }, + }; + + // Act + const result = await usersService.getUsers(args); + + // Assert + expect(result.count).toBe(1); + 
expect(result.data).toHaveLength(1); + expect(result.data[0].address).toBe(mockUser.address); + expect(result.data[0].chain_id).toBe(mockUser.chain_id); + expect(result.data[0].display_name).toBe(mockUser.display_name); + expect(result.data[0].avatar).toBe(mockUser.avatar); + }); + + it("should handle empty result set", async () => { + // Arrange + const args: GetUsersArgs = {}; + + // Act + const result = await usersService.getUsers(args); + + // Assert + expect(result.count).toBe(0); + expect(result.data).toHaveLength(0); + }); + }); + + describe("getUser", () => { + it("should return a single user", async () => { + // Arrange + const mockUser = generateMockUser(); + await db.insertInto("users").values(mockUser).execute(); + + const args: GetUsersArgs = { + where: { + address: { eq: mockUser.address }, + }, + }; + + // Act + const result = await usersService.getUser(args); + + // Assert + expect(result).toBeDefined(); + expect(result?.address).toBe(mockUser.address); + expect(result?.chain_id).toBe(mockUser.chain_id); + expect(result?.display_name).toBe(mockUser.display_name); + expect(result?.avatar).toBe(mockUser.avatar); + }); + + it("should return undefined when user not found", async () => { + // Arrange + const args: GetUsersArgs = { + where: { address: { eq: generateMockAddress() } }, + }; + + // Act + const result = await usersService.getUser(args); + + // Assert + expect(result).toBeUndefined(); + }); + }); + + describe("getOrCreateUser", () => { + it("should return existing user if found", async () => { + // Arrange + const mockUser = generateMockUser(); + await db.insertInto("users").values(mockUser).execute(); + + // Act + const result = await usersService.getOrCreateUser(mockUser); + + // Assert + expect(result).toBeDefined(); + expect(result.address).toBe(mockUser.address); + expect(result.chain_id).toBe(mockUser.chain_id); + expect(result.display_name).toBe(mockUser.display_name); + expect(result.avatar).toBe(mockUser.avatar); + }); + + it("should 
create new user if not found", async () => { + // Arrange + const mockUser = generateMockUser(); + + // Act + const result = await usersService.getOrCreateUser(mockUser); + + // Assert + expect(result).toBeDefined(); + expect(result.address).toBe(mockUser.address); + expect(result.chain_id).toBe(mockUser.chain_id); + expect(result.display_name).toBe(mockUser.display_name); + expect(result.avatar).toBe(mockUser.avatar); + }); + }); + + describe("upsertUsers", () => { + it("should create or update users", async () => { + // Arrange + const mockUser = generateMockUser(); + + // Act - First create + const created = await usersService.upsertUsers([mockUser]); + + // Assert - Created + expect(created).toHaveLength(1); + expect(created[0].address).toBe(mockUser.address); + expect(created[0].display_name).toBe(mockUser.display_name); + + // Act - Then update + const updated = await usersService.upsertUsers([ + { + ...mockUser, + display_name: "Updated Name", + avatar: "updated-avatar", + }, + ]); + + // Assert - Updated + expect(updated).toHaveLength(1); + expect(updated[0].address).toBe(mockUser.address); + expect(updated[0].display_name).toBe("Updated Name"); + expect(updated[0].avatar).toBe("updated-avatar"); + }); + + it("should handle multiple users", async () => { + // Arrange + const mockUsers = [generateMockUser(), generateMockUser()]; + + // Act - Insert users one at a time + const results = []; + for (const user of mockUsers) { + const [result] = await usersService.upsertUsers([user]); + results.push(result); + } + + // Assert + expect(results).toHaveLength(2); + expect(results[0].display_name).toBe(mockUsers[0].display_name); + expect(results[1].display_name).toBe(mockUsers[1].display_name); + expect(results[0].chain_id).toBe(mockUsers[0].chain_id); + expect(results[1].chain_id).toBe(mockUsers[1].chain_id); + }); + }); +}); diff --git a/test/services/database/strategies/AllowListQueryStrategy.test.ts 
b/test/services/database/strategies/AllowListQueryStrategy.test.ts new file mode 100644 index 00000000..3c86acc3 --- /dev/null +++ b/test/services/database/strategies/AllowListQueryStrategy.test.ts @@ -0,0 +1,83 @@ +import { Kysely } from "kysely"; +import { IMemoryDb, newDb } from "pg-mem"; +import { beforeEach, describe, expect, it } from "vitest"; +import { AllowlistQueryStrategy } from "../../../../src/services/database/strategies/AllowlistQueryStrategy.js"; +import { CachingDatabase } from "../../../../src/types/kyselySupabaseCaching.js"; + +type TestDatabase = CachingDatabase; + +describe("AllowlistQueryStrategy", () => { + let db: Kysely; + let mem: IMemoryDb; + const strategy = new AllowlistQueryStrategy(); + + beforeEach(async () => { + mem = newDb(); + db = mem.adapters.createKysely() as import("kysely").Kysely; + + await db.schema + .createTable("claimable_fractions_with_proofs") + .addColumn("id", "integer", (b) => b.primaryKey()) + .execute(); + }); + + describe("basic functionality", () => { + it("should query all claimable fractions records", async () => { + const query = strategy.buildDataQuery(db); + + const { sql } = query.compile(); + expect(sql).toContain("claimable_fractions_with_proofs"); + expect(sql).toMatch(/select \* from "claimable_fractions_with_proofs"/); + expect(sql).not.toMatch( + /where exists \(select from "claims" where "claims"."hypercert_id" = "claimable_fractions_with_proofs"."hypercert_id"\)/, + ); + }); + + it("should query all claimable fractions records with hypercert", async () => { + const query = strategy.buildDataQuery(db, { + where: { + hypercert: { + hypercert_id: { eq: "hyper1" }, + }, + }, + }); + + const { sql } = query.compile(); + expect(sql).toContain("claimable_fractions_with_proofs"); + expect(sql).toMatch( + /where exists \(select from "claims" where "claims"."hypercert_id" = "claimable_fractions_with_proofs"."hypercert_id"\)/, + ); + }); + }); + + describe("count", () => { + it("should query all claimable 
fractions records", async () => { + const query = strategy.buildCountQuery(db); + + const { sql } = query.compile(); + expect(sql).toContain("claimable_fractions_with_proofs"); + expect(sql).toMatch( + /select count\(\*\) as "count" from "claimable_fractions_with_proofs"/, + ); + expect(sql).not.toMatch( + /where exists \(select from "claims" where "claims"."hypercert_id" = "claimable_fractions_with_proofs"."hypercert_id"\)/, + ); + }); + + it("should query all claimable fractions records with hypercert", async () => { + const query = strategy.buildCountQuery(db, { + where: { + hypercert: { + hypercert_id: { eq: "hyper1" }, + }, + }, + }); + + const { sql } = query.compile(); + expect(sql).toContain("claimable_fractions_with_proofs"); + expect(sql).toMatch( + /where exists \(select from "claims" where "claims"."hypercert_id" = "claimable_fractions_with_proofs"."hypercert_id"\)/, + ); + }); + }); +}); diff --git a/test/services/database/strategies/AttestationQueryStrategy.test.ts b/test/services/database/strategies/AttestationQueryStrategy.test.ts new file mode 100644 index 00000000..26dcc2aa --- /dev/null +++ b/test/services/database/strategies/AttestationQueryStrategy.test.ts @@ -0,0 +1,172 @@ +import { Kysely } from "kysely"; +import { IMemoryDb, newDb } from "pg-mem"; +import { beforeEach, describe, expect, it } from "vitest"; +import { AttestationsQueryStrategy } from "../../../../src/services/database/strategies/AttestationQueryStrategy.js"; +import { CachingDatabase } from "../../../../src/types/kyselySupabaseCaching.js"; + +type TestDatabase = CachingDatabase; + +describe("AttestationsQueryStrategy", () => { + let db: Kysely; + let mem: IMemoryDb; + const strategy = new AttestationsQueryStrategy(); + + beforeEach(async () => { + mem = newDb(); + db = mem.adapters.createKysely() as Kysely; + + // Create required tables + await db.schema + .createTable("attestations") + .addColumn("id", "integer", (b) => b.primaryKey()) + .addColumn("supported_schemas_id", 
"varchar") + .addColumn("claims_id", "integer") + .execute(); + + await db.schema + .createTable("supported_schemas") + .addColumn("id", "varchar", (b) => b.primaryKey()) + .execute(); + + await db.schema + .createTable("claims") + .addColumn("id", "integer", (b) => b.primaryKey()) + .execute(); + }); + + describe("basic functionality", () => { + it("should query all attestations records", async () => { + const query = strategy.buildDataQuery(db); + + const { sql } = query.compile(); + expect(sql).toContain("attestations"); + expect(sql).toMatch(/select \* from "attestations"/i); + expect(sql).not.toMatch(/where exists/i); + }); + + it("should query attestations with eas_schema filter", async () => { + const query = strategy.buildDataQuery(db, { + where: { + eas_schema: { + id: { eq: "schema-1" }, + }, + }, + }); + + const { sql } = query.compile(); + expect(sql).toContain("attestations"); + expect(sql).toMatch(/select \* from "attestations"/i); + expect(sql).toMatch( + /select .* from "supported_schemas" where "supported_schemas"."id" = "attestations"."supported_schemas_id"/i, + ); + }); + + it("should query attestations with hypercert filter", async () => { + const query = strategy.buildDataQuery(db, { + where: { + hypercert: { + id: { eq: "claim-1" }, + }, + }, + }); + + const { sql } = query.compile(); + expect(sql).toContain("attestations"); + expect(sql).toMatch(/select \* from "attestations"/i); + expect(sql).toMatch( + /select .* from "claims" where "claims"."id" = "attestations"."claims_id"/i, + ); + }); + + it("should query attestations with both eas_schema and hypercert filters", async () => { + const query = strategy.buildDataQuery(db, { + where: { + eas_schema: { + id: { eq: "schema-1" }, + }, + hypercert: { + id: { eq: "claim-1" }, + }, + }, + }); + + const { sql } = query.compile(); + expect(sql).toContain("attestations"); + expect(sql).toMatch(/select \* from "attestations"/i); + expect(sql).toMatch( + /select .* from "supported_schemas" where 
"supported_schemas"."id" = "attestations"."supported_schemas_id"/i, + ); + expect(sql).toMatch( + /select .* from "claims" where "claims"."id" = "attestations"."claims_id"/i, + ); + }); + }); + + describe("count", () => { + it("should count all attestations records", async () => { + const query = strategy.buildCountQuery(db); + + const { sql } = query.compile(); + expect(sql).toContain("attestations"); + expect(sql).toMatch(/select count\(\*\) as "count" from "attestations"/i); + expect(sql).not.toMatch(/where exists/i); + }); + + it("should count attestations with eas_schema filter", async () => { + const query = strategy.buildCountQuery(db, { + where: { + eas_schema: { + id: { eq: "schema-1" }, + }, + }, + }); + + const { sql } = query.compile(); + expect(sql).toContain("attestations"); + expect(sql).toMatch(/select count\(\*\) as "count" from "attestations"/i); + expect(sql).toMatch( + /select .* from "supported_schemas" where "supported_schemas"."id" = "attestations"."supported_schemas_id"/i, + ); + }); + + it("should count attestations with hypercert filter", async () => { + const query = strategy.buildCountQuery(db, { + where: { + hypercert: { + id: { eq: "claim-1" }, + }, + }, + }); + + const { sql } = query.compile(); + expect(sql).toContain("attestations"); + expect(sql).toMatch(/select count\(\*\) as "count" from "attestations"/i); + expect(sql).toMatch( + /select .* from "claims" where "claims"."id" = "attestations"."claims_id"/i, + ); + }); + + it("should count attestations with both eas_schema and hypercert filters", async () => { + const query = strategy.buildCountQuery(db, { + where: { + eas_schema: { + id: { eq: "schema-1" }, + }, + hypercert: { + id: { eq: "claim-1" }, + }, + }, + }); + + const { sql } = query.compile(); + expect(sql).toContain("attestations"); + expect(sql).toMatch(/select count\(\*\) as "count" from "attestations"/i); + expect(sql).toMatch( + /select .* from "supported_schemas" where "supported_schemas"."id" = 
"attestations"."supported_schemas_id"/i, + ); + expect(sql).toMatch( + /select .* from "claims" where "claims"."id" = "attestations"."claims_id"/i, + ); + }); + }); +}); diff --git a/test/services/database/strategies/ClaimsQueryStrategy.test.ts b/test/services/database/strategies/ClaimsQueryStrategy.test.ts new file mode 100644 index 00000000..48491b49 --- /dev/null +++ b/test/services/database/strategies/ClaimsQueryStrategy.test.ts @@ -0,0 +1,180 @@ +import { Kysely } from "kysely"; +import { beforeEach, describe, expect, it } from "vitest"; +import { GetHypercertsArgs } from "../../../../src/graphql/schemas/args/hypercertsArgs.js"; +import { ClaimsQueryStrategy } from "../../../../src/services/database/strategies/ClaimsQueryStrategy.js"; +import { CachingDatabase } from "../../../../src/types/kyselySupabaseCaching.js"; +import { + createTestCachingDatabase, + generateHypercertId, +} from "../../../utils/testUtils.js"; + +describe("ClaimsQueryStrategy", () => { + let strategy: ClaimsQueryStrategy; + let db: Kysely; + + beforeEach(async () => { + strategy = new ClaimsQueryStrategy(); + ({ db } = await createTestCachingDatabase()); + }); + + describe("buildDataQuery", () => { + it("should build basic query without args", () => { + // Act + const query = strategy.buildDataQuery(db); + const { sql } = query.compile(); + + // Assert + expect(sql).toBe('select * from "claims_view"'); + }); + + it("should build query with contract filter", () => { + // Arrange + const args: GetHypercertsArgs = { + where: { + contract: { + chain_id: { eq: 1 }, + }, + }, + }; + + // Act + const query = strategy.buildDataQuery(db, args); + const { sql } = query.compile(); + // Assert + expect(sql).toContain( + 'from "contracts" where "contracts"."id" = "claims_view"."contracts_id"', + ); + }); + + it("should build query with fractions filter", () => { + // Arrange + const args: GetHypercertsArgs = { + where: { + fractions: { + fraction_id: { eq: generateHypercertId() }, + }, + }, + }; + + 
// Act + const query = strategy.buildDataQuery(db, args); + const { sql } = query.compile(); + + // Assert + expect(sql).toContain( + 'from "fractions_view" where "fractions_view"."claims_id" = "claims_view"."id"', + ); + }); + + it("should build query with metadata filter", () => { + // Arrange + const args: GetHypercertsArgs = { + where: { + metadata: { + name: { eq: "Test Claim" }, + }, + }, + }; + + // Act + const query = strategy.buildDataQuery(db, args); + const { sql } = query.compile(); + // Assert + expect(sql).toContain( + 'from "metadata" where "metadata"."uri" = "claims_view"."uri"', + ); + }); + + it("should build query with attestations filter", () => { + // Arrange + const args: GetHypercertsArgs = { + where: { + attestations: { + id: { eq: "test-id" }, + }, + }, + }; + + // Act + const query = strategy.buildDataQuery(db, args); + const { sql } = query.compile(); + // Assert + expect(sql).toContain( + 'from "attestations" where "attestations"."claims_id" = "claims_view"."id"', + ); + }); + + it("should build query with multiple filters", () => { + // Arrange + const args: GetHypercertsArgs = { + where: { + contract: { chain_id: { eq: 1 } }, + metadata: { name: { eq: "Test Claim" } }, + }, + }; + + // Act + const query = strategy.buildDataQuery(db, args); + const { sql } = query.compile(); + // Assert + expect(sql).toContain( + 'from "contracts" where "contracts"."id" = "claims_view"."contracts_id"', + ); + expect(sql).toContain( + 'from "metadata" where "metadata"."uri" = "claims_view"."uri"', + ); + }); + }); + + describe("buildCountQuery", () => { + it("should build basic count query without args", () => { + // Act + const query = strategy.buildCountQuery(db); + const { sql } = query.compile(); + // Assert + expect(sql).toBe('select count(*) as "count" from "claims_view"'); + }); + + it("should build count query with contract filter", () => { + // Arrange + const args: GetHypercertsArgs = { + where: { + contract: { + chain_id: { eq: 1 }, + }, + }, 
+ }; + + // Act + const query = strategy.buildCountQuery(db, args); + const { sql } = query.compile(); + // Assert + expect(sql).toContain( + 'from "contracts" where "contracts"."id" = "claims_view"."contracts_id"', + ); + expect(sql).toContain('count(*) as "count"'); + }); + + it("should build count query with multiple filters", () => { + // Arrange + const args: GetHypercertsArgs = { + where: { + contract: { chain_id: { eq: 1 } }, + metadata: { name: { eq: "Test Claim" } }, + }, + }; + + // Act + const query = strategy.buildCountQuery(db, args); + const { sql } = query.compile(); + // Assert + expect(sql).toContain( + 'from "contracts" where "contracts"."id" = "claims_view"."contracts_id"', + ); + expect(sql).toContain( + 'from "metadata" where "metadata"."uri" = "claims_view"."uri"', + ); + expect(sql).toContain('count(*) as "count"'); + }); + }); +}); diff --git a/test/services/database/strategies/CollectionsQueryStrategy.test.ts b/test/services/database/strategies/CollectionsQueryStrategy.test.ts new file mode 100644 index 00000000..a2905299 --- /dev/null +++ b/test/services/database/strategies/CollectionsQueryStrategy.test.ts @@ -0,0 +1,136 @@ +import { Kysely } from "kysely"; +import { beforeEach, describe, expect, it } from "vitest"; +import { CollectionsQueryStrategy } from "../../../../src/services/database/strategies/CollectionsQueryStrategy.js"; +import { DataDatabase } from "../../../../src/types/kyselySupabaseData.js"; +import { + createTestDataDatabase, + generateMockBlueprint, + generateMockUser, +} from "../../../utils/testUtils.js"; + +type TestDatabase = DataDatabase; + +/** + * Test suite for CollectionsQueryStrategy. + * Verifies the query building functionality for collection data. 
+ * + * Tests cover: + * - Basic data query construction + * - Query construction with admin filters + * - Query construction with blueprint filters + * - Count query construction + * - Table structure and relationships + */ +describe("CollectionsQueryStrategy", () => { + let db: Kysely; + const strategy = new CollectionsQueryStrategy(); + + beforeEach(async () => { + // Create test database with schema + ({ db } = await createTestDataDatabase()); + }); + + describe("data query building", () => { + it("should build a basic query that selects all columns from collections table", async () => { + const query = strategy.buildDataQuery(db); + const { sql } = query.compile(); + + expect(sql).toContain("collections"); + expect(sql).toMatch(/select \* from "collections"/i); + }); + + it("should build a query with admin filter", async () => { + const admin = generateMockUser(); + const query = strategy.buildDataQuery(db, { + where: { + admins: { address: { eq: admin.address } }, + }, + }); + const { sql } = query.compile(); + + expect(sql).toContain("collections"); + expect(sql).toContain("collection_admins"); + }); + + it("should build a query with blueprint filter", async () => { + const blueprint = generateMockBlueprint(); + const query = strategy.buildDataQuery(db, { + where: { blueprints: { id: { eq: blueprint.id } } }, + }); + const { sql } = query.compile(); + + expect(sql).toContain("collections"); + expect(sql).toContain("collection_blueprints"); + expect(sql).toContain("blueprints"); + }); + + it("should build a query with both admin and blueprint filters", async () => { + const admin = generateMockUser(); + const blueprint = generateMockBlueprint(); + const query = strategy.buildDataQuery(db, { + where: { + admins: { address: { eq: admin.address } }, + blueprints: { id: { eq: blueprint.id } }, + }, + }); + const { sql } = query.compile(); + + expect(sql).toContain("collections"); + expect(sql).toContain("collection_admins"); + 
expect(sql).toContain("collection_blueprints"); + expect(sql).toContain("blueprints"); + }); + }); + + describe("count query building", () => { + it("should build a basic count query", async () => { + const query = strategy.buildCountQuery(db); + const { sql } = query.compile(); + + expect(sql).toContain("collections"); + expect(sql).toMatch(/select count\(\*\) as "count" from "collections"/i); + }); + + it("should build a count query with admin filter", async () => { + const admin = generateMockUser(); + const query = strategy.buildCountQuery(db, { + where: { + admins: { address: { eq: admin.address } }, + }, + }); + const { sql } = query.compile(); + + expect(sql).toContain("collections"); + expect(sql).toContain("collection_admins"); + }); + + it("should build a count query with blueprint filter", async () => { + const blueprint = generateMockBlueprint(); + const query = strategy.buildCountQuery(db, { + where: { blueprints: { id: { eq: blueprint.id } } }, + }); + const { sql } = query.compile(); + + expect(sql).toContain("collections"); + expect(sql).toContain("collection_blueprints"); + expect(sql).toContain("blueprints"); + }); + + it("should build a count query with both admin and blueprint filters", async () => { + const admin = generateMockUser(); + const blueprint = generateMockBlueprint(); + const query = strategy.buildCountQuery(db, { + where: { + admins: { address: { eq: admin.address } }, + blueprints: { id: { eq: blueprint.id } }, + }, + }); + const { sql } = query.compile(); + + expect(sql).toContain("collections"); + expect(sql).toContain("collection_admins"); + expect(sql).toContain("collection_blueprints"); + expect(sql).toContain("blueprints"); + }); + }); +}); diff --git a/test/services/database/strategies/ContractsQueryStrategy.test.ts b/test/services/database/strategies/ContractsQueryStrategy.test.ts new file mode 100644 index 00000000..211d61a1 --- /dev/null +++ b/test/services/database/strategies/ContractsQueryStrategy.test.ts @@ -0,0 +1,45 
@@ +import { Kysely } from "kysely"; +import { beforeEach, describe, expect, it } from "vitest"; +import { ContractsQueryStrategy } from "../../../../src/services/database/strategies/ContractsQueryStrategy.js"; +import { CachingDatabase } from "../../../../src/types/kyselySupabaseCaching.js"; +import { createTestCachingDatabase } from "../../../utils/testUtils.js"; + +type TestDatabase = CachingDatabase; + +/** + * Test suite for ContractsQueryStrategy. + * Verifies the query building functionality for contract data. + * + * Tests cover: + * - Basic data query construction + * - Count query construction + * - Table structure and relationships + */ +describe("ContractsQueryStrategy", () => { + let db: Kysely; + const strategy = new ContractsQueryStrategy(); + + beforeEach(async () => { + ({ db } = await createTestCachingDatabase()); + }); + + describe("data query building", () => { + it("should build a query that selects all columns from contracts table", async () => { + const query = strategy.buildDataQuery(db); + const { sql } = query.compile(); + + expect(sql).toContain("contracts"); + expect(sql).toMatch(/select "contracts"\.\* from "contracts"/i); + }); + }); + + describe("count query building", () => { + it("should build a query that counts all records in contracts table", async () => { + const query = strategy.buildCountQuery(db); + const { sql } = query.compile(); + + expect(sql).toContain("contracts"); + expect(sql).toMatch(/select count\(\*\) as "count" from "contracts"/i); + }); + }); +}); diff --git a/test/services/database/strategies/FractionsQueryStrategy.test.ts b/test/services/database/strategies/FractionsQueryStrategy.test.ts new file mode 100644 index 00000000..0f714866 --- /dev/null +++ b/test/services/database/strategies/FractionsQueryStrategy.test.ts @@ -0,0 +1,100 @@ +import { Kysely } from "kysely"; +import { beforeEach, describe, expect, it } from "vitest"; +import { FractionsQueryStrategy } from 
"../../../../src/services/database/strategies/FractionsQueryStrategy.js"; +import { CachingDatabase } from "../../../../src/types/kyselySupabaseCaching.js"; +import { + createTestCachingDatabase, + generateMockFraction, +} from "../../../utils/testUtils.js"; + +describe("FractionsQueryStrategy", () => { + let db: Kysely; + const strategy = new FractionsQueryStrategy(); + let mockFraction: ReturnType; + + beforeEach(async () => { + // Setup test database with additional metadata table + ({ db } = await createTestCachingDatabase(async (db) => { + await db.schema + .createTable("metadata") + .addColumn("id", "varchar", (b) => b.primaryKey()) + .addColumn("uri", "varchar") + .execute(); + })); + + mockFraction = generateMockFraction(); + + // Insert mock data + await db.insertInto("fractions_view").values(mockFraction).execute(); + }); + + describe("data query building", () => { + it("should build a basic query that selects all columns from fractions_view table", async () => { + const query = strategy.buildDataQuery(db); + const { sql } = query.compile(); + + expect(sql).toContain("fractions_view"); + expect(sql).toContain('select * from "fractions_view"'); + }); + + it("should build a query with metadata join when metadata filter is present", async () => { + const query = strategy.buildDataQuery(db, { + where: { metadata: { uri: { eq: "test-uri" } } }, + }); + const { sql } = query.compile(); + + expect(sql).toContain("fractions_view"); + expect(sql).toContain("claims"); + expect(sql).toContain("metadata"); + expect(sql).toMatch(/exists.*from "claims".*left join "metadata"/i); + }); + + it("should not include metadata join when metadata filter is empty", async () => { + const query = strategy.buildDataQuery(db, { + where: { metadata: {} }, + }); + const { sql } = query.compile(); + + expect(sql).not.toContain("metadata"); + expect(sql).not.toContain("claims"); + }); + }); + + describe("count query building", () => { + it("should build a basic count query", async () => 
{ + const query = strategy.buildCountQuery(db); + const { sql } = query.compile(); + + expect(sql).toContain("fractions_view"); + expect(sql).toMatch( + /select count\(\*\) as "count" from "fractions_view"/i, + ); + }); + + it("should build a count query with metadata join when metadata filter is present", async () => { + const query = strategy.buildCountQuery(db, { + where: { metadata: { uri: { eq: "test-uri" } } }, + }); + const { sql } = query.compile(); + + expect(sql).toContain("fractions_view"); + expect(sql).toContain("claims"); + expect(sql).toContain("metadata"); + expect(sql).toMatch(/exists.*from "claims".*left join "metadata"/i); + expect(sql).toMatch(/select count\(\*\) as "count"/i); + }); + + it("should not include metadata join in count query when metadata filter is empty", async () => { + const query = strategy.buildCountQuery(db, { + where: { metadata: {} }, + }); + const { sql } = query.compile(); + + expect(sql).not.toContain("metadata"); + expect(sql).not.toContain("claims"); + expect(sql).toMatch( + /select count\(\*\) as "count" from "fractions_view"/i, + ); + }); + }); +}); diff --git a/test/services/database/strategies/HyperboardsQueryStrategy.test.ts b/test/services/database/strategies/HyperboardsQueryStrategy.test.ts new file mode 100644 index 00000000..df7f439a --- /dev/null +++ b/test/services/database/strategies/HyperboardsQueryStrategy.test.ts @@ -0,0 +1,76 @@ +import { beforeEach, describe, expect, it } from "vitest"; +import { HyperboardsQueryStrategy } from "../../../../src/services/database/strategies/HyperboardsQueryStrategy.js"; +import { Kysely } from "kysely"; +import { DataDatabase } from "../../../../src/types/kyselySupabaseData.js"; +import { createTestDataDatabase } from "../../../utils/testUtils.js"; + +describe("HyperboardsQueryStrategy", () => { + let db: Kysely; + const strategy = new HyperboardsQueryStrategy(); + + beforeEach(async () => { + ({ db } = await createTestDataDatabase()); + }); + + 
describe("buildDataQuery", () => { + it("should build a basic query without args", () => { + const query = strategy.buildDataQuery(db); + const { sql } = query.compile(); + expect(sql).toMatch(/select \* from "hyperboards_with_admins"/i); + }); + + it("should build a query with collection filter", () => { + const query = strategy.buildDataQuery(db, { + where: { + collections: {}, + }, + }); + const { sql } = query.compile(); + expect(sql).toContain( + 'select "hyperboards_with_admins".* from "hyperboards_with_admins"', + ); + }); + + it("should build a query with admin filter", () => { + const query = strategy.buildDataQuery(db, { + where: { + admins: {}, + }, + }); + const { sql } = query.compile(); + expect(sql).toContain( + 'select "hyperboards_with_admins".* from "hyperboards_with_admins"', + ); + }); + }); + + describe("buildCountQuery", () => { + it("should build a basic count query without args", () => { + const query = strategy.buildCountQuery(db); + const { sql } = query.compile(); + expect(sql).toMatch( + /select count\(\*\) as "count" from "hyperboards_with_admins"/i, + ); + }); + + it("should build a count query with collection filter", () => { + const query = strategy.buildCountQuery(db, { + where: { + collections: {}, + }, + }); + const { sql } = query.compile(); + expect(sql).toContain('select count(*) as "count"'); + }); + + it("should build a count query with admin filter", () => { + const query = strategy.buildCountQuery(db, { + where: { + admins: {}, + }, + }); + const { sql } = query.compile(); + expect(sql).toContain('select count(*) as "count"'); + }); + }); +}); diff --git a/test/services/database/strategies/MarketplaceOrdersQueryStrategy.test.ts b/test/services/database/strategies/MarketplaceOrdersQueryStrategy.test.ts new file mode 100644 index 00000000..fa353445 --- /dev/null +++ b/test/services/database/strategies/MarketplaceOrdersQueryStrategy.test.ts @@ -0,0 +1,48 @@ +import { Kysely } from "kysely"; +import { IMemoryDb, newDb } from 
"pg-mem"; +import { beforeEach, describe, expect, it } from "vitest"; +import { MarketplaceOrdersQueryStrategy } from "../../../../src/services/database/strategies/MarketplaceOrdersQueryStrategy.js"; +import { DataDatabase } from "../../../../src/types/kyselySupabaseData.js"; + +type TestDatabase = DataDatabase; + +describe("MarketplaceOrdersQueryStrategy", () => { + let db: Kysely; + let mem: IMemoryDb; + const strategy = new MarketplaceOrdersQueryStrategy(); + + beforeEach(async () => { + mem = newDb(); + db = mem.adapters.createKysely() as Kysely; + + // Create required tables + await db.schema + .createTable("marketplace_orders") + .addColumn("id", "varchar", (b) => b.primaryKey()) + .addColumn("status", "varchar") + .addColumn("buyer_address", "varchar") + .addColumn("seller_address", "varchar") + .addColumn("created_at", "timestamp") + .execute(); + }); + + describe("basic functionality", () => { + it("should query all marketplace orders records", async () => { + const query = strategy.buildDataQuery(db); + + const { sql } = query.compile(); + expect(sql).toContain("marketplace_orders"); + expect(sql).toMatch(/select \* from "marketplace_orders"/i); + }); + + it("should count marketplace orders records", async () => { + const query = strategy.buildCountQuery(db); + + const { sql } = query.compile(); + expect(sql).toContain("marketplace_orders"); + expect(sql).toMatch( + /select count\(\*\) as "count" from "marketplace_orders"/i, + ); + }); + }); +}); diff --git a/test/services/database/strategies/MetadataQueryStrategy.test.ts b/test/services/database/strategies/MetadataQueryStrategy.test.ts new file mode 100644 index 00000000..cccf10f7 --- /dev/null +++ b/test/services/database/strategies/MetadataQueryStrategy.test.ts @@ -0,0 +1,58 @@ +import { Kysely } from "kysely"; +import { IMemoryDb, newDb } from "pg-mem"; +import { beforeEach, describe, expect, it } from "vitest"; +import { MetadataQueryStrategy } from 
"../../../../src/services/database/strategies/MetadataQueryStrategy.js"; +import { CachingDatabase } from "../../../../src/types/kyselySupabaseCaching.js"; + +type TestDatabase = CachingDatabase; + +describe("MetadataQueryStrategy", () => { + let db: Kysely; + let mem: IMemoryDb; + const strategy = new MetadataQueryStrategy(); + + beforeEach(async () => { + mem = newDb(); + db = mem.adapters.createKysely() as Kysely; + + // Create test tables + await db.schema + .createTable("metadata") + .addColumn("id", "text", (b) => b.primaryKey()) + .addColumn("name", "text") + .addColumn("description", "text") + .addColumn("uri", "text") + .execute(); + }); + + describe("data query building", () => { + it("should build a basic query that selects supported columns", async () => { + // Act + const query = strategy.buildDataQuery(db); + const { sql } = query.compile(); + + // Assert + expect(sql).toContain("metadata"); + expect(sql).toContain("select"); + expect(sql).toContain(`"metadata"."id"`); + expect(sql).toContain(`"metadata"."name"`); + expect(sql).toContain(`"metadata"."description"`); + expect(sql).toContain(`"metadata"."uri"`); + expect(sql).toContain(`"metadata"."properties"`); + expect(sql).not.toContain("image"); // Image is excluded from supported columns + }); + }); + + describe("count query building", () => { + it("should build a basic count query", async () => { + // Act + const query = strategy.buildCountQuery(db); + const { sql } = query.compile(); + + // Assert + expect(sql).toContain("metadata"); + expect(sql).toMatch(/count\(\*\)/i); + expect(sql).toMatch(/as "count"/i); + }); + }); +}); diff --git a/test/services/database/strategies/QueryStrategyFactory.test.ts b/test/services/database/strategies/QueryStrategyFactory.test.ts new file mode 100644 index 00000000..9192cf3d --- /dev/null +++ b/test/services/database/strategies/QueryStrategyFactory.test.ts @@ -0,0 +1,96 @@ +import { describe, expect, it } from "vitest"; +import { AllowlistQueryStrategy } from 
"../../../../src/services/database/strategies/AllowlistQueryStrategy.js"; +import { AttestationsQueryStrategy } from "../../../../src/services/database/strategies/AttestationQueryStrategy.js"; +import { BlueprintsQueryStrategy } from "../../../../src/services/database/strategies/BlueprintsQueryStrategy.js"; +import { ClaimsQueryStrategy } from "../../../../src/services/database/strategies/ClaimsQueryStrategy.js"; +import { CollectionsQueryStrategy } from "../../../../src/services/database/strategies/CollectionsQueryStrategy.js"; +import { ContractsQueryStrategy } from "../../../../src/services/database/strategies/ContractsQueryStrategy.js"; +import { FractionsQueryStrategy } from "../../../../src/services/database/strategies/FractionsQueryStrategy.js"; +import { HyperboardsQueryStrategy } from "../../../../src/services/database/strategies/HyperboardsQueryStrategy.js"; +import { MarketplaceOrdersQueryStrategy } from "../../../../src/services/database/strategies/MarketplaceOrdersQueryStrategy.js"; +import { MetadataQueryStrategy } from "../../../../src/services/database/strategies/MetadataQueryStrategy.js"; +import { SupportedDatabase } from "../../../../src/services/database/strategies/QueryStrategy.js"; +import { QueryStrategyFactory } from "../../../../src/services/database/strategies/QueryStrategyFactory.js"; +import { SalesQueryStrategy } from "../../../../src/services/database/strategies/SalesQueryStrategy.js"; +import { SignatureRequestsQueryStrategy } from "../../../../src/services/database/strategies/SignatureRequestsQueryStrategy.js"; +import { SupportedSchemasQueryStrategy } from "../../../../src/services/database/strategies/SupportedSchemasQueryStrategy.js"; +import { UsersQueryStrategy } from "../../../../src/services/database/strategies/UsersQueryStrategy.js"; + +type TableName = keyof SupportedDatabase; + +describe("QueryStrategyFactory", () => { + describe("Basic Strategy Resolution", () => { + // This matches the strategyRegistry in 
QueryStrategyFactory. While it alerts on regressions in the configuration, it does not catch when a new table is added. + + const supportedStrategies = { + attestations: AttestationsQueryStrategy, + claims: ClaimsQueryStrategy, + hypercerts: ClaimsQueryStrategy, + attestation_schema: SupportedSchemasQueryStrategy, + eas_schema: SupportedSchemasQueryStrategy, + supported_schemas: SupportedSchemasQueryStrategy, + metadata: MetadataQueryStrategy, + sales: SalesQueryStrategy, + contracts: ContractsQueryStrategy, + fractions: FractionsQueryStrategy, + fractions_view: FractionsQueryStrategy, + allowlist_records: AllowlistQueryStrategy, + claimable_fractions_with_proofs: AllowlistQueryStrategy, + orders: MarketplaceOrdersQueryStrategy, + marketplace_orders: MarketplaceOrdersQueryStrategy, + users: UsersQueryStrategy, + blueprints: BlueprintsQueryStrategy, + blueprints_with_admins: BlueprintsQueryStrategy, + signature_requests: SignatureRequestsQueryStrategy, + hyperboards: HyperboardsQueryStrategy, + collections: CollectionsQueryStrategy, + } as const; + + it.each(Object.keys(supportedStrategies))( + "should return correct strategy for %s table", + (table) => { + const strategy = QueryStrategyFactory.getStrategy(table as TableName); + expect(strategy).toBeInstanceOf( + supportedStrategies[table as keyof typeof supportedStrategies], + ); + }, + ); + + it("should return unique instances for each table", () => { + const instances = new Set(); + Object.keys(supportedStrategies).forEach((table) => { + const strategy = QueryStrategyFactory.getStrategy(table as TableName); + instances.add(strategy); + }); + + // Each table should have its own instance + expect(instances.size).toBe(Object.keys(supportedStrategies).length); + }); + }); + + describe("Strategy Caching", () => { + it("should return same strategy instance for same table", () => { + const strategy1 = QueryStrategyFactory.getStrategy("claims" as TableName); + const strategy2 = QueryStrategyFactory.getStrategy("claims" 
as TableName); + + expect(strategy1).toBeInstanceOf(ClaimsQueryStrategy); + expect(strategy2).toBeInstanceOf(ClaimsQueryStrategy); + expect(strategy1).toBe(strategy2); // Should be the exact same instance + }); + }); + + describe("Error Handling", () => { + it("should throw error for unknown table", () => { + expect(() => { + QueryStrategyFactory.getStrategy("non_existent_table" as TableName); + }).toThrow("No strategy registered for table"); + }); + + it("should throw error for invalid table name", () => { + expect(() => { + // @ts-expect-error Testing runtime behavior with invalid input + QueryStrategyFactory.getStrategy("invalid_table"); + }).toThrow(); + }); + }); +}); diff --git a/test/services/database/strategies/SalesQueryStrategy.test.ts b/test/services/database/strategies/SalesQueryStrategy.test.ts new file mode 100644 index 00000000..9e602f08 --- /dev/null +++ b/test/services/database/strategies/SalesQueryStrategy.test.ts @@ -0,0 +1,50 @@ +import { Kysely } from "kysely"; +import { IMemoryDb, newDb } from "pg-mem"; +import { beforeEach, describe, expect, it } from "vitest"; +import { SalesQueryStrategy } from "../../../../src/services/database/strategies/SalesQueryStrategy.js"; +import { CachingDatabase } from "../../../../src/types/kyselySupabaseCaching.js"; + +type TestDatabase = CachingDatabase; + +describe("SalesQueryStrategy", () => { + let db: Kysely; + let mem: IMemoryDb; + const strategy = new SalesQueryStrategy(); + + beforeEach(async () => { + mem = newDb(); + db = mem.adapters.createKysely() as Kysely; + + // Create required tables + await db.schema + .createTable("sales") + .addColumn("id", "integer", (b) => b.primaryKey()) + .addColumn("fraction_id", "varchar") + .addColumn("amount", "integer") + .addColumn("price", "integer") + .addColumn("timestamp", "timestamp") + .execute(); + }); + + describe("buildDataQuery", () => { + it("should build a query to select all fields from sales table", () => { + // Act + const query = 
strategy.buildDataQuery(db); + const { sql } = query.compile(); + + // Assert + expect(sql).toBe('select * from "sales"'); + }); + }); + + describe("buildCountQuery", () => { + it("should build a query to count all rows in sales table", () => { + // Act + const query = strategy.buildCountQuery(db); + const { sql } = query.compile(); + + // Assert + expect(sql).toBe('select count(*) as "count" from "sales"'); + }); + }); +}); diff --git a/test/services/database/strategies/SignatureRequestsQueryStrategy.test.ts b/test/services/database/strategies/SignatureRequestsQueryStrategy.test.ts new file mode 100644 index 00000000..3f7bf8f6 --- /dev/null +++ b/test/services/database/strategies/SignatureRequestsQueryStrategy.test.ts @@ -0,0 +1,109 @@ +import { Kysely } from "kysely"; +import { beforeEach, describe, expect, it } from "vitest"; +import { SignatureRequestsQueryStrategy } from "../../../../src/services/database/strategies/SignatureRequestsQueryStrategy.js"; +import { DataDatabase } from "../../../../src/types/kyselySupabaseData.js"; +import { + createTestDataDatabase, + generateMockSignatureRequest, +} from "../../../utils/testUtils.js"; + +describe("SignatureRequestsQueryStrategy", () => { + let db: Kysely; + const strategy = new SignatureRequestsQueryStrategy(); + let mockRequest: ReturnType; + + beforeEach(async () => { + ({ db } = await createTestDataDatabase()); + + mockRequest = generateMockSignatureRequest(); + mockRequest.message = JSON.parse(mockRequest.message as string); + await db.insertInto("signature_requests").values(mockRequest).execute(); + }); + + describe("data query building", () => { + it("should build a query that selects all columns from signature_requests table", () => { + const query = strategy.buildDataQuery(db); + const { sql } = query.compile(); + + expect(sql).toContain("signature_requests"); + expect(sql).toMatch(/select \* from "signature_requests"/i); + }); + + it("should return the inserted signature request data", async () => { + 
const query = strategy.buildDataQuery(db); + const result = await query.execute(); + + expect(result).toHaveLength(1); + expect(result[0]).toMatchObject({ + safe_address: mockRequest.safe_address, + message_hash: mockRequest.message_hash, + chain_id: mockRequest.chain_id, + timestamp: mockRequest.timestamp, + message: mockRequest.message, + purpose: mockRequest.purpose, + status: mockRequest.status, + }); + }); + + it("should handle filtering by safe_address", async () => { + const query = strategy + .buildDataQuery(db) + .where("safe_address", "=", mockRequest.safe_address); + const result = await query.execute(); + + expect(result).toHaveLength(1); + expect(result[0].safe_address).toBe(mockRequest.safe_address); + }); + + it("should handle filtering by status", async () => { + const query = strategy + .buildDataQuery(db) + .where("status", "=", mockRequest.status); + const result = await query.execute(); + + expect(result).toHaveLength(1); + expect(result[0].status).toBe(mockRequest.status); + }); + }); + + describe("count query building", () => { + it("should build a query that counts all records in signature_requests table", () => { + const query = strategy.buildCountQuery(db); + const { sql } = query.compile(); + + expect(sql).toContain("signature_requests"); + expect(sql).toMatch( + /select count\(\*\) as "count" from "signature_requests"/i, + ); + }); + + it("should return correct count of signature requests", async () => { + const query = strategy.buildCountQuery(db); + const result = await query.execute(); + + expect(result).toHaveLength(1); + expect(result[0].count).toBe(1); + }); + + it("should return correct count when filtered", async () => { + // Add another request with different status + await db + .insertInto("signature_requests") + .values({ + ...mockRequest, + safe_address: mockRequest.safe_address + "1", + message_hash: mockRequest.message_hash + "1", + status: "executed", + }) + .execute(); + + const query = strategy + .buildCountQuery(db) + 
.where("status", "=", "pending"); + const result = await query.execute(); + + expect(result).toHaveLength(1); + expect(result[0].count).toBe(1); + }); + }); +}); diff --git a/test/services/database/strategies/SupportedSchemasQueryStrategy.test.ts b/test/services/database/strategies/SupportedSchemasQueryStrategy.test.ts new file mode 100644 index 00000000..d6a190a3 --- /dev/null +++ b/test/services/database/strategies/SupportedSchemasQueryStrategy.test.ts @@ -0,0 +1,65 @@ +import { Kysely } from "kysely"; +import { IMemoryDb, newDb } from "pg-mem"; +import { beforeEach, describe, expect, it } from "vitest"; +import { SupportedSchemasQueryStrategy } from "../../../../src/services/database/strategies/SupportedSchemasQueryStrategy.js"; +import { CachingDatabase } from "../../../../src/types/kyselySupabaseCaching.js"; + +type TestDatabase = CachingDatabase; + +/** + * Test suite for SupportedSchemasQueryStrategy. + * Verifies the query building functionality for supported EAS schemas. + * + * Tests cover: + * - Basic data query construction + * - Count query construction + * - Table structure and relationships + */ +describe("SupportedSchemasQueryStrategy", () => { + let db: Kysely; + let mem: IMemoryDb; + const strategy = new SupportedSchemasQueryStrategy(); + + beforeEach(async () => { + mem = newDb(); + db = mem.adapters.createKysely() as Kysely; + + // Create required tables with appropriate columns and relationships + await db.schema + .createTable("supported_schemas") + .addColumn("id", "varchar", (b) => b.primaryKey()) + .addColumn("chain_id", "integer") + .addColumn("schema", "jsonb") + .addColumn("resolver", "jsonb") + .addColumn("revocable", "boolean") + .execute(); + + await db.schema + .createTable("attestations") + .addColumn("id", "integer", (b) => b.primaryKey()) + .addColumn("supported_schemas_id", "varchar") + .execute(); + }); + + describe("data query building", () => { + it("should build a query that selects all columns from supported_schemas table", 
async () => { + const query = strategy.buildDataQuery(db); + const { sql } = query.compile(); + + expect(sql).toContain("supported_schemas"); + expect(sql).toMatch(/select \* from "supported_schemas"/i); + }); + }); + + describe("count query building", () => { + it("should build a query that counts all records in supported_schemas table", async () => { + const query = strategy.buildCountQuery(db); + const { sql } = query.compile(); + + expect(sql).toContain("supported_schemas"); + expect(sql).toMatch( + /select count\(\*\) as "count" from "supported_schemas"/i, + ); + }); + }); +}); diff --git a/test/services/database/strategies/UsersQueryStrategy.test.ts b/test/services/database/strategies/UsersQueryStrategy.test.ts new file mode 100644 index 00000000..4b99c58f --- /dev/null +++ b/test/services/database/strategies/UsersQueryStrategy.test.ts @@ -0,0 +1,62 @@ +import { Kysely } from "kysely"; +import { beforeEach, describe, expect, it } from "vitest"; +import { UsersQueryStrategy } from "../../../../src/services/database/strategies/UsersQueryStrategy.js"; +import { DataDatabase } from "../../../../src/types/kyselySupabaseData.js"; +import { + createTestDataDatabase, + generateMockUser, +} from "../../../utils/testUtils.js"; + +describe("UsersQueryStrategy", () => { + let db: Kysely; + const strategy = new UsersQueryStrategy(); + let mockUser: ReturnType; + + beforeEach(async () => { + ({ db } = await createTestDataDatabase()); + mockUser = generateMockUser(); + await db.insertInto("users").values(mockUser).execute(); + }); + + describe("data query building", () => { + it("should build a query that selects all columns from users table", () => { + const query = strategy.buildDataQuery(db); + const { sql } = query.compile(); + + expect(sql).toContain("users"); + expect(sql).toMatch(/select \* from "users"/i); + }); + + it("should return the inserted user data", async () => { + const query = strategy.buildDataQuery(db); + const result = await query.execute(); + + 
expect(result).toHaveLength(1); + expect(result[0]).toMatchObject({ + id: mockUser.id, + address: mockUser.address, + chain_id: mockUser.chain_id, + display_name: mockUser.display_name, + avatar: mockUser.avatar, + }); + }); + }); + + describe("count query building", () => { + it("should build a query that counts all records in users table", () => { + const query = strategy.buildCountQuery(db); + const { sql } = query.compile(); + + expect(sql).toContain("users"); + expect(sql).toMatch(/select count\(\*\) as "count" from "users"/i); + }); + + it("should return correct count of users", async () => { + const query = strategy.buildCountQuery(db); + const result = await query.execute(); + + expect(result).toHaveLength(1); + expect(result[0].count).toBe(1); + }); + }); +}); diff --git a/test/services/graphql/resolvers/UserResolver.test.ts b/test/services/graphql/resolvers/UserResolver.test.ts new file mode 100644 index 00000000..364c9c4f --- /dev/null +++ b/test/services/graphql/resolvers/UserResolver.test.ts @@ -0,0 +1,120 @@ +import { container } from "tsyringe"; +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { GetUsersArgs } from "../../../../src/graphql/schemas/args/userArgs.js"; +import type { + SignatureRequestPurpose, + SignatureRequestStatus, +} from "../../../../src/graphql/schemas/typeDefs/signatureRequestTypeDefs.js"; +import type { User } from "../../../../src/graphql/schemas/typeDefs/userTypeDefs.js"; +import { SignatureRequestsService } from "../../../../src/services/database/entities/SignatureRequestsEntityService.js"; +import { UsersService } from "../../../../src/services/database/entities/UsersEntityService.js"; +import { UserResolver } from "../../../../src/services/graphql/resolvers/userResolver.js"; +import type { Json } from "../../../../src/types/supabaseData.js"; +import { generateMockUser } from "../../../utils/testUtils.js"; + +describe("UserResolver", () => { + let userResolver: UserResolver; + let usersService: 
UsersService; + let signatureRequestsService: SignatureRequestsService; + + beforeEach(() => { + usersService = { + getUsers: vi.fn(), + } as unknown as UsersService; + + signatureRequestsService = { + getSignatureRequests: vi.fn(), + } as unknown as SignatureRequestsService; + + container.register(UsersService, { useValue: usersService }); + container.register(SignatureRequestsService, { + useValue: signatureRequestsService, + }); + + userResolver = new UserResolver(usersService, signatureRequestsService); + }); + + describe("users", () => { + it("should return users from service", async () => { + // Arrange + const mockUser = generateMockUser(); + const mockUsers = [mockUser]; + const args: GetUsersArgs = { + where: { + address: { eq: mockUser.address }, + }, + }; + vi.mocked(usersService.getUsers).mockResolvedValue({ + data: mockUsers, + count: mockUsers.length, + }); + + // Act + const result = await userResolver.users(args); + + // Assert + expect(result?.data).toEqual(mockUsers); + expect(result?.count).toBe(mockUsers.length); + expect(usersService.getUsers).toHaveBeenCalledWith(args); + }); + }); + + describe("signature_requests", () => { + it("should return null if user has no address", async () => { + // Arrange + const user = { ...generateMockUser(), address: undefined } as User; + + // Act + const result = await userResolver.signature_requests(user); + + // Assert + expect(result).toBeNull(); + expect( + signatureRequestsService.getSignatureRequests, + ).not.toHaveBeenCalled(); + }); + + it("should return signature requests for user address", async () => { + // Arrange + const user = generateMockUser(); + const mockSignatureRequests = { + data: [ + { + chain_id: 1, + message: { + metadata: { + name: "Test User", + description: "Test Description", + }, + } as Json, + message_hash: "0x1234", + purpose: "update_user_data" as SignatureRequestPurpose, + safe_address: user.address, + status: "pending" as SignatureRequestStatus, + timestamp: 
Math.floor(Date.now() / 1000), + }, + ], + count: 1, + }; + + vi.mocked( + signatureRequestsService.getSignatureRequests, + ).mockResolvedValue(mockSignatureRequests); + + // Act + const result = await userResolver.signature_requests(user); + + // Assert + expect(result).toEqual(mockSignatureRequests); + expect( + signatureRequestsService.getSignatureRequests, + ).toHaveBeenCalledWith({ + where: { + safe_address: { + eq: user.address, + }, + }, + }); + }); + }); +}); diff --git a/test/services/graphql/resolvers/allowlistRecordResolver.test.ts b/test/services/graphql/resolvers/allowlistRecordResolver.test.ts new file mode 100644 index 00000000..cffb769e --- /dev/null +++ b/test/services/graphql/resolvers/allowlistRecordResolver.test.ts @@ -0,0 +1,144 @@ +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { container } from "tsyringe"; +import { AllowlistRecordResolver } from "../../../../src/services/graphql/resolvers/allowlistRecordResolver.js"; +import { AllowlistRecordService } from "../../../../src/services/database/entities/AllowListRecordEntityService.js"; +import { HypercertsService } from "../../../../src/services/database/entities/HypercertsEntityService.js"; +import type { GetAllowlistRecordsArgs } from "../../../../src/graphql/schemas/args/allowlistRecordArgs.js"; +import type { AllowlistRecord } from "../../../../src/graphql/schemas/typeDefs/allowlistRecordTypeDefs.js"; +import type { Mock } from "vitest"; + +describe("AllowlistRecordResolver", () => { + let resolver: AllowlistRecordResolver; + let mockAllowlistRecordService: { + getAllowlistRecords: Mock; + getAllowlistRecord: Mock; + }; + let mockHypercertsService: { + getHypercert: Mock; + getHypercertMetadata: Mock; + }; + + beforeEach(() => { + // Create mock services + mockAllowlistRecordService = { + getAllowlistRecords: vi.fn(), + getAllowlistRecord: vi.fn(), + }; + + mockHypercertsService = { + getHypercert: vi.fn(), + getHypercertMetadata: vi.fn(), + }; + + // Register mocks 
with the DI container + container.registerInstance( + AllowlistRecordService, + mockAllowlistRecordService as unknown as AllowlistRecordService, + ); + container.registerInstance( + HypercertsService, + mockHypercertsService as unknown as HypercertsService, + ); + + // Resolve the resolver with mocked dependencies + resolver = container.resolve(AllowlistRecordResolver); + }); + + describe("allowlistRecords", () => { + it("should return allowlist records for given arguments", async () => { + // Arrange + const args: GetAllowlistRecordsArgs = { + where: { + hypercert: { + hypercert_id: { eq: "test-id" }, + }, + }, + }; + const expectedResult = { + data: [ + { id: "1", hypercert_id: "test-id" }, + { id: "2", hypercert_id: "test-id" }, + ], + count: 2, + }; + mockAllowlistRecordService.getAllowlistRecords.mockResolvedValue( + expectedResult, + ); + + // Act + const result = await resolver.allowlistRecords(args); + + // Assert + expect( + mockAllowlistRecordService.getAllowlistRecords, + ).toHaveBeenCalledWith(args); + expect(result).toEqual(expectedResult); + }); + + it("should handle errors from allowlistRecordService", async () => { + // Arrange + const args: GetAllowlistRecordsArgs = {}; + const error = new Error("Service error"); + mockAllowlistRecordService.getAllowlistRecords.mockRejectedValue(error); + + // Act & Assert + await expect(resolver.allowlistRecords(args)).resolves.toBeNull(); + }); + }); + + describe("hypercert field resolver", () => { + it("should resolve hypercert for an allowlist record", async () => { + // Arrange + const allowlistRecord: AllowlistRecord = { + id: "1", + hypercert_id: "test-hypercert-id", + } as AllowlistRecord; + const expectedHypercert = { + id: "test-hypercert-id", + name: "Test Hypercert", + metadata: null, + }; + mockHypercertsService.getHypercert.mockResolvedValue(expectedHypercert); + + // Act + const result = await resolver.hypercert(allowlistRecord); + + // Assert + 
expect(mockHypercertsService.getHypercert).toHaveBeenCalledWith({ + where: { hypercert_id: { eq: "test-hypercert-id" } }, + }); + expect(result).toEqual(expectedHypercert); + }); + + it("should handle null hypercert result", async () => { + // Arrange + const allowlistRecord: AllowlistRecord = { + id: "1", + hypercert_id: "non-existent-id", + } as AllowlistRecord; + mockHypercertsService.getHypercert.mockResolvedValue(null); + + // Act + const result = await resolver.hypercert(allowlistRecord); + + // Assert + expect(mockHypercertsService.getHypercert).toHaveBeenCalledWith({ + where: { hypercert_id: { eq: "non-existent-id" } }, + }); + expect(result).toBeNull(); + }); + + it("should handle errors from hypercertsService", async () => { + // Arrange + const allowlistRecord: AllowlistRecord = { + id: "1", + hypercert_id: "error-id", + } as AllowlistRecord; + const error = new Error("Service error"); + mockHypercertsService.getHypercert.mockRejectedValue(error); + + // Act & Assert + await expect(resolver.hypercert(allowlistRecord)).resolves.toBeNull(); + }); + }); +}); diff --git a/test/services/graphql/resolvers/attestationResolver.test.ts b/test/services/graphql/resolvers/attestationResolver.test.ts new file mode 100644 index 00000000..6c798260 --- /dev/null +++ b/test/services/graphql/resolvers/attestationResolver.test.ts @@ -0,0 +1,374 @@ +import { faker } from "@faker-js/faker"; +import { container } from "tsyringe"; +import { getAddress } from "viem"; +import type { Mock } from "vitest"; +import { beforeEach, describe, expect, it, vi } from "vitest"; +import type { GetAttestationsArgs } from "../../../../src/graphql/schemas/args/attestationArgs.js"; +import type { Attestation } from "../../../../src/graphql/schemas/typeDefs/attestationTypeDefs.js"; +import { AttestationService } from "../../../../src/services/database/entities/AttestationEntityService.js"; +import { AttestationSchemaService } from 
"../../../../src/services/database/entities/AttestationSchemaEntityService.js"; +import { HypercertsService } from "../../../../src/services/database/entities/HypercertsEntityService.js"; +import { AttestationResolver } from "../../../../src/services/graphql/resolvers/attestationResolver.js"; + +describe("AttestationResolver", () => { + let resolver: AttestationResolver; + let mockAttestationService: { + getAttestations: Mock; + }; + let mockHypercertService: { + getHypercert: Mock; + getHypercertMetadata: Mock; + }; + let mockAttestationSchemaService: { + getAttestationSchema: Mock; + }; + + beforeEach(() => { + // Create mock services + mockAttestationService = { + getAttestations: vi.fn(), + }; + + mockHypercertService = { + getHypercert: vi.fn(), + getHypercertMetadata: vi.fn(), + }; + + mockAttestationSchemaService = { + getAttestationSchema: vi.fn(), + }; + + // Register mocks with the DI container + container.registerInstance( + AttestationService, + mockAttestationService as unknown as AttestationService, + ); + container.registerInstance( + HypercertsService, + mockHypercertService as unknown as HypercertsService, + ); + container.registerInstance( + AttestationSchemaService, + mockAttestationSchemaService as unknown as AttestationSchemaService, + ); + + // Resolve the resolver with mocked dependencies + resolver = container.resolve(AttestationResolver); + }); + + describe("attestations", () => { + it("should return attestations for given arguments", async () => { + // Arrange + const args: GetAttestationsArgs = { + where: { + id: { eq: "test-id" }, + }, + }; + const expectedResult = { + data: [ + { id: "1", data: { token_id: "123" } }, + { id: "2", data: { token_id: "456" } }, + ], + count: 2, + }; + mockAttestationService.getAttestations.mockResolvedValue(expectedResult); + + // Act + const result = await resolver.attestations(args); + + // Assert + expect(mockAttestationService.getAttestations).toHaveBeenCalledWith(args); + 
expect(result).toEqual(expectedResult); + }); + + it("should handle errors from attestationService", async () => { + // Arrange + const error = new Error("Service error"); + mockAttestationService.getAttestations.mockRejectedValue(error); + + // Act & Assert + await expect(resolver.attestations({})).resolves.toBeNull(); + }); + }); + + describe("hypercert field resolver", () => { + it("should resolve hypercert for valid attestation data", async () => { + // Arrange + const attestation: Attestation = { + id: "1", + data: { + chain_id: "1", + contract_address: "0x1234567890123456789012345678901234567890", + token_id: "123", + }, + } as unknown as Attestation; + const expectedHypercert = { + id: "test-hypercert", + name: "Test Hypercert", + }; + mockHypercertService.getHypercert.mockResolvedValue(expectedHypercert); + + // Act + const result = await resolver.hypercert(attestation); + + // Assert + expect(mockHypercertService.getHypercert).toHaveBeenCalledWith({ + where: { + hypercert_id: { + eq: "1-0x1234567890123456789012345678901234567890-123", + }, + }, + }); + expect(result).toEqual(expectedHypercert); + }); + + it("should return null when attestation has no data", async () => { + // Arrange + const attestation: Attestation = { + id: "1", + data: null, + } as Attestation; + + // Act + const result = await resolver.hypercert(attestation); + + // Assert + expect(result).toBeNull(); + expect(mockHypercertService.getHypercert).not.toHaveBeenCalled(); + }); + + it("should handle invalid attestation data", async () => { + // Arrange + const attestation: Attestation = { + id: "1", + data: { + invalid_data: "test", + }, + } as Attestation; + + // Act + const result = await resolver.hypercert(attestation); + + // Assert + expect(result).toBeNull(); + expect(mockHypercertService.getHypercert).not.toHaveBeenCalled(); + }); + }); + + describe("eas_schema field resolver", () => { + it("should resolve schema for attestation with schema id", async () => { + // Arrange + const 
attestation: Attestation = { + id: "1", + supported_schemas_id: "schema-1", + } as Attestation; + const expectedSchema = { + id: "schema-1", + name: "Test Schema", + }; + mockAttestationSchemaService.getAttestationSchema.mockResolvedValue( + expectedSchema, + ); + + // Act + const result = await resolver.eas_schema(attestation); + + // Assert + expect( + mockAttestationSchemaService.getAttestationSchema, + ).toHaveBeenCalledWith({ + where: { + id: { eq: "schema-1" }, + }, + }); + expect(result).toEqual(expectedSchema); + }); + + it("should return null when attestation has no schema id", async () => { + // Arrange + const attestation: Attestation = { + id: "1", + } as Attestation; + + // Act + const result = await resolver.eas_schema(attestation); + + // Assert + expect(result).toBeNull(); + expect( + mockAttestationSchemaService.getAttestationSchema, + ).not.toHaveBeenCalled(); + }); + }); + + describe("metadata field resolver", () => { + it("should resolve metadata for valid attestation data", async () => { + // Arrange + const attestation: Attestation = { + id: "1", + data: { + chain_id: "1", + contract_address: "0x1234567890123456789012345678901234567890", + token_id: "123", + }, + } as unknown as Attestation; + const expectedMetadata = { + id: "metadata-1", + name: "Test Metadata", + }; + mockHypercertService.getHypercertMetadata.mockResolvedValue( + expectedMetadata, + ); + + // Act + const result = await resolver.metadata(attestation); + + // Assert + expect(mockHypercertService.getHypercertMetadata).toHaveBeenCalledWith({ + hypercert_id: "1-0x1234567890123456789012345678901234567890-123", + }); + expect(result).toEqual(expectedMetadata); + }); + + it("should return undefined when attestation has no data", async () => { + // Arrange + const attestation: Attestation = { + id: "1", + data: null, + } as Attestation; + + // Act + const result = await resolver.metadata(attestation); + + // Assert + expect(result).toBeNull(); + 
expect(mockHypercertService.getHypercertMetadata).not.toHaveBeenCalled(); + }); + }); + + describe("getHypercertIdFromAttestationData", () => { + const contract_address = getAddress(faker.finance.ethereumAddress()); + + it("should generate correct hypercert id from string bigints", () => { + const data = { + chain_id: "11155111", + contract_address, + token_id: "123", + }; + + const result = resolver.getHypercertIdFromAttestationData(data); + + expect(result).toBe(`11155111-${contract_address}-123`); + }); + + it("should generate correct hypercert id from number inputs", () => { + const data = { + chain_id: 1, + contract_address, + token_id: 123, + }; + + const result = resolver.getHypercertIdFromAttestationData(data); + + expect(result).toBe(`1-${contract_address}-123`); + }); + + it("should handle large bigint values", () => { + const data = { + chain_id: "9007199254740991000", // Number.MAX_SAFE_INTEGER * 1000 + contract_address, + token_id: "9007199254740991", + }; + + const result = resolver.getHypercertIdFromAttestationData(data); + + expect(result).toBe( + `9007199254740991000-${contract_address}-9007199254740991`, + ); + }); + + it("should handle invalid chain_id", () => { + const data = { + chain_id: "not_a_bigint", + contract_address, + token_id: "123", + }; + + const result = resolver.getHypercertIdFromAttestationData(data); + + expect(result).toBeNull(); + }); + + it("should handle invalid contract_address", () => { + const data = { + chain_id: "1", + contract_address: "not_an_address", + token_id: "123", + }; + + const result = resolver.getHypercertIdFromAttestationData(data); + + expect(result).toBeNull(); + }); + + it("should handle invalid token_id", () => { + const data = { + chain_id: "1", + contract_address, + token_id: "not_a_bigint", + }; + + const result = resolver.getHypercertIdFromAttestationData(data); + + expect(result).toBeNull(); + }); + + it("should handle floating point numbers", () => { + const data = { + chain_id: 1.5, + 
contract_address, + token_id: "123", + }; + + const result = resolver.getHypercertIdFromAttestationData(data); + + expect(result).toBeNull(); + }); + + it("should handle missing required fields", () => { + const data = { + chain_id: "1", + // missing contract_address + token_id: "123", + }; + + const result = resolver.getHypercertIdFromAttestationData(data); + + expect(result).toBeNull(); + }); + + it("should handle null data", () => { + const result = resolver.getHypercertIdFromAttestationData(null); + + expect(result).toBeNull(); + }); + + it("should handle empty object", () => { + const result = resolver.getHypercertIdFromAttestationData({}); + + expect(result).toBeNull(); + }); + + it("should handle negative bigint values", () => { + const data = { + chain_id: "-1", + contract_address, + token_id: "123", + }; + + const result = resolver.getHypercertIdFromAttestationData(data); + + expect(result).toBe(`-1-${contract_address}-123`); + }); + }); +}); diff --git a/test/services/graphql/resolvers/attestationSchemaResolver.test.ts b/test/services/graphql/resolvers/attestationSchemaResolver.test.ts new file mode 100644 index 00000000..b3f7096e --- /dev/null +++ b/test/services/graphql/resolvers/attestationSchemaResolver.test.ts @@ -0,0 +1,138 @@ +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { container } from "tsyringe"; +import { AttestationSchemaResolver } from "../../../../src/services/graphql/resolvers/attestationSchemaResolver.js"; +import { AttestationSchemaService } from "../../../../src/services/database/entities/AttestationSchemaEntityService.js"; +import { AttestationService } from "../../../../src/services/database/entities/AttestationEntityService.js"; +import type { Mock } from "vitest"; +import type { GetAttestationSchemasArgs } from "../../../../src/graphql/schemas/args/attestationSchemaArgs.js"; +import type { AttestationSchema } from "../../../../src/graphql/schemas/typeDefs/attestationSchemaTypeDefs.js"; + +const 
ZERO_ADDRESS = "0x0000000000000000000000000000000000000000"; + +describe("AttestationSchemaResolver", () => { + let attestationSchemaResolver: AttestationSchemaResolver; + let mockAttestationSchemaService: { + getAttestationSchemas: Mock; + }; + let mockAttestationService: { + getAttestations: Mock; + }; + + beforeEach(() => { + mockAttestationSchemaService = { + getAttestationSchemas: vi.fn(), + }; + mockAttestationService = { + getAttestations: vi.fn(), + }; + + container.clearInstances(); + container.registerInstance( + AttestationSchemaService, + mockAttestationSchemaService as unknown as AttestationSchemaService, + ); + container.registerInstance( + AttestationService, + mockAttestationService as unknown as AttestationService, + ); + + attestationSchemaResolver = container.resolve(AttestationSchemaResolver); + }); + + describe("attestationSchemas", () => { + it("should return attestation schemas", async () => { + const mockSchemas = { + data: [ + { + id: "1", + uid: "schema-1", + chain_id: "1", + schema: "test schema 1", + resolver: ZERO_ADDRESS, + revocable: true, + attestations: null, + }, + { + id: "2", + uid: "schema-2", + chain_id: "1", + schema: "test schema 2", + resolver: ZERO_ADDRESS, + revocable: false, + attestations: null, + }, + ], + count: 2, + }; + + mockAttestationSchemaService.getAttestationSchemas.mockResolvedValue( + mockSchemas, + ); + + const args: GetAttestationSchemasArgs = {}; + const result = await attestationSchemaResolver.attestationSchemas(args); + + expect(result).toEqual(mockSchemas); + expect( + mockAttestationSchemaService.getAttestationSchemas, + ).toHaveBeenCalledWith(args); + }); + + it("should handle errors from attestationSchemaService", async () => { + // Arrange + const error = new Error("Service error"); + mockAttestationSchemaService.getAttestationSchemas.mockRejectedValue( + error, + ); + + // Act & Assert + await expect( + attestationSchemaResolver.attestationSchemas({}), + ).resolves.toBeNull(); + }); + }); + + 
describe("attestations", () => { + it("should return attestations for a schema", async () => { + const mockSchema = { + id: "1", + uid: "schema-1", + chain_id: "1", + schema: "test schema 1", + resolver: ZERO_ADDRESS, + revocable: true, + attestations: null, + } as AttestationSchema; + + const mockAttestations = { + data: [], + count: 0, + }; + + mockAttestationService.getAttestations.mockResolvedValue( + mockAttestations, + ); + + const result = await attestationSchemaResolver.attestations(mockSchema); + + expect(result).toEqual(mockAttestations); + expect(mockAttestationService.getAttestations).toHaveBeenCalledWith({ + where: { supported_schemas_id: { eq: mockSchema.id } }, + }); + }); + + it("should handle errors from attestationService", async () => { + // Arrange + const schema: AttestationSchema = { + id: "1", + } as AttestationSchema; + const error = new Error("Service error"); + mockAttestationService.getAttestations.mockRejectedValue(error); + + // Act & Assert + await expect( + attestationSchemaResolver.attestations(schema), + ).resolves.toBeNull(); + }); + }); +}); diff --git a/test/services/graphql/resolvers/blueprintResolver.test.ts b/test/services/graphql/resolvers/blueprintResolver.test.ts new file mode 100644 index 00000000..fee432a2 --- /dev/null +++ b/test/services/graphql/resolvers/blueprintResolver.test.ts @@ -0,0 +1,193 @@ +import { container } from "tsyringe"; +import { beforeEach, describe, expect, it, Mock, vi } from "vitest"; +import { GetBlueprintsArgs } from "../../../../src/graphql/schemas/args/blueprintArgs.js"; +import { Blueprint } from "../../../../src/graphql/schemas/typeDefs/blueprintTypeDefs.js"; +import { BlueprintsService } from "../../../../src/services/database/entities/BlueprintsEntityService.js"; +import { HypercertsService } from "../../../../src/services/database/entities/HypercertsEntityService.js"; +import { BlueprintResolver } from "../../../../src/services/graphql/resolvers/blueprintResolver.js"; +import { + 
generateMockAddress, + generateMockBlueprint, +} from "../../../utils/testUtils.js"; + +describe("BlueprintResolver", () => { + let resolver: BlueprintResolver; + let mockBlueprintsService: { + getBlueprints: Mock; + getBlueprintAdmins: Mock; + }; + let mockHypercertsService: { + getHypercerts: Mock; + }; + + beforeEach(() => { + // Create mock services + mockBlueprintsService = { + getBlueprints: vi.fn(), + getBlueprintAdmins: vi.fn(), + }; + mockHypercertsService = { + getHypercerts: vi.fn(), + }; + + // Register mocks with the DI container + container.registerInstance( + BlueprintsService, + mockBlueprintsService as unknown as BlueprintsService, + ); + container.registerInstance( + HypercertsService, + mockHypercertsService as unknown as HypercertsService, + ); + + // Create resolver instance + resolver = container.resolve(BlueprintResolver); + }); + + describe("blueprints", () => { + it("should return blueprints data when successful", async () => { + // Arrange + const args: GetBlueprintsArgs = { + where: { id: { eq: 1 } }, + }; + const mockBlueprint = generateMockBlueprint(); + const mockResponse = { + data: [mockBlueprint as unknown as Blueprint], + count: 1, + }; + mockBlueprintsService.getBlueprints.mockResolvedValue(mockResponse); + + // Act + const result = await resolver.blueprints(args); + + // Assert + expect(result).toEqual(mockResponse); + expect(mockBlueprintsService.getBlueprints).toHaveBeenCalledWith(args); + }); + + it("should return null when an error occurs", async () => { + // Arrange + const args: GetBlueprintsArgs = { + where: { id: { eq: 1 } }, + }; + mockBlueprintsService.getBlueprints.mockRejectedValue( + new Error("Test error"), + ); + + // Act + const result = await resolver.blueprints(args); + + // Assert + expect(result).toBeNull(); + }); + }); + + describe("admins", () => { + it("should return admins data when successful", async () => { + // Arrange + const blueprint = generateMockBlueprint() as unknown as Blueprint; + const 
mockAdmins = [ + { + address: generateMockAddress(), + display_name: "Test Admin", + avatar: "test-avatar", + }, + ]; + mockBlueprintsService.getBlueprintAdmins.mockResolvedValue(mockAdmins); + + // Act + const result = await resolver.admins(blueprint); + + // Assert + expect(result).toEqual(mockAdmins); + expect(mockBlueprintsService.getBlueprintAdmins).toHaveBeenCalledWith( + blueprint.id, + ); + }); + + it("should return empty array when blueprint has no id", async () => { + // Arrange + const blueprint = generateMockBlueprint() as unknown as Blueprint; + blueprint.id = undefined; + + // Act + const result = await resolver.admins(blueprint); + + // Assert + expect(result).toEqual([]); + expect(mockBlueprintsService.getBlueprintAdmins).not.toHaveBeenCalled(); + }); + + it("should return empty array when an error occurs", async () => { + // Arrange + const blueprint = generateMockBlueprint() as unknown as Blueprint; + mockBlueprintsService.getBlueprintAdmins.mockRejectedValue( + new Error("Test error"), + ); + + // Act + const result = await resolver.admins(blueprint); + + // Assert + expect(result).toEqual([]); + }); + }); + + describe("hypercerts", () => { + it("should return hypercerts data when successful", async () => { + // Arrange + const blueprint = generateMockBlueprint() as unknown as Blueprint; + const hypercertIds = blueprint.hypercert_ids as string[]; + const mockResponse = { + data: [ + { + id: hypercertIds[0], + hypercert_id: hypercertIds[0], + metadata: { + name: "Test Hypercert", + description: "Test Description", + }, + }, + ], + count: 1, + }; + mockHypercertsService.getHypercerts.mockResolvedValue(mockResponse); + + // Act + const result = await resolver.hypercerts(blueprint); + + // Assert + expect(result).toEqual(mockResponse); + expect(mockHypercertsService.getHypercerts).toHaveBeenCalledWith({ + where: { hypercert_id: { in: hypercertIds } }, + }); + }); + + it("should return null when blueprint has no hypercert ids", async () => { + // 
Arrange + const blueprint = generateMockBlueprint() as unknown as Blueprint; + blueprint.hypercert_ids = []; + + // Act + const result = await resolver.hypercerts(blueprint); + + // Assert + expect(result).toBeNull(); + expect(mockHypercertsService.getHypercerts).not.toHaveBeenCalled(); + }); + + it("should return null when an error occurs", async () => { + // Arrange + const blueprint = generateMockBlueprint() as unknown as Blueprint; + mockHypercertsService.getHypercerts.mockRejectedValue( + new Error("Test error"), + ); + + // Act + const result = await resolver.hypercerts(blueprint); + + // Assert + expect(result).toBeNull(); + }); + }); +}); diff --git a/test/services/graphql/resolvers/collectionResolver.test.ts b/test/services/graphql/resolvers/collectionResolver.test.ts new file mode 100644 index 00000000..802dc57c --- /dev/null +++ b/test/services/graphql/resolvers/collectionResolver.test.ts @@ -0,0 +1,248 @@ +import { container } from "tsyringe"; +import type { Mock } from "vitest"; +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { GetCollectionsArgs } from "../../../../src/graphql/schemas/args/collectionArgs.js"; +import type { Collection } from "../../../../src/graphql/schemas/typeDefs/collectionTypeDefs.js"; +import { CollectionService } from "../../../../src/services/database/entities/CollectionEntityService.js"; +import { CollectionResolver } from "../../../../src/services/graphql/resolvers/collectionResolver.js"; +import { faker } from "@faker-js/faker"; +import { + generateMockBlueprint, + generateMockUser, +} from "../../../utils/testUtils.js"; + +describe("CollectionResolver", () => { + let resolver: CollectionResolver; + let mockCollectionService: { + getCollections: Mock; + getCollectionHypercerts: Mock; + getCollectionAdmins: Mock; + getCollectionBlueprints: Mock; + }; + let mockCollection: Collection; + + beforeEach(() => { + // Mock console methods + vi.spyOn(console, "error").mockImplementation(() => {}); + + // 
Create mock service + mockCollectionService = { + getCollections: vi.fn(), + getCollectionHypercerts: vi.fn(), + getCollectionAdmins: vi.fn(), + getCollectionBlueprints: vi.fn(), + }; + + // Register mock with the DI container + container.registerInstance( + CollectionService, + mockCollectionService as unknown as CollectionService, + ); + + // Create test data + mockCollection = { + id: faker.string.uuid(), + name: faker.company.name(), + description: faker.lorem.paragraph(), + created_at: faker.date.past().toISOString(), + updated_at: faker.date.recent().toISOString(), + } as Collection; + + // Create resolver instance + resolver = container.resolve(CollectionResolver); + }); + + describe("collections query", () => { + it("should return collections for given arguments", async () => { + // Arrange + const args: GetCollectionsArgs = { + where: { + name: { contains: mockCollection.name }, + }, + }; + const expectedResult = { + data: [mockCollection], + count: 1, + }; + mockCollectionService.getCollections.mockResolvedValue(expectedResult); + + // Act + const result = await resolver.collections(args); + + // Assert + expect(mockCollectionService.getCollections).toHaveBeenCalledWith(args); + expect(result).toEqual(expectedResult); + }); + + it("should handle errors from collectionService", async () => { + // Arrange + const error = new Error("Service error"); + mockCollectionService.getCollections.mockRejectedValue(error); + + // Act + const result = await resolver.collections({}); + + // Assert + expect(result).toBeNull(); + expect(console.error).toHaveBeenCalledWith( + expect.stringContaining( + "[CollectionResolver::collections] Error fetching collections:", + ), + ); + }); + }); + + describe("hypercerts field resolver", () => { + it("should resolve hypercerts for a collection", async () => { + // Arrange + const expectedHypercerts = [ + { id: faker.string.uuid(), name: faker.company.name() }, + { id: faker.string.uuid(), name: faker.company.name() }, + ]; + 
mockCollectionService.getCollectionHypercerts.mockResolvedValue( + expectedHypercerts, + ); + + // Act + const result = await resolver.hypercerts(mockCollection); + + // Assert + expect( + mockCollectionService.getCollectionHypercerts, + ).toHaveBeenCalledWith(mockCollection.id); + expect(result).toEqual(expectedHypercerts); + }); + + it("should return null when collection has no id", async () => { + // Arrange + const collectionWithoutId = { ...mockCollection, id: undefined }; + + // Act + const result = await resolver.hypercerts(collectionWithoutId); + + // Assert + expect(result).toBeNull(); + expect( + mockCollectionService.getCollectionHypercerts, + ).not.toHaveBeenCalled(); + }); + + it("should handle errors from collectionService", async () => { + // Arrange + const error = new Error("Service error"); + mockCollectionService.getCollectionHypercerts.mockRejectedValue(error); + + // Act + const result = await resolver.hypercerts(mockCollection); + + // Assert + expect(result).toBeNull(); + expect(console.error).toHaveBeenCalledWith( + expect.stringContaining( + "[CollectionResolver::hypercerts] Error fetching hypercerts:", + ), + ); + }); + }); + + describe("admins field resolver", () => { + it("should resolve admins for a collection", async () => { + // Arrange + const expectedAdmins = [generateMockUser(), generateMockUser()]; + mockCollectionService.getCollectionAdmins.mockResolvedValue( + expectedAdmins, + ); + + // Act + const result = await resolver.admins(mockCollection); + + // Assert + expect(mockCollectionService.getCollectionAdmins).toHaveBeenCalledWith( + mockCollection.id, + ); + expect(result).toEqual(expectedAdmins); + }); + + it("should return null when collection has no id", async () => { + // Arrange + const collectionWithoutId = { ...mockCollection, id: undefined }; + + // Act + const result = await resolver.admins(collectionWithoutId); + + // Assert + expect(result).toBeNull(); + 
expect(mockCollectionService.getCollectionAdmins).not.toHaveBeenCalled(); + }); + + it("should handle errors from collectionService", async () => { + // Arrange + const error = new Error("Service error"); + mockCollectionService.getCollectionAdmins.mockRejectedValue(error); + + // Act + const result = await resolver.admins(mockCollection); + + // Assert + expect(result).toBeNull(); + expect(console.error).toHaveBeenCalledWith( + expect.stringContaining( + "[CollectionResolver::admins] Error fetching admins:", + ), + ); + }); + }); + + describe("blueprints field resolver", () => { + it("should resolve blueprints for a collection", async () => { + // Arrange + const expectedBlueprints = [ + generateMockBlueprint(), + generateMockBlueprint(), + ]; + mockCollectionService.getCollectionBlueprints.mockResolvedValue( + expectedBlueprints, + ); + + // Act + const result = await resolver.blueprints(mockCollection); + + // Assert + expect( + mockCollectionService.getCollectionBlueprints, + ).toHaveBeenCalledWith(mockCollection.id); + expect(result).toEqual(expectedBlueprints); + }); + + it("should return null when collection has no id", async () => { + // Arrange + const collectionWithoutId = { ...mockCollection, id: undefined }; + + // Act + const result = await resolver.blueprints(collectionWithoutId); + + // Assert + expect(result).toBeNull(); + expect( + mockCollectionService.getCollectionBlueprints, + ).not.toHaveBeenCalled(); + }); + + it("should handle errors from collectionService", async () => { + // Arrange + const error = new Error("Service error"); + mockCollectionService.getCollectionBlueprints.mockRejectedValue(error); + + // Act + const result = await resolver.blueprints(mockCollection); + + // Assert + expect(result).toBeNull(); + expect(console.error).toHaveBeenCalledWith( + expect.stringContaining( + "[CollectionResolver::blueprints] Error fetching blueprints:", + ), + ); + }); + }); +}); diff --git a/test/services/graphql/resolvers/contractResolver.test.ts 
b/test/services/graphql/resolvers/contractResolver.test.ts new file mode 100644 index 00000000..544c6048 --- /dev/null +++ b/test/services/graphql/resolvers/contractResolver.test.ts @@ -0,0 +1,96 @@ +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { container } from "tsyringe"; +import { faker } from "@faker-js/faker"; +import { getAddress } from "viem"; +import { ContractResolver } from "../../../../src/services/graphql/resolvers/contractResolver.js"; +import { ContractService } from "../../../../src/services/database/entities/ContractEntityService.js"; +import type { Mock } from "vitest"; +import type { GetContractsArgs } from "../../../../src/graphql/schemas/args/contractArgs.js"; + +describe("ContractResolver", () => { + let resolver: ContractResolver; + let mockContractService: { + getContracts: Mock; + }; + const mockContractAddress = getAddress(faker.finance.ethereumAddress()); + + beforeEach(() => { + // Create mock service + mockContractService = { + getContracts: vi.fn(), + }; + + // Register mock with the DI container + container.registerInstance( + ContractService, + mockContractService as unknown as ContractService, + ); + + // Resolve the resolver with mocked dependencies + resolver = container.resolve(ContractResolver); + }); + + describe("contracts", () => { + it("should return contracts for given arguments", async () => { + // Arrange + const args: GetContractsArgs = { + where: { + chain_id: { eq: 1n }, + contract_address: { eq: mockContractAddress }, + }, + }; + const expectedResult = { + data: [ + { + id: "1", + chain_id: 1n, + contract_address: mockContractAddress, + start_block: 1000000n, + }, + { + id: "2", + chain_id: 1n, + contract_address: mockContractAddress, + start_block: 2000000n, + }, + ], + count: 2, + }; + mockContractService.getContracts.mockResolvedValue(expectedResult); + + // Act + const result = await resolver.contracts(args); + + // Assert + 
expect(mockContractService.getContracts).toHaveBeenCalledWith(args); + expect(result).toEqual(expectedResult); + expect(result?.data[0].contract_address).toBe(mockContractAddress); + expect(result?.data[1].contract_address).toBe(mockContractAddress); + }); + + it("should handle empty result set", async () => { + // Arrange + const expectedResult = { + data: [], + count: 0, + }; + mockContractService.getContracts.mockResolvedValue(expectedResult); + + // Act + const result = await resolver.contracts({}); + + // Assert + expect(result?.data).toHaveLength(0); + expect(result?.count).toBe(0); + }); + + it("should handle errors from contractService", async () => { + // Arrange + const error = new Error("Service error"); + mockContractService.getContracts.mockRejectedValue(error); + + // Act & Assert + await expect(resolver.contracts({})).resolves.toBeNull(); + }); + }); +}); diff --git a/test/services/graphql/resolvers/fractionResolver.test.ts b/test/services/graphql/resolvers/fractionResolver.test.ts new file mode 100644 index 00000000..9524f828 --- /dev/null +++ b/test/services/graphql/resolvers/fractionResolver.test.ts @@ -0,0 +1,269 @@ +import { parseClaimOrFractionId } from "@hypercerts-org/sdk"; +import { container } from "tsyringe"; +import type { Mock } from "vitest"; +import { beforeEach, describe, expect, it, vi } from "vitest"; +import type { GetFractionsArgs } from "../../../../src/graphql/schemas/args/fractionArgs.js"; +import type { Fraction } from "../../../../src/graphql/schemas/typeDefs/fractionTypeDefs.js"; +import { FractionService } from "../../../../src/services/database/entities/FractionEntityService.js"; +import { HypercertsService } from "../../../../src/services/database/entities/HypercertsEntityService.js"; +import { MarketplaceOrdersService } from "../../../../src/services/database/entities/MarketplaceOrdersEntityService.js"; +import { SalesService } from "../../../../src/services/database/entities/SalesEntityService.js"; +import { 
FractionResolver } from "../../../../src/services/graphql/resolvers/fractionResolver.js"; +import { generateMockFraction } from "../../../utils/testUtils.js"; + +vi.mock("@hypercerts-org/sdk", () => ({ + parseClaimOrFractionId: vi.fn(), +})); + +describe("FractionResolver", () => { + let resolver: FractionResolver; + let mockFractionService: { + getFractions: Mock; + }; + let mockSalesService: { + getSales: Mock; + }; + let mockMarketplaceOrdersService: { + getOrders: Mock; + }; + let mockHypercertService: { + getHypercertMetadata: Mock; + }; + let mockFraction: Fraction; + + beforeEach(() => { + // Create mock services + mockFractionService = { + getFractions: vi.fn(), + }; + + mockHypercertService = { + getHypercertMetadata: vi.fn(), + }; + + mockSalesService = { + getSales: vi.fn(), + }; + + mockMarketplaceOrdersService = { + getOrders: vi.fn(), + }; + + // Register mocks with the DI container + container.registerInstance( + FractionService, + mockFractionService as unknown as FractionService, + ); + container.registerInstance( + HypercertsService, + mockHypercertService as unknown as HypercertsService, + ); + container.registerInstance( + SalesService, + mockSalesService as unknown as SalesService, + ); + container.registerInstance( + MarketplaceOrdersService, + mockMarketplaceOrdersService as unknown as MarketplaceOrdersService, + ); + + // Create test data + mockFraction = generateMockFraction(); + + // Resolve the resolver with mocked dependencies + resolver = container.resolve(FractionResolver); + }); + + describe("fractions query", () => { + it("should return fractions for given arguments", async () => { + // Arrange + const args: GetFractionsArgs = { + where: { hypercert_id: { eq: mockFraction.hypercert_id } }, + }; + const expectedResult = { + data: [mockFraction], + count: 1, + }; + mockFractionService.getFractions.mockResolvedValue(expectedResult); + + // Act + const result = await resolver.fractions(args); + + // Assert + 
expect(mockFractionService.getFractions).toHaveBeenCalledWith(args); + expect(result).toEqual(expectedResult); + }); + + it("should handle errors from fractionService", async () => { + // Arrange + const error = new Error("Service error"); + mockFractionService.getFractions.mockRejectedValue(error); + + // Act & Assert + await expect(resolver.fractions({})).resolves.toBeNull(); + }); + }); + + describe("metadata field resolver", () => { + it("should resolve metadata for valid fraction data", async () => { + // Arrange + const expectedMetadata = { + id: "test-metadata", + name: "Test Metadata", + }; + mockHypercertService.getHypercertMetadata.mockResolvedValue( + expectedMetadata, + ); + + // Act + const result = await resolver.metadata(mockFraction); + + // Assert + expect(mockHypercertService.getHypercertMetadata).toHaveBeenCalledWith({ + claims_id: mockFraction.claims_id, + }); + expect(result).toEqual(expectedMetadata); + }); + + it("should return null when fraction has no claims_id", async () => { + // Arrange + const fractionWithoutClaimsId: Fraction = { + ...mockFraction, + claims_id: undefined, + }; + + // Act + const result = await resolver.metadata(fractionWithoutClaimsId); + + // Assert + expect(result).toBeNull(); + expect(mockHypercertService.getHypercertMetadata).not.toHaveBeenCalled(); + }); + }); + + describe("orders field resolver", () => { + it("should resolve orders for valid fraction data", async () => { + // Arrange + const parsedId = "123"; + (parseClaimOrFractionId as Mock).mockReturnValue({ id: parsedId }); + const expectedOrders = { + data: [{ id: "order-1" }], + count: 1, + }; + mockMarketplaceOrdersService.getOrders.mockResolvedValue(expectedOrders); + + // Act + const result = await resolver.orders(mockFraction); + + // Assert + expect(mockMarketplaceOrdersService.getOrders).toHaveBeenCalledWith({ + where: { + itemIds: { + arrayContains: [parsedId], + }, + }, + }); + expect(result).toEqual(expectedOrders); + }); + + it("should return null 
when fraction has no fraction_id", async () => { + // Arrange + const fractionWithoutId: Fraction = { + ...mockFraction, + fraction_id: undefined, + }; + + // Act + const result = await resolver.orders(fractionWithoutId); + + // Assert + expect(result).toBeNull(); + expect(mockMarketplaceOrdersService.getOrders).not.toHaveBeenCalled(); + }); + + it("should handle invalid fraction_id parsing", async () => { + // Arrange + (parseClaimOrFractionId as Mock).mockReturnValue({ id: undefined }); + + // Act + const result = await resolver.orders(mockFraction); + + // Assert + expect(result).toBeNull(); + expect(mockMarketplaceOrdersService.getOrders).not.toHaveBeenCalled(); + }); + + it("should handle errors from marketplaceOrdersService", async () => { + // Arrange + (parseClaimOrFractionId as Mock).mockReturnValue({ id: "123" }); + const error = new Error("Service error"); + mockMarketplaceOrdersService.getOrders.mockRejectedValue(error); + + // Act & Assert + await expect(resolver.orders(mockFraction)).resolves.toBeNull(); + }); + }); + + describe("sales field resolver", () => { + it("should resolve sales for valid fraction data", async () => { + // Arrange + const parsedId = "123"; + (parseClaimOrFractionId as Mock).mockReturnValue({ id: parsedId }); + const expectedSales = { + data: [{ id: "sale-1" }], + count: 1, + }; + mockSalesService.getSales.mockResolvedValue(expectedSales); + + // Act + const result = await resolver.sales(mockFraction); + + // Assert + expect(mockSalesService.getSales).toHaveBeenCalledWith({ + where: { + item_ids: { + arrayContains: [parsedId], + }, + }, + }); + expect(result).toEqual(expectedSales); + }); + + it("should return null when fraction has no fraction_id", async () => { + // Arrange + const fractionWithoutId: Fraction = { + ...mockFraction, + fraction_id: undefined, + }; + + // Act + const result = await resolver.sales(fractionWithoutId); + + // Assert + expect(result).toBeNull(); + 
expect(mockSalesService.getSales).not.toHaveBeenCalled(); + }); + + it("should handle invalid fraction_id parsing", async () => { + // Arrange + (parseClaimOrFractionId as Mock).mockReturnValue({ id: undefined }); + + // Act + const result = await resolver.sales(mockFraction); + + // Assert + expect(result).toBeNull(); + expect(mockSalesService.getSales).not.toHaveBeenCalled(); + }); + + it("should handle errors from salesService", async () => { + // Arrange + (parseClaimOrFractionId as Mock).mockReturnValue({ id: "123" }); + const error = new Error("Service error"); + mockSalesService.getSales.mockRejectedValue(error); + + // Act & Assert + await expect(resolver.sales(mockFraction)).resolves.toBeNull(); + }); + }); +}); diff --git a/test/services/graphql/resolvers/hyperboardResolver.test.ts b/test/services/graphql/resolvers/hyperboardResolver.test.ts new file mode 100644 index 00000000..77f0f5b7 --- /dev/null +++ b/test/services/graphql/resolvers/hyperboardResolver.test.ts @@ -0,0 +1,383 @@ +import { container } from "tsyringe"; +import type { Mock } from "vitest"; +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { DataKyselyService } from "../../../../src/client/kysely.js"; +import { GetHyperboardsArgs } from "../../../../src/graphql/schemas/args/hyperboardArgs.js"; +import { AllowlistRecordService } from "../../../../src/services/database/entities/AllowListRecordEntityService.js"; +import { CollectionService } from "../../../../src/services/database/entities/CollectionEntityService.js"; +import { FractionService } from "../../../../src/services/database/entities/FractionEntityService.js"; +import { HyperboardService } from "../../../../src/services/database/entities/HyperboardEntityService.js"; +import { HypercertsService } from "../../../../src/services/database/entities/HypercertsEntityService.js"; +import { MetadataService } from "../../../../src/services/database/entities/MetadataEntityService.js"; +import { UsersService } from 
"../../../../src/services/database/entities/UsersEntityService.js"; +import { HyperboardResolver } from "../../../../src/services/graphql/resolvers/hyperboardResolver.js"; +import { + generateHypercertId, + generateMockBlueprint, + generateMockCollection, + generateMockFraction, + generateMockHyperboard, + generateMockMetadata, + generateMockUser, +} from "../../../utils/testUtils.js"; + +describe("HyperboardResolver", () => { + let resolver: HyperboardResolver; + let mockHyperboardService: { + getHyperboards: Mock; + getHyperboardCollections: Mock; + getHyperboardHypercertMetadata: Mock; + getHyperboardBlueprintMetadata: Mock; + getHyperboardAdmins: Mock; + }; + let mockFractionService: { + getFractions: Mock; + }; + let mockAllowlistRecordService: { + getAllowlistRecords: Mock; + }; + let mockHypercertsService: { + getHypercerts: Mock; + getHypercertMetadataSets: Mock; + }; + let mockUsersService: { + getUsers: Mock; + }; + let mockCollectionService: { + getCollectionHypercertIds: Mock; + getCollectionBlueprints: Mock; + }; + let mockHyperboard: ReturnType; + + beforeEach(() => { + // Mock console methods + vi.spyOn(console, "error").mockImplementation(() => {}); + + // Create mock services + mockHyperboardService = { + getHyperboards: vi.fn(), + getHyperboardCollections: vi.fn(), + getHyperboardHypercertMetadata: vi.fn(), + getHyperboardBlueprintMetadata: vi.fn(), + getHyperboardAdmins: vi.fn(), + }; + + mockFractionService = { + getFractions: vi.fn(), + }; + + mockAllowlistRecordService = { + getAllowlistRecords: vi.fn(), + }; + + mockHypercertsService = { + getHypercerts: vi.fn(), + getHypercertMetadataSets: vi.fn(), + }; + + mockUsersService = { + getUsers: vi.fn(), + }; + + mockCollectionService = { + getCollectionHypercertIds: vi.fn(), + getCollectionBlueprints: vi.fn(), + }; + + // Register mocks with the DI container + container.registerInstance( + HyperboardService, + mockHyperboardService as unknown as HyperboardService, + ); + 
container.registerInstance( + FractionService, + mockFractionService as unknown as FractionService, + ); + container.registerInstance( + AllowlistRecordService, + mockAllowlistRecordService as unknown as AllowlistRecordService, + ); + container.registerInstance( + HypercertsService, + mockHypercertsService as unknown as HypercertsService, + ); + container.registerInstance(MetadataService, {} as MetadataService); + container.registerInstance( + UsersService, + mockUsersService as unknown as UsersService, + ); + container.registerInstance( + CollectionService, + mockCollectionService as unknown as CollectionService, + ); + container.registerInstance(DataKyselyService, {} as DataKyselyService); + + // Create test data + mockHyperboard = generateMockHyperboard(); + + // Create resolver instance + resolver = container.resolve(HyperboardResolver); + }); + + describe("hyperboards query", () => { + it("should return hyperboards for given arguments", async () => { + // Arrange + const args: GetHyperboardsArgs = { + where: { + id: { eq: mockHyperboard.id }, + }, + }; + const expectedResult = { + data: [mockHyperboard], + count: 1, + }; + mockHyperboardService.getHyperboards.mockResolvedValue(expectedResult); + + // Act + const result = await resolver.hyperboards(args); + + // Assert + expect(mockHyperboardService.getHyperboards).toHaveBeenCalledWith(args); + expect(result).toEqual(expectedResult); + }); + + it("should handle errors from hyperboardService", async () => { + // Arrange + const error = new Error("Service error"); + mockHyperboardService.getHyperboards.mockRejectedValue(error); + + // Act + const result = await resolver.hyperboards({}); + + // Assert + expect(result).toBeNull(); + expect(console.error).toHaveBeenCalledWith( + expect.stringContaining( + "[HyperboardResolver::hyperboards] Error fetching hyperboards:", + ), + ); + }); + }); + + describe("sections field resolver", () => { + it("should resolve sections for a hyperboard", async () => { + // Arrange + 
const mockCollection = generateMockCollection(); + const mockHypercertId = generateHypercertId(); + const mockBlueprint = generateMockBlueprint(); + const mockUser = generateMockUser(); + const mockFraction = generateMockFraction(); + const mockMetadata = generateMockMetadata(); + + // Setup mock responses with complete data structures + mockHyperboardService.getHyperboardCollections.mockResolvedValue({ + data: [mockCollection], + count: 1, + }); + + mockCollectionService.getCollectionHypercertIds.mockResolvedValue([ + { hypercert_id: mockHypercertId }, + ]); + + mockFractionService.getFractions.mockResolvedValue({ + data: [mockFraction], + count: 1, + }); + + mockAllowlistRecordService.getAllowlistRecords.mockResolvedValue({ + data: [ + { + hypercert_id: mockHypercertId, + user_address: mockUser.address, + claimed: false, + }, + ], + count: 1, + }); + + mockHypercertsService.getHypercerts.mockResolvedValue({ + data: [ + { + hypercert_id: mockHypercertId, + uri: mockMetadata.uri, + units: "100000000", + name: "Test Hypercert", + }, + ], + count: 1, + }); + + mockHypercertsService.getHypercertMetadataSets.mockResolvedValue([ + { + ...mockMetadata, + uri: mockMetadata.uri, + name: "Test Hypercert", + }, + ]); + + mockCollectionService.getCollectionBlueprints.mockResolvedValue({ + data: [mockBlueprint], + count: 1, + }); + + mockHyperboardService.getHyperboardHypercertMetadata.mockResolvedValue([ + { + hypercert_id: mockHypercertId, + display_size: 1, + }, + ]); + + mockHyperboardService.getHyperboardBlueprintMetadata.mockResolvedValue([ + { + blueprint_id: mockBlueprint.id, + display_size: 1, + }, + ]); + + mockUsersService.getUsers.mockResolvedValue({ + data: [mockUser], + count: 1, + }); + + // Act + const result = await resolver.sections(mockHyperboard); + + // Assert + expect(result).toBeTruthy(); + if (!result) { + throw new Error("Result should not be null"); + } + + expect(result.data).toHaveLength(1); + expect(result.data[0].collections).toHaveLength(1); + 
expect( + mockHyperboardService.getHyperboardCollections, + ).toHaveBeenCalledWith(mockHyperboard.id); + + // Verify the section data structure + const section = result.data[0]; + expect(section).toHaveProperty("label"); + expect(section).toHaveProperty("collections"); + expect(section).toHaveProperty("entries"); + expect(section).toHaveProperty("owners"); + expect(section.collections).toBeInstanceOf(Array); + expect(section.owners?.data).toHaveLength(2); + }); + + it("should return null when hyperboard has no id", async () => { + // Arrange + const hyperboardWithoutId = { ...mockHyperboard, id: undefined }; + + // Act + const result = await resolver.sections(hyperboardWithoutId); + + // Assert + expect(result).toBeNull(); + expect( + mockHyperboardService.getHyperboardCollections, + ).not.toHaveBeenCalled(); + }); + + it("should handle errors from services", async () => { + // Arrange + const error = new Error("Service error"); + mockHyperboardService.getHyperboardCollections.mockRejectedValue(error); + + // Act + const result = await resolver.sections(mockHyperboard); + + // Assert + expect(result).toBeNull(); + expect(console.error).toHaveBeenCalledWith( + expect.stringContaining( + "[HyperboardResolver::sections] Error fetching sections for hyperboard", + ), + ); + }); + }); + + describe("owners field resolver", () => { + it("should resolve owners for a hyperboard", async () => { + // Arrange + const mockUsers = [generateMockUser(), generateMockUser()]; + mockHyperboardService.getHyperboardCollections.mockResolvedValue({ + data: [generateMockCollection()], + }); + mockUsersService.getUsers.mockResolvedValue({ data: mockUsers }); + + // Act + const result = await resolver.owners(mockHyperboard); + + // Assert + expect(Array.isArray(result)).toBe(true); + expect( + mockHyperboardService.getHyperboardCollections, + ).toHaveBeenCalledWith(mockHyperboard.id); + }); + + it("should handle errors", async () => { + // Arrange + const error = new Error("Service 
error"); + mockHyperboardService.getHyperboardCollections.mockRejectedValue(error); + + // Act + const result = await resolver.owners(mockHyperboard); + + // Assert + expect(result).toEqual([]); + expect(console.error).toHaveBeenCalledWith( + expect.stringContaining( + "[HyperboardResolver::sections] Error fetching sections for hyperboard", + ), + ); + }); + }); + + describe("admins field resolver", () => { + it("should resolve admins for a hyperboard", async () => { + // Arrange + const expectedAdmins = [generateMockUser(), generateMockUser()]; + mockHyperboardService.getHyperboardAdmins.mockResolvedValue( + expectedAdmins, + ); + + // Act + const result = await resolver.admins(mockHyperboard); + + // Assert + expect(mockHyperboardService.getHyperboardAdmins).toHaveBeenCalledWith( + mockHyperboard.id, + ); + expect(result).toEqual(expectedAdmins); + }); + + it("should return empty array when hyperboard has no id", async () => { + // Arrange + const hyperboardWithoutId = { ...mockHyperboard, id: undefined }; + + // Act + const result = await resolver.admins(hyperboardWithoutId); + + // Assert + expect(result).toBeNull(); + expect(mockHyperboardService.getHyperboardAdmins).not.toHaveBeenCalled(); + }); + + it("should handle errors from hyperboardService", async () => { + // Arrange + const error = new Error("Service error"); + mockHyperboardService.getHyperboardAdmins.mockRejectedValue(error); + + // Act + const result = await resolver.admins(mockHyperboard); + + // Assert + expect(result).toBeNull(); + expect(console.error).toHaveBeenCalledWith( + expect.stringContaining( + "[HyperboardResolver::admins] Error fetching admins for hyperboard", + ), + ); + }); + }); +}); diff --git a/test/services/graphql/resolvers/hypercertResolver.test.ts b/test/services/graphql/resolvers/hypercertResolver.test.ts new file mode 100644 index 00000000..5dac3a32 --- /dev/null +++ b/test/services/graphql/resolvers/hypercertResolver.test.ts @@ -0,0 +1,480 @@ +import { beforeEach, 
describe, expect, it, vi } from "vitest"; +import { container } from "tsyringe"; +import { HypercertResolver } from "../../../../src/services/graphql/resolvers/hypercertResolver.js"; +import { HypercertsService } from "../../../../src/services/database/entities/HypercertsEntityService.js"; +import { MetadataService } from "../../../../src/services/database/entities/MetadataEntityService.js"; +import { ContractService } from "../../../../src/services/database/entities/ContractEntityService.js"; +import { AttestationService } from "../../../../src/services/database/entities/AttestationEntityService.js"; +import { FractionService } from "../../../../src/services/database/entities/FractionEntityService.js"; +import { SalesService } from "../../../../src/services/database/entities/SalesEntityService.js"; +import { MarketplaceOrdersService } from "../../../../src/services/database/entities/MarketplaceOrdersEntityService.js"; +import type { Mock } from "vitest"; +import type { GetHypercertsArgs } from "../../../../src/graphql/schemas/args/hypercertsArgs.js"; +import type { Hypercert } from "../../../../src/graphql/schemas/typeDefs/hypercertTypeDefs.js"; +import { faker } from "@faker-js/faker"; +import { + generateHypercertId, + generateMockMetadata, +} from "../../../utils/testUtils.js"; + +describe("HypercertResolver", () => { + let resolver: HypercertResolver; + let mockHypercertsService: { + getHypercerts: Mock; + }; + let mockMetadataService: { + getMetadataSingle: Mock; + }; + let mockContractService: { + getContract: Mock; + }; + let mockAttestationService: { + getAttestations: Mock; + }; + let mockFractionService: { + getFractions: Mock; + }; + let mockSalesService: { + getSales: Mock; + }; + let mockMarketplaceOrdersService: { + getOrders: Mock; + }; + + beforeEach(() => { + // Mock console methods + vi.spyOn(console, "warn").mockImplementation(() => {}); + vi.spyOn(console, "error").mockImplementation(() => {}); + + // Create mock services + 
mockHypercertsService = { + getHypercerts: vi.fn(), + }; + + mockMetadataService = { + getMetadataSingle: vi.fn(), + }; + + mockContractService = { + getContract: vi.fn(), + }; + + mockAttestationService = { + getAttestations: vi.fn(), + }; + + mockFractionService = { + getFractions: vi.fn(), + }; + + mockSalesService = { + getSales: vi.fn(), + }; + + mockMarketplaceOrdersService = { + getOrders: vi.fn(), + }; + + // Register mocks with the DI container + container.registerInstance( + HypercertsService, + mockHypercertsService as unknown as HypercertsService, + ); + container.registerInstance( + MetadataService, + mockMetadataService as unknown as MetadataService, + ); + container.registerInstance( + ContractService, + mockContractService as unknown as ContractService, + ); + container.registerInstance( + AttestationService, + mockAttestationService as unknown as AttestationService, + ); + container.registerInstance( + FractionService, + mockFractionService as unknown as FractionService, + ); + container.registerInstance( + SalesService, + mockSalesService as unknown as SalesService, + ); + container.registerInstance( + MarketplaceOrdersService, + mockMarketplaceOrdersService as unknown as MarketplaceOrdersService, + ); + + // Create a new instance for each test + resolver = container.resolve(HypercertResolver); + }); + + describe("hypercerts query resolver", () => { + it("should return hypercerts for given arguments", async () => { + // Arrange + const args: GetHypercertsArgs = { + where: { + hypercert_id: { eq: generateHypercertId() }, + }, + }; + const expectedResult = { + data: [ + { id: faker.string.uuid(), hypercert_id: generateHypercertId() }, + { id: faker.string.uuid(), hypercert_id: generateHypercertId() }, + ], + count: 2, + }; + mockHypercertsService.getHypercerts.mockResolvedValue(expectedResult); + + // Act + const result = await resolver.hypercerts(args); + + // Assert + expect(mockHypercertsService.getHypercerts).toHaveBeenCalledWith(args); + 
expect(result).toEqual(expectedResult); + }); + + it("should return null when service throws error", async () => { + // Arrange + const args: GetHypercertsArgs = {}; + const error = new Error("Service error"); + mockHypercertsService.getHypercerts.mockRejectedValue(error); + + // Act + const result = await resolver.hypercerts(args); + + // Assert + expect(result).toBeNull(); + expect(console.error).toHaveBeenCalledWith( + expect.stringContaining( + "[HypercertResolver::hypercerts] Error fetching hypercerts:", + ), + ); + }); + }); + + describe("metadata field resolver", () => { + it("should resolve metadata for hypercert with uri", async () => { + // Arrange + const hypercert: Hypercert = { + id: faker.string.uuid(), + uri: `ipfs://${faker.string.alphanumeric(46)}`, + } as Hypercert; + const expectedMetadata = generateMockMetadata(); + mockMetadataService.getMetadataSingle.mockResolvedValue(expectedMetadata); + + // Act + const result = await resolver.metadata(hypercert); + + // Assert + expect(mockMetadataService.getMetadataSingle).toHaveBeenCalledWith({ + where: { uri: { eq: hypercert.uri } }, + }); + expect(result).toEqual(expectedMetadata); + }); + + it("should return null when hypercert has no uri", async () => { + // Arrange + const hypercert: Hypercert = { + id: faker.string.uuid(), + } as Hypercert; + + // Act + const result = await resolver.metadata(hypercert); + + // Assert + expect(result).toBeNull(); + expect(mockMetadataService.getMetadataSingle).not.toHaveBeenCalled(); + expect(console.warn).toHaveBeenCalledWith( + expect.stringContaining( + "[HypercertResolver::metadata] No uri found for hypercert", + ), + ); + }); + + it("should return null when service throws error", async () => { + // Arrange + const hypercert: Hypercert = { + id: faker.string.uuid(), + uri: `ipfs://${faker.string.alphanumeric(46)}`, + } as Hypercert; + const error = new Error("Service error"); + mockMetadataService.getMetadataSingle.mockRejectedValue(error); + + // Act + const 
result = await resolver.metadata(hypercert); + + // Assert + expect(result).toBeNull(); + expect(console.error).toHaveBeenCalledWith( + expect.stringContaining( + "[HypercertResolver::metadata] Error fetching metadata:", + ), + ); + }); + }); + + describe("contract field resolver", () => { + it("should resolve contract for hypercert with contracts_id", async () => { + // Arrange + const hypercert: Hypercert = { + id: faker.string.uuid(), + contracts_id: faker.string.uuid(), + } as Hypercert; + const expectedContract = { + id: hypercert.contracts_id, + chain_id: faker.number.int(), + contract_address: faker.finance.ethereumAddress(), + }; + mockContractService.getContract.mockResolvedValue(expectedContract); + + // Act + const result = await resolver.contract(hypercert); + + // Assert + expect(mockContractService.getContract).toHaveBeenCalledWith({ + where: { id: { eq: hypercert.contracts_id } }, + }); + expect(result).toEqual(expectedContract); + }); + + it("should return null when hypercert has no contracts_id", async () => { + // Arrange + const hypercert: Hypercert = { + id: faker.string.uuid(), + } as Hypercert; + + // Act + const result = await resolver.contract(hypercert); + + // Assert + expect(result).toBeNull(); + expect(mockContractService.getContract).not.toHaveBeenCalled(); + expect(console.warn).toHaveBeenCalledWith( + expect.stringContaining( + "[HypercertResolver::contract] No contract id found for hypercert", + ), + ); + }); + + it("should return null when service throws error", async () => { + // Arrange + const hypercert: Hypercert = { + id: faker.string.uuid(), + contracts_id: faker.string.uuid(), + } as Hypercert; + const error = new Error("Service error"); + mockContractService.getContract.mockRejectedValue(error); + + // Act + const result = await resolver.contract(hypercert); + + // Assert + expect(result).toBeNull(); + expect(console.error).toHaveBeenCalledWith( + expect.stringContaining( + "[HypercertResolver::contract] Error fetching 
contract:", + ), + ); + }); + }); + + describe("attestations field resolver", () => { + it("should resolve attestations for hypercert with id", async () => { + // Arrange + const hypercert: Hypercert = { + id: faker.string.uuid(), + } as Hypercert; + const expectedAttestations = { + data: [ + { id: faker.string.uuid(), hypercert_id: hypercert.id }, + { id: faker.string.uuid(), hypercert_id: hypercert.id }, + ], + count: 2, + }; + mockAttestationService.getAttestations.mockResolvedValue( + expectedAttestations, + ); + + // Act + const result = await resolver.attestations(hypercert); + + // Assert + expect(mockAttestationService.getAttestations).toHaveBeenCalledWith({ + where: { hypercert: { id: { eq: hypercert.id } } }, + }); + expect(result).toEqual(expectedAttestations); + }); + + it("should return null when hypercert has no id", async () => { + // Arrange + const hypercert = {} as Hypercert; + + // Act + const result = await resolver.attestations(hypercert); + + // Assert + expect(result).toBeNull(); + expect(mockAttestationService.getAttestations).not.toHaveBeenCalled(); + expect(console.warn).toHaveBeenCalledWith( + expect.stringContaining( + "[HypercertResolver::attestations] No id found for hypercert", + ), + ); + }); + + it("should return null when service throws error", async () => { + // Arrange + const hypercert: Hypercert = { + id: faker.string.uuid(), + } as Hypercert; + const error = new Error("Service error"); + mockAttestationService.getAttestations.mockRejectedValue(error); + + // Act + const result = await resolver.attestations(hypercert); + + // Assert + expect(result).toBeNull(); + expect(console.error).toHaveBeenCalledWith( + expect.stringContaining( + "[HypercertResolver::attestations] Error fetching attestations:", + ), + ); + }); + }); + + describe("fractions field resolver", () => { + it("should resolve fractions for hypercert with hypercert_id", async () => { + // Arrange + const hypercert: Hypercert = { + id: faker.string.uuid(), + 
hypercert_id: generateHypercertId(), + } as Hypercert; + const expectedFractions = { + data: [ + { id: faker.string.uuid(), hypercert_id: hypercert.hypercert_id }, + { id: faker.string.uuid(), hypercert_id: hypercert.hypercert_id }, + ], + count: 2, + }; + mockFractionService.getFractions.mockResolvedValue(expectedFractions); + + // Act + const result = await resolver.fractions(hypercert); + + // Assert + expect(mockFractionService.getFractions).toHaveBeenCalledWith({ + where: { hypercert_id: { eq: hypercert.hypercert_id } }, + }); + expect(result).toEqual(expectedFractions); + }); + + it("should return null when hypercert has no hypercert_id", async () => { + // Arrange + const hypercert: Hypercert = { + id: faker.string.uuid(), + } as Hypercert; + + // Act + const result = await resolver.fractions(hypercert); + + // Assert + expect(result).toBeNull(); + expect(mockFractionService.getFractions).not.toHaveBeenCalled(); + expect(console.warn).toHaveBeenCalledWith( + expect.stringContaining( + "[HypercertResolver::fractions] No hypercert id found for", + ), + ); + }); + + it("should return null when service throws error", async () => { + // Arrange + const hypercert: Hypercert = { + id: faker.string.uuid(), + hypercert_id: generateHypercertId(), + } as Hypercert; + const error = new Error("Service error"); + mockFractionService.getFractions.mockRejectedValue(error); + + // Act + const result = await resolver.fractions(hypercert); + + // Assert + expect(result).toBeNull(); + expect(console.error).toHaveBeenCalledWith( + expect.stringContaining( + "[HypercertResolver::fractions] Error fetching fractions:", + ), + ); + }); + }); + + describe("sales field resolver", () => { + it("should resolve sales for hypercert with hypercert_id", async () => { + // Arrange + const hypercert: Hypercert = { + id: faker.string.uuid(), + hypercert_id: generateHypercertId(), + } as Hypercert; + const expectedSales = { + data: [ + { id: faker.string.uuid(), hypercert_id: 
hypercert.hypercert_id }, + { id: faker.string.uuid(), hypercert_id: hypercert.hypercert_id }, + ], + count: 2, + }; + mockSalesService.getSales.mockResolvedValue(expectedSales); + + // Act + const result = await resolver.sales(hypercert); + + // Assert + expect(mockSalesService.getSales).toHaveBeenCalledWith({ + where: { hypercert_id: { eq: hypercert.hypercert_id } }, + }); + expect(result).toEqual(expectedSales); + }); + + it("should return null when hypercert has no hypercert_id", async () => { + // Arrange + const hypercert: Hypercert = { + id: faker.string.uuid(), + } as Hypercert; + + // Act + const result = await resolver.sales(hypercert); + + // Assert + expect(result).toBeNull(); + expect(mockSalesService.getSales).not.toHaveBeenCalled(); + expect(console.warn).toHaveBeenCalledWith( + expect.stringContaining( + "[HypercertResolver::sales] No hypercert id found for", + ), + ); + }); + + it("should return null when service throws error", async () => { + // Arrange + const hypercert: Hypercert = { + id: faker.string.uuid(), + hypercert_id: generateHypercertId(), + } as Hypercert; + const error = new Error("Service error"); + mockSalesService.getSales.mockRejectedValue(error); + + // Act + const result = await resolver.sales(hypercert); + + // Assert + expect(result).toBeNull(); + expect(console.error).toHaveBeenCalledWith( + expect.stringContaining( + "[HypercertResolver::sales] Error fetching sales:", + ), + ); + }); + }); +}); diff --git a/test/services/graphql/resolvers/metadataResolver.test.ts b/test/services/graphql/resolvers/metadataResolver.test.ts new file mode 100644 index 00000000..aa3ebd6e --- /dev/null +++ b/test/services/graphql/resolvers/metadataResolver.test.ts @@ -0,0 +1,182 @@ +import { faker } from "@faker-js/faker"; +import { container } from "tsyringe"; +import type { Mock } from "vitest"; +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { CachingKyselyService } from "../../../../src/client/kysely.js"; +import type { 
GetMetadataArgs } from "../../../../src/graphql/schemas/args/metadataArgs.js"; +import type { Metadata } from "../../../../src/graphql/schemas/typeDefs/metadataTypeDefs.js"; +import { MetadataService } from "../../../../src/services/database/entities/MetadataEntityService.js"; +import { MetadataResolver } from "../../../../src/services/graphql/resolvers/metadataResolver.js"; +import { + generateMinimalMockMetadata, + generateMockMetadata, +} from "../../../utils/testUtils.js"; + +describe("MetadataResolver", () => { + let resolver: MetadataResolver; + let mockMetadataService: { + getMetadata: Mock; + }; + let mockCachingKyselyService: { + getConnection: Mock; + }; + let mockConnection: { + selectFrom: Mock; + }; + let mockQuery: { + where: Mock; + select: Mock; + executeTakeFirst: Mock; + }; + + beforeEach(() => { + // Create mock services + mockQuery = { + where: vi.fn().mockReturnThis(), + select: vi.fn().mockReturnThis(), + executeTakeFirst: vi.fn(), + }; + + mockConnection = { + selectFrom: vi.fn().mockReturnValue(mockQuery), + }; + + mockCachingKyselyService = { + getConnection: vi.fn().mockReturnValue(mockConnection), + }; + + mockMetadataService = { + getMetadata: vi.fn(), + }; + + // Register mocks with the DI container + container.registerInstance( + MetadataService, + mockMetadataService as unknown as MetadataService, + ); + container.registerInstance( + CachingKyselyService, + mockCachingKyselyService as unknown as CachingKyselyService, + ); + + // Resolve the resolver with mocked dependencies + resolver = container.resolve(MetadataResolver); + }); + + describe("metadata query resolver", () => { + it("should return metadata records for given arguments", async () => { + // Arrange + const mockMetadata1 = generateMockMetadata(); + const mockMetadata2 = generateMockMetadata(); + const args: GetMetadataArgs = { + where: { + uri: { eq: mockMetadata1.uri }, + }, + }; + const expectedResult = { + data: [mockMetadata1, mockMetadata2], + count: 2, + }; + 
mockMetadataService.getMetadata.mockResolvedValue(expectedResult); + + // Act + const result = await resolver.metadata(args); + + // Assert + expect(mockMetadataService.getMetadata).toHaveBeenCalledWith(args); + expect(result).toEqual(expectedResult); + }); + + it("should handle empty result set", async () => { + // Arrange + const args: GetMetadataArgs = { + where: { + uri: { eq: `ipfs://${faker.string.alphanumeric(46)}` }, + }, + }; + const expectedResult = { + data: [], + count: 0, + }; + mockMetadataService.getMetadata.mockResolvedValue(expectedResult); + + // Act + const result = await resolver.metadata(args); + + // Assert + expect(mockMetadataService.getMetadata).toHaveBeenCalledWith(args); + expect(result).toEqual(expectedResult); + }); + + it("should handle errors from metadata service", async () => { + // Arrange + const args: GetMetadataArgs = {}; + const error = new Error("Service error"); + mockMetadataService.getMetadata.mockRejectedValue(error); + + // Act + const result = await resolver.metadata(args); + + // Assert + expect(result).toBeNull(); + expect(mockMetadataService.getMetadata).toHaveBeenCalledWith(args); + }); + }); + + describe("image field resolver", () => { + it("should resolve image for metadata with uri", async () => { + // Arrange + const metadata = generateMinimalMockMetadata(); + const expectedImage = faker.image.dataUri(); + mockQuery.executeTakeFirst.mockResolvedValue({ image: expectedImage }); + + // Act + const result = await resolver.image(metadata as Metadata); + + // Assert + expect(mockConnection.selectFrom).toHaveBeenCalledWith("metadata"); + expect(mockQuery.where).toHaveBeenCalledWith("uri", "=", metadata.uri); + expect(mockQuery.select).toHaveBeenCalledWith("image"); + expect(result).toBe(expectedImage); + }); + + it("should return null when metadata has no uri", async () => { + // Arrange + const metadata = { id: faker.string.uuid() }; + + // Act + const result = await resolver.image(metadata as Metadata); + + // Assert 
+ expect(result).toBeNull(); + expect(mockConnection.selectFrom).not.toHaveBeenCalled(); + }); + + it("should return null when image query returns no result", async () => { + // Arrange + const metadata = generateMinimalMockMetadata(); + mockQuery.executeTakeFirst.mockResolvedValue(null); + + // Act + const result = await resolver.image(metadata as Metadata); + + // Assert + expect(result).toBeNull(); + expect(mockConnection.selectFrom).toHaveBeenCalledWith("metadata"); + }); + + it("should handle errors from database query", async () => { + // Arrange + const metadata = generateMinimalMockMetadata(); + const error = new Error("Database error"); + mockQuery.executeTakeFirst.mockRejectedValue(error); + + // Act + const result = await resolver.image(metadata as Metadata); + + // Assert + expect(result).toBeNull(); + expect(mockConnection.selectFrom).toHaveBeenCalledWith("metadata"); + }); + }); +}); diff --git a/test/services/graphql/resolvers/orderResolver.test.ts b/test/services/graphql/resolvers/orderResolver.test.ts new file mode 100644 index 00000000..642e3ad3 --- /dev/null +++ b/test/services/graphql/resolvers/orderResolver.test.ts @@ -0,0 +1,230 @@ +import { container } from "tsyringe"; +import type { Mock } from "vitest"; +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { GetOrdersArgs } from "../../../../src/graphql/schemas/args/orderArgs.js"; +import { Order } from "../../../../src/graphql/schemas/typeDefs/orderTypeDefs.js"; +import { HypercertsService } from "../../../../src/services/database/entities/HypercertsEntityService.js"; +import { MarketplaceOrdersService } from "../../../../src/services/database/entities/MarketplaceOrdersEntityService.js"; +import { OrderResolver } from "../../../../src/services/graphql/resolvers/orderResolver.js"; +import { + generateMockHypercert, + generateMockOrder, +} from "../../../utils/testUtils.js"; + +vi.mock( + "../../../../src/utils/getTokenPriceInUSD.js", + async (importOriginal) => { + const 
actual = + await importOriginal< + typeof import("../../../../src/utils/getTokenPriceInUSD.js") + >(); + return { + ...actual, + getTokenPriceInUSD: vi.fn().mockResolvedValue(100), + }; + }, +); + +/** + * Test suite for OrderResolver. + * Tests the GraphQL resolver functionality for marketplace orders. + * + * Tests cover: + * - Query resolution for orders with various filters + * - Field resolution for related entities: + * - hypercert: Associated hypercert details and metadata + * - Error handling for all operations + * - Price calculation in USD + */ +describe("OrderResolver", () => { + let resolver: OrderResolver; + let mockMarketplaceOrdersService: { + getOrders: Mock; + }; + let mockHypercertService: { + getHypercerts: Mock; + getHypercert: Mock; + getHypercertMetadata: Mock; + }; + let mockOrder: ReturnType; + let mockHypercert: ReturnType; + + beforeEach(() => { + // Mock console methods + vi.spyOn(console, "error").mockImplementation(() => {}); + vi.spyOn(console, "warn").mockImplementation(() => {}); + + // Create mock services + mockMarketplaceOrdersService = { + getOrders: vi.fn(), + }; + + mockHypercertService = { + getHypercerts: vi.fn(), + getHypercert: vi.fn(), + getHypercertMetadata: vi.fn(), + }; + + // Register mocks with the DI container + container.registerInstance( + MarketplaceOrdersService, + mockMarketplaceOrdersService as unknown as MarketplaceOrdersService, + ); + container.registerInstance( + HypercertsService, + mockHypercertService as unknown as HypercertsService, + ); + + // Create test data + mockHypercert = generateMockHypercert(); + mockOrder = generateMockOrder({ hypercert_id: mockHypercert.hypercert_id }); + + // Create resolver instance + resolver = container.resolve(OrderResolver); + }); + + describe("orders query", () => { + it("should return orders with USD prices for given arguments", async () => { + // Arrange + const args: GetOrdersArgs = { + where: { + hypercert_id: { eq: mockOrder.hypercert_id }, + }, + }; + const 
expectedResult = { + data: [mockOrder], + count: 1, + }; + mockMarketplaceOrdersService.getOrders.mockResolvedValue(expectedResult); + mockHypercertService.getHypercerts.mockResolvedValue({ + data: [{ ...mockHypercert, units: BigInt(1000000) }], + }); + + // Act + const result = await resolver.orders(args); + + // Assert + expect(mockMarketplaceOrdersService.getOrders).toHaveBeenCalledWith(args); + expect(mockHypercertService.getHypercerts).toHaveBeenCalledWith({ + where: { + hypercert_id: { in: [mockOrder.hypercert_id] }, + }, + }); + + console.log(result.data[0]); + expect(result.data[0]).toHaveProperty("pricePerPercentInUSD"); + expect(result.count).toBe(1); + }); + + it("should handle empty orders response", async () => { + // Arrange + mockMarketplaceOrdersService.getOrders.mockResolvedValue({ + data: [], + count: 0, + }); + + // Act + const result = await resolver.orders({}); + + // Assert + expect(result).toEqual({ + data: [], + count: 0, + }); + }); + + it("should handle missing hypercert units", async () => { + // Arrange + const ordersResponse = { + data: [mockOrder], + count: 1, + }; + mockMarketplaceOrdersService.getOrders.mockResolvedValue(ordersResponse); + mockHypercertService.getHypercerts.mockResolvedValue({ + data: [{ ...mockHypercert, units: undefined }], + }); + + // Act + const result = await resolver.orders({}); + + // Assert + expect(result.data[0]).not.toHaveProperty("priceInUsd"); + expect(console.warn).toHaveBeenCalledWith( + expect.stringContaining("No hypercert units found for hypercert_id:"), + ); + }); + + it("should throw error on service failure", async () => { + // Arrange + const error = new Error("Service error"); + mockMarketplaceOrdersService.getOrders.mockRejectedValue(error); + + // Act & Assert + await expect(resolver.orders({})).rejects.toThrow( + "[OrderResolver::orders] Error fetching orders:", + ); + }); + }); + + describe("hypercert field resolver", () => { + it("should resolve hypercert with metadata", async () => { + 
// Arrange + const mockMetadata = { + name: "Test Hypercert", + description: "Test Description", + }; + mockHypercertService.getHypercert.mockResolvedValue(mockHypercert); + mockHypercertService.getHypercertMetadata.mockResolvedValue(mockMetadata); + + // Act + const result = await resolver.hypercert(mockOrder as unknown as Order); + + // Assert + expect(result).toEqual({ + ...mockHypercert, + metadata: mockMetadata, + }); + }); + + it("should handle missing required fields", async () => { + // Arrange + const invalidOrder = generateMockOrder({ itemIds: undefined }); + + // Act + const result = await resolver.hypercert(invalidOrder as unknown as Order); + + // Assert + expect(result).toBeNull(); + expect(console.warn).toHaveBeenCalledWith( + "[OrderResolver::hypercert] Missing tokenId or collectionId", + ); + }); + + it("should handle missing metadata", async () => { + // Arrange + mockHypercertService.getHypercert.mockResolvedValue(mockHypercert); + mockHypercertService.getHypercertMetadata.mockResolvedValue(null); + + // Act + const result = await resolver.hypercert(mockOrder as unknown as Order); + + // Assert + expect(result).toEqual({ + ...mockHypercert, + metadata: null, + }); + }); + + it("should handle service errors gracefully", async () => { + // Arrange + const error = new Error("Service error"); + mockHypercertService.getHypercert.mockRejectedValue(error); + + // Act + const result = await resolver.hypercert(mockOrder as unknown as Order); + + // Assert + expect(result).toBeNull(); + }); + }); +}); diff --git a/test/services/graphql/resolvers/salesResolver.test.ts b/test/services/graphql/resolvers/salesResolver.test.ts new file mode 100644 index 00000000..2de20d56 --- /dev/null +++ b/test/services/graphql/resolvers/salesResolver.test.ts @@ -0,0 +1,163 @@ +import { container } from "tsyringe"; +import type { Mock } from "vitest"; +import { beforeEach, describe, expect, it, vi } from "vitest"; +import type { GetSalesArgs } from 
"../../../../src/graphql/schemas/args/salesArgs.js"; +import type { Sale } from "../../../../src/graphql/schemas/typeDefs/salesTypeDefs.js"; +import { SalesService } from "../../../../src/services/database/entities/SalesEntityService.js"; +import { HypercertsService } from "../../../../src/services/database/entities/HypercertsEntityService.js"; +import { SalesResolver } from "../../../../src/services/graphql/resolvers/salesResolver.js"; +import { faker } from "@faker-js/faker"; +import { generateHypercertId } from "../../../utils/testUtils.js"; + +describe("SalesResolver", () => { + let resolver: SalesResolver; + let mockSalesService: { + getSales: Mock; + }; + let mockHypercertsService: { + getHypercert: Mock; + getHypercertMetadata: Mock; + }; + let mockSale: Sale; + + beforeEach(() => { + // Mock console methods + vi.spyOn(console, "warn").mockImplementation(() => {}); + vi.spyOn(console, "error").mockImplementation(() => {}); + + // Create mock services + mockSalesService = { + getSales: vi.fn(), + }; + + mockHypercertsService = { + getHypercert: vi.fn(), + getHypercertMetadata: vi.fn(), + }; + + // Register mocks with the DI container + container.registerInstance( + SalesService, + mockSalesService as unknown as SalesService, + ); + container.registerInstance( + HypercertsService, + mockHypercertsService as unknown as HypercertsService, + ); + + // Create test data + mockSale = { + id: faker.string.uuid(), + hypercert_id: generateHypercertId(), + buyer: faker.string.alphanumeric(42), + seller: faker.string.alphanumeric(42), + currency: faker.string.alphanumeric(42), + collection: faker.string.alphanumeric(42), + transaction_hash: faker.string.alphanumeric(66), + } as Sale; + + // Create a new instance for each test + resolver = container.resolve(SalesResolver); + }); + + describe("sales query resolver", () => { + it("should return sales for given arguments", async () => { + // Arrange + const args: GetSalesArgs = { + where: { + hypercert_id: { eq: 
generateHypercertId() }, + }, + }; + const expectedResult = { + data: [mockSale], + count: 1, + }; + mockSalesService.getSales.mockResolvedValue(expectedResult); + + // Act + const result = await resolver.sales(args); + + // Assert + expect(mockSalesService.getSales).toHaveBeenCalledWith(args); + expect(result).toEqual(expectedResult); + }); + + it("should return null when service throws error", async () => { + // Arrange + const args: GetSalesArgs = {}; + const error = new Error("Service error"); + mockSalesService.getSales.mockRejectedValue(error); + + // Act + const result = await resolver.sales(args); + + // Assert + expect(result).toBeNull(); + expect(console.error).toHaveBeenCalledWith( + expect.stringContaining("[SalesResolver::sales] Error fetching sales:"), + ); + }); + }); + + describe("hypercert field resolver", () => { + it("should resolve hypercert for sale with hypercert_id", async () => { + // Arrange + const expectedHypercert = { + id: faker.string.uuid(), + hypercert_id: mockSale.hypercert_id, + metadata: null, + }; + mockHypercertsService.getHypercert.mockResolvedValue(expectedHypercert); + + // Act + const result = await resolver.hypercert(mockSale); + + // Assert + expect(mockHypercertsService.getHypercert).toHaveBeenCalledWith({ + where: { + hypercert_id: { + eq: mockSale.hypercert_id, + }, + }, + }); + expect(result).toEqual(expectedHypercert); + }); + + it("should return null when sale has no hypercert_id", async () => { + // Arrange + const saleWithoutId: Sale = { + ...mockSale, + hypercert_id: undefined, + }; + + // Act + const result = await resolver.hypercert(saleWithoutId); + + // Assert + expect(result).toBeNull(); + expect(mockHypercertsService.getHypercert).not.toHaveBeenCalled(); + expect(console.warn).toHaveBeenCalledWith( + expect.stringContaining( + "[SalesResolver::hypercert_id] Missing hypercert_id", + ), + ); + }); + + it("should return null when service throws error", async () => { + // Arrange + const error = new 
Error("Service error"); + mockHypercertsService.getHypercert.mockRejectedValue(error); + + // Act + const result = await resolver.hypercert(mockSale); + + // Assert + expect(result).toBeNull(); + expect(console.error).toHaveBeenCalledWith( + expect.stringContaining( + "[SalesResolver::hypercert] Error fetching hypercert:", + ), + ); + }); + }); +}); diff --git a/test/services/graphql/resolvers/signatureRequestResolver.test.ts b/test/services/graphql/resolvers/signatureRequestResolver.test.ts new file mode 100644 index 00000000..07433926 --- /dev/null +++ b/test/services/graphql/resolvers/signatureRequestResolver.test.ts @@ -0,0 +1,84 @@ +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { SignatureRequestResolver } from "../../../../src/services/graphql/resolvers/signatureRequestResolver.js"; +import { SignatureRequestsService } from "../../../../src/services/database/entities/SignatureRequestsEntityService.js"; +import { generateMockSignatureRequest } from "../../../utils/testUtils.js"; +import { container } from "tsyringe"; +import { SignatureRequest } from "../../../../src/graphql/schemas/typeDefs/signatureRequestTypeDefs.js"; +import { GetSignatureRequestsArgs } from "../../../../src/graphql/schemas/args/signatureRequestArgs.js"; + +describe("SignatureRequestResolver", () => { + let resolver: SignatureRequestResolver; + let mockSignatureRequestsService: SignatureRequestsService; + let mockSignatureRequest: ReturnType; + + beforeEach(() => { + mockSignatureRequest = generateMockSignatureRequest(); + + mockSignatureRequestsService = { + getSignatureRequests: vi.fn().mockResolvedValue({ + data: [mockSignatureRequest], + count: 1, + }), + } as unknown as SignatureRequestsService; + + container.clearInstances(); + container.registerInstance( + SignatureRequestsService, + mockSignatureRequestsService, + ); + resolver = container.resolve(SignatureRequestResolver); + }); + + describe("signatureRequests", () => { + it("should return signature 
requests with count", async () => { + const args = {} as GetSignatureRequestsArgs; + + const result = await resolver.signatureRequests(args); + + expect(result).toEqual({ + data: [mockSignatureRequest], + count: 1, + }); + expect( + mockSignatureRequestsService.getSignatureRequests, + ).toHaveBeenCalledWith(args); + }); + }); + + describe("message field resolver", () => { + it("should return stringified message when message is an object", () => { + const messageObj = { test: "data" }; + const request = { + ...mockSignatureRequest, + message: messageObj, + } as unknown as SignatureRequest; + + const result = resolver.message(request); + + expect(result).toBe(JSON.stringify(messageObj)); + }); + + it("should return message as is when it's already a string", () => { + const messageStr = '{"test":"data"}'; + const request = { + ...mockSignatureRequest, + message: messageStr, + } as SignatureRequest; + + const result = resolver.message(request); + + expect(result).toBe(messageStr); + }); + + it("should return fallback message when message is undefined", () => { + const request = { + ...mockSignatureRequest, + message: undefined, + } as SignatureRequest; + + const result = resolver.message(request); + + expect(result).toBe("could not parse message"); + }); + }); +}); diff --git a/test/setup-env.ts b/test/setup-env.ts index 220ba1cb..0ab69066 100644 --- a/test/setup-env.ts +++ b/test/setup-env.ts @@ -1,3 +1,5 @@ import dotenv from "dotenv"; dotenv.config({ path: "./.env" }); + +import "reflect-metadata"; diff --git a/test/tsconfig.json b/test/tsconfig.json new file mode 100644 index 00000000..35a3dcbf --- /dev/null +++ b/test/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "noEmit": true, + "noUnusedLocals": false, + "emitDecoratorMetadata": true + }, + "include": [".", "../src"] +} diff --git a/test/utils/processCollectionToSection.test.ts b/test/utils/processCollectionToSection.test.ts index 60613962..0adeee18 100644 --- 
a/test/utils/processCollectionToSection.test.ts +++ b/test/utils/processCollectionToSection.test.ts @@ -1,7 +1,8 @@ -import { describe, it, expect } from "vitest"; -import { processCollectionToSection } from "../../src/utils/processCollectionToSection.js"; +import { Selectable } from "kysely"; import { sepolia } from "viem/chains"; -import { Database as DataDatabase } from "../../src/types/supabaseData.js"; +import { describe, expect, it } from "vitest"; +import { DataDatabase } from "../../src/types/kyselySupabaseData.js"; +import { processCollectionToSection } from "../../src/utils/processCollectionToSection.js"; describe("processCollectionToSection", async () => { const collection = { @@ -19,10 +20,10 @@ describe("processCollectionToSection", async () => { hypercerts: [], blueprints: [], users: [], - hypercert_metadata: [], + hyperboardHypercertMetadata: [], collection, }; - const user1: DataDatabase["public"]["Tables"]["users"]["Row"] = { + const user1: Selectable = { address: "0x1", chain_id: sepolia.id, avatar: "testAvatar1", @@ -31,7 +32,7 @@ describe("processCollectionToSection", async () => { created_at: new Date().toISOString(), }; - const user2: DataDatabase["public"]["Tables"]["users"]["Row"] = { + const user2: Selectable = { address: "0x2", chain_id: sepolia.id, avatar: "testAvatar2", @@ -111,31 +112,33 @@ describe("processCollectionToSection", async () => { hypercert_allow_lists_id: "test", }; - const hypercertMetadata1: DataDatabase["public"]["Tables"]["hyperboard_hypercert_metadata"]["Row"] = - { - hypercert_id: hypercert1.hypercert_id as string, - hyperboard_id: "testHyperboard1", - collection_id: "testCollection1", - display_size: 1, - created_at: new Date().toISOString(), - }; + const hypercertMetadata1: Selectable< + DataDatabase["hyperboard_hypercert_metadata"] + > = { + hypercert_id: hypercert1.hypercert_id as string, + hyperboard_id: "testHyperboard1", + collection_id: "testCollection1", + display_size: 1, + created_at: new 
Date().toISOString(), + }; - const hypercertMetadata2: DataDatabase["public"]["Tables"]["hyperboard_hypercert_metadata"]["Row"] = - { - hypercert_id: hypercert2.hypercert_id as string, - hyperboard_id: "testHyperboard2", - collection_id: "testCollection2", - display_size: 1, - created_at: new Date().toISOString(), - }; + const hypercertMetadata2: Selectable< + DataDatabase["hyperboard_hypercert_metadata"] + > = { + hypercert_id: hypercert2.hypercert_id as string, + hyperboard_id: "testHyperboard2", + collection_id: "testCollection2", + display_size: 1, + created_at: new Date().toISOString(), + }; it("should process empty collection to section", async () => { const emptySection = processCollectionToSection(emptyArgs); expect(emptySection).toBeDefined(); expect(emptySection.entries).toBeDefined(); - expect(emptySection.entries.length).toBe(0); + expect(emptySection.entries?.length).toBe(0); expect(emptySection.owners).toBeDefined(); - expect(emptySection.owners.length).toBe(0); + expect(emptySection.owners?.data?.length).toBe(0); }); it("should process allowlist entries according to size", async () => { @@ -144,18 +147,18 @@ describe("processCollectionToSection", async () => { hypercerts: [ { ...hypercert1, units: allowlistEntry1.units + allowlistEntry2.units }, ], - hypercert_metadata: [hypercertMetadata1], + hyperboardHypercertMetadata: [hypercertMetadata1], allowlistEntries: [allowlistEntry1, allowlistEntry2], }); - expect(section.owners.length).toBe(2); + expect(section.owners?.data?.length).toBe(2); expect( - section.owners.find( + section.owners?.data?.find( (owner) => owner.address === allowlistEntry1.user_address, )?.percentage_owned, ).toBe(25); expect( - section.owners.find( + section.owners?.data?.find( (owner) => owner.address === allowlistEntry2.user_address, )?.percentage_owned, ).toBe(75); @@ -165,39 +168,43 @@ describe("processCollectionToSection", async () => { const section = processCollectionToSection({ ...emptyArgs, hypercerts: [hypercert1], - 
hypercert_metadata: [hypercertMetadata1], + hyperboardHypercertMetadata: [hypercertMetadata1], allowlistEntries: [ allowlistEntry1, { ...allowlistEntry2, claimed: true }, ], }); - expect(section.owners.length).toBe(1); - expect(section.owners[0].percentage_owned).toBe(100); + expect(section.owners?.data?.length).toBe(1); + expect(section.owners?.data?.[0].percentage_owned).toBe(100); }); it("should use correct user metadata for allowlist entries", async () => { const { owners } = processCollectionToSection({ ...emptyArgs, hypercerts: [hypercert1], - hypercert_metadata: [hypercertMetadata1], + hyperboardHypercertMetadata: [hypercertMetadata1], allowlistEntries: [allowlistEntry1], users: [user1], }); console.log(owners); expect( - owners.find((owner) => owner.address === user1.address)?.avatar, + owners?.data?.find((owner) => owner.address === user1.address)?.avatar, ).toBe(user1.avatar); expect( - owners.find((owner) => owner.address === user1.address)?.display_name, + owners?.data?.find((owner) => owner.address === user1.address) + ?.display_name, ).toBe(user1.display_name); }); it("Should adjust for display size", () => { const { owners } = processCollectionToSection({ ...emptyArgs, - hypercerts: [hypercert1, { ...hypercert2, units: 157 }], - hypercert_metadata: [hypercertMetadata1, hypercertMetadata2], + hypercerts: [ + { ...hypercert1, units: 100 }, + { ...hypercert2, units: 100 }, + ], + hyperboardHypercertMetadata: [hypercertMetadata1, hypercertMetadata2], users: [user1, user2], fractions: [ { @@ -205,7 +212,7 @@ describe("processCollectionToSection", async () => { hypercert_id: hypercert1.hypercert_id, owner_address: user1.address, token_id: 1, - units: 1, + units: 100, creation_block_timestamp: 1, creation_block_number: 1, last_update_block_number: 1, @@ -219,7 +226,7 @@ describe("processCollectionToSection", async () => { hypercert_id: hypercert2.hypercert_id, owner_address: user2.address, token_id: 2, - units: 157, + units: 100, creation_block_timestamp: 
1, creation_block_number: 1, last_update_block_number: 1, @@ -231,6 +238,10 @@ describe("processCollectionToSection", async () => { ], }); - expect(owners[0].percentage_owned).toBe(owners[1].percentage_owned); + expect(owners?.data?.[0].percentage_owned).toBe(50); + expect(owners?.data?.[1].percentage_owned).toBe(50); + expect(owners?.data?.[0].percentage_owned).toBe( + owners?.data?.[1].percentage_owned, + ); }); }); diff --git a/test/utils/processSectionsToHyperboardOwnership.test.ts b/test/utils/processSectionsToHyperboardOwnership.test.ts index 026b4f61..04ba738f 100644 --- a/test/utils/processSectionsToHyperboardOwnership.test.ts +++ b/test/utils/processSectionsToHyperboardOwnership.test.ts @@ -1,5 +1,6 @@ import { describe, it, expect } from "vitest"; import { processSectionsToHyperboardOwnership } from "../../src/utils/processSectionsToHyperboardOwnership.js"; +import { generateMockAddress } from "./testUtils.js"; describe("processSectionsToHyperboardOwnership", async () => { it("should return an empty array if no sections are provided", async () => { @@ -11,7 +12,7 @@ describe("processSectionsToHyperboardOwnership", async () => { it("should return empty array if section has no owners", async () => { const owners = processSectionsToHyperboardOwnership([ { - owners: [], + owners: { data: [], count: 0 }, }, ]); expect(owners).toBeDefined(); @@ -19,95 +20,117 @@ describe("processSectionsToHyperboardOwnership", async () => { }); it("should return ignore sections without owners", async () => { + const address = generateMockAddress(); const owners = processSectionsToHyperboardOwnership([ { - owners: [ - { - address: "0x123", - percentage_owned: 100, - }, - ], + owners: { + data: [ + { + address, + percentage_owned: 100, + }, + ], + count: 1, + }, }, { - owners: [], + owners: { data: [], count: 0 }, }, ]); expect(owners.length).toBe(1); - expect(owners[0].address).toBe("0x123"); + expect(owners[0].address).toBe(address); 
expect(owners[0].percentage_owned).toBe(100); }); it("should process a single section with a single owner", async () => { + const address = generateMockAddress(); const owners = processSectionsToHyperboardOwnership([ { - owners: [ - { - address: "0x123", - percentage_owned: 100, - }, - ], + owners: { + data: [ + { + address, + percentage_owned: 100, + }, + ], + count: 1, + }, }, ]); expect(owners.length).toBe(1); - expect(owners[0].address).toBe("0x123"); + expect(owners[0].address).toBe(address); expect(owners[0].percentage_owned).toBe(100); }); it("should process a single section with multiple owners", async () => { + const address1 = generateMockAddress(); + const address2 = generateMockAddress(); const owners = processSectionsToHyperboardOwnership([ { - owners: [ - { - address: "0x123", - percentage_owned: 50, - }, - { - address: "0x456", - percentage_owned: 50, - }, - ], + owners: { + data: [ + { + address: address1, + percentage_owned: 50, + }, + { + address: address2, + percentage_owned: 50, + }, + ], + count: 2, + }, }, ]); expect(owners.length).toBe(2); - expect(owners[0].address).toBe("0x123"); + expect(owners[0].address).toBe(address1); expect(owners[0].percentage_owned).toBe(50); - expect(owners[1].address).toBe("0x456"); + expect(owners[1].address).toBe(address2); expect(owners[1].percentage_owned).toBe(50); }); it("should process multiple sections with multiple owners", async () => { + const address1 = generateMockAddress(); + const address2 = generateMockAddress(); + const address3 = generateMockAddress(); const owners = processSectionsToHyperboardOwnership([ { - owners: [ - { - address: "0x123", - percentage_owned: 50, - }, - { - address: "0x456", - percentage_owned: 50, - }, - ], + owners: { + data: [ + { + address: address1, + percentage_owned: 50, + }, + { + address: address2, + percentage_owned: 50, + }, + ], + count: 2, + }, }, { - owners: [ - { - address: "0x123", - percentage_owned: 50, - }, - { - address: "0x789", - percentage_owned: 50, - 
}, - ], + owners: { + data: [ + { + address: address3, + percentage_owned: 50, + }, + { + address: address2, + percentage_owned: 50, + }, + ], + count: 2, + }, }, ]); expect(owners.length).toBe(3); - expect(owners[0].address).toBe("0x123"); - expect(owners[0].percentage_owned).toBe(50); - expect(owners[1].address).toBe("0x456"); - expect(owners[1].percentage_owned).toBe(25); - expect(owners[2].address).toBe("0x789"); + expect(owners[0].address).toBe(address1); + expect(owners[0].percentage_owned).toBe(25); + expect(owners[1].address).toBe(address2); + expect(owners[1].percentage_owned).toBe(50); + expect(owners[2].address).toBe(address3); expect(owners[2].percentage_owned).toBe(25); }); }); diff --git a/test/utils/testUtils.ts b/test/utils/testUtils.ts new file mode 100644 index 00000000..e1be7013 --- /dev/null +++ b/test/utils/testUtils.ts @@ -0,0 +1,780 @@ +import { faker } from "@faker-js/faker"; +import { currenciesByNetwork } from "@hypercerts-org/marketplace-sdk"; +import { Kysely, sql } from "kysely"; +import { DataType, newDb } from "pg-mem"; +import { getAddress } from "viem"; +import { expect } from "vitest"; +import { MarketplaceOrderSelect } from "../../src/services/database/entities/MarketplaceOrdersEntityService.js"; +import { CachingDatabase } from "../../src/types/kyselySupabaseCaching.js"; +import { DataDatabase } from "../../src/types/kyselySupabaseData.js"; + +export type TestDatabase = CachingDatabase | DataDatabase; + +export async function createTestDataDatabase( + setupSchema?: (db: Kysely) => Promise, +) { + const mem = newDb(); + + // Create database instance + const db = mem.adapters.createKysely() as Kysely; + + // NOTE: pg-mem does not support the generateUUID() function, so we need to register our own and for some reason it needs to be lowercase + mem.public.registerFunction({ + name: "generateuuid", + returns: DataType.uuid, + implementation: () => faker.string.uuid(), + }); + + // NOTE: pg-mem does not support the array_append 
function, so we need to register our own + mem.public.registerFunction({ + name: "array_append", + args: [ + mem.public.getType(DataType.text).asArray(), + mem.public.getType(DataType.text), + ], + returns: mem.public.getType(DataType.text).asArray(), + implementation: (arr: string[], element: string) => [...arr, element], + }); + + mem.public.registerFunction({ + name: "exists", + args: [mem.public.getType(DataType.uuid).asArray()], + returns: mem.public.getType(DataType.bool), + implementation: (arr: string[]) => arr.length > 0, + }); + + // Create marketplace_orders table + // TODO typings in DB are inconsisten do this will need to be updated when the DB is updated + await db.schema + .createTable("marketplace_orders") + .addColumn("id", "uuid", (col) => + col.primaryKey().defaultTo(sql`generateuuid()`), + ) + .addColumn("createdAt", "timestamp", (col) => + col.notNull().defaultTo(sql`now()`), + ) + .addColumn("quoteType", "bigint", (col) => col.notNull()) + .addColumn("globalNonce", "text", (col) => col.notNull()) + .addColumn("orderNonce", "text", (col) => col.notNull()) + .addColumn("strategyId", "bigint", (col) => col.notNull()) + .addColumn("collectionType", "bigint", (col) => col.notNull()) + .addColumn("collection", "text", (col) => col.notNull()) + .addColumn("currency", "text", (col) => col.notNull()) + .addColumn("signer", "text", (col) => col.notNull()) + .addColumn("startTime", "bigint", (col) => col.notNull()) + .addColumn("endTime", "bigint", (col) => col.notNull()) + .addColumn("price", "text", (col) => col.notNull()) + .addColumn("signature", "text", (col) => col.notNull()) + .addColumn("additionalParameters", "text", (col) => col.notNull()) + .addColumn("chainId", "bigint", (col) => col.notNull()) + .addColumn("subsetNonce", "bigint", (col) => col.notNull()) + .addColumn("itemIds", sql`text[]`, (col) => col.notNull()) + .addColumn("amounts", sql`bigint[]`, (col) => col.notNull()) + .addColumn("invalidated", "boolean", (col) => + 
col.notNull().defaultTo(false), + ) + .addColumn("validator_codes", sql`integer[]`) + .addColumn("hypercert_id", "text", (col) => col.notNull().defaultTo("")) + .execute(); + + // Create marketplace_order_nonces table + await db.schema + .createTable("marketplace_order_nonces") + .addColumn("address", "text", (col) => col.notNull()) + .addColumn("chain_id", "bigint", (col) => col.notNull()) + .addColumn("nonce_counter", "bigint", (col) => col.notNull()) + .addUniqueConstraint("marketplace_order_nonces_pkey", [ + "address", + "chain_id", + ]) + .execute(); + + // Create blueprints table + await db.schema + .createTable("blueprints") + .addColumn("id", "integer", (col) => col.primaryKey()) + .addColumn("created_at", "timestamp", (col) => + col.notNull().defaultTo(sql`now()`), + ) + .addColumn("form_values", "jsonb", (col) => col.notNull()) + .addColumn("minter_address", "text", (col) => col.notNull()) + .addColumn("minted", "boolean", (col) => col.notNull().defaultTo(false)) + .addColumn("hypercert_ids", sql`text[]`, (col) => col.notNull()) + .execute(); + + // Create collections table + await db.schema + .createTable("collections") + .addColumn("id", "uuid", (b) => + b.primaryKey().defaultTo(sql`generateuuid()`), + ) + .addColumn("name", "varchar") + .addColumn("description", "varchar") + .addColumn("chain_ids", sql`integer[]`, (col) => col.notNull()) + .addColumn("hidden", "boolean") + .addColumn("created_at", "timestamp") + .execute(); + + // Create users table + await db.schema + .createTable("users") + .addColumn("id", "uuid", (col) => + col.primaryKey().defaultTo(sql`generateuuid()`), + ) + .addColumn("address", "text", (col) => col.notNull()) + .addColumn("display_name", "text") + .addColumn("avatar", "text") + .addColumn("chain_id", "integer", (col) => col.notNull()) + .addColumn("created_at", "timestamp", (col) => + col.notNull().defaultTo(sql`now()`), + ) + .addUniqueConstraint("users_address_chain_id", ["address", "chain_id"]) + .execute(); + + // Create 
hypercerts table + await db.schema + .createTable("hypercerts") + .addColumn("hypercert_id", "text", (col) => col.notNull()) + .addColumn("collection_id", "uuid", (col) => col.notNull()) + .addColumn("created_at", "timestamp", (col) => + col.notNull().defaultTo(sql`now()`), + ) + .addUniqueConstraint("hypercerts_pkey", ["hypercert_id", "collection_id"]) + .execute(); + + // Create collection_blueprints table + await db.schema + .createTable("collection_blueprints") + .addColumn("blueprint_id", "integer", (col) => + col.notNull().references("blueprints.id").onDelete("cascade"), + ) + .addColumn("collection_id", "uuid", (col) => + col.notNull().references("collections.id").onDelete("cascade"), + ) + .addColumn("created_at", "timestamp", (col) => + col.notNull().defaultTo(sql`now()`), + ) + .addUniqueConstraint("collection_blueprints_pkey", [ + "blueprint_id", + "collection_id", + ]) + .execute(); + + // Create blueprint_admins table + await db.schema + .createTable("blueprint_admins") + .addColumn("created_at", "timestamp", (col) => + col.notNull().defaultTo(sql`now()`), + ) + .addColumn("user_id", "uuid", (col) => + col.notNull().references("users.id").onDelete("cascade"), + ) + .addColumn("blueprint_id", "integer", (col) => + col.notNull().references("blueprints.id").onDelete("cascade"), + ) + .addUniqueConstraint("blueprint_admins_pkey", ["user_id", "blueprint_id"]) + .execute(); + + // Create collection_admins table + await db.schema + .createTable("collection_admins") + .addColumn("collection_id", "uuid", (col) => + col.notNull().references("collections.id").onDelete("cascade"), + ) + .addColumn("user_id", "uuid", (col) => + col.notNull().references("users.id").onDelete("cascade"), + ) + .addUniqueConstraint("collection_admins_pkey", ["collection_id", "user_id"]) + .execute(); + + // Create blueprints_with_admins view + await db.schema + .createView("blueprints_with_admins") + .as( + db + .selectFrom("blueprints") + .innerJoin( + "blueprint_admins", + 
"blueprints.id", + "blueprint_admins.blueprint_id", + ) + .innerJoin("users", "blueprint_admins.user_id", "users.id") + .select([ + "blueprints.id as id", + "blueprints.form_values as form_values", + "blueprints.created_at as created_at", + "blueprints.minter_address as minter_address", + "blueprints.minted as minted", + "blueprints.hypercert_ids as hypercert_ids", + "users.address as admin_address", + "users.chain_id as admin_chain_id", + "users.avatar as avatar", + "users.display_name as display_name", + ]), + ) + .execute(); + + // Create hyperboards table + await db.schema + .createTable("hyperboards") + .addColumn("id", "uuid", (col) => + col.primaryKey().defaultTo(sql`generateuuid()`), + ) + .addColumn("created_at", "timestamptz", (col) => + col.notNull().defaultTo(sql`now()`), + ) + .addColumn("name", "text", (col) => col.notNull()) + .addColumn("background_image", "text") + .addColumn("grayscale_images", "boolean", (col) => + col.notNull().defaultTo(false), + ) + .addColumn("tile_border_color", "text") + .addColumn("chain_ids", sql`integer[]`, (col) => col.notNull()) + .execute(); + + // Create hyperboard_blueprint_metadata table + await db.schema + .createTable("hyperboard_blueprint_metadata") + .addColumn("blueprint_id", "integer", (col) => col.notNull()) + .addColumn("hyperboard_id", "text", (col) => col.notNull()) + .addColumn("collection_id", "uuid", (col) => col.notNull()) + .addColumn("display_size", "integer", (col) => col.notNull()) + .addColumn("created_at", "timestamp", (col) => + col.notNull().defaultTo(sql`now()`), + ) + .execute(); + + // Create hyperboard_hypercert_metadata table + await db.schema + .createTable("hyperboard_hypercert_metadata") + .addColumn("hyperboard_id", "text", (col) => col.notNull()) + .addColumn("hypercert_id", "text", (col) => col.notNull()) + .addColumn("collection_id", "text", (col) => col.notNull()) + .addColumn("display_size", "integer", (col) => col.notNull()) + .addColumn("created_at", "timestamp", (col) => + 
col.notNull().defaultTo(sql`now()`), + ) + .addUniqueConstraint("hyperboard_hypercert_metadata_pkey", [ + "hyperboard_id", + "hypercert_id", + "collection_id", + ]) + .execute(); + + // Create hyperboard_collections table + await db.schema + .createTable("hyperboard_collections") + .addColumn("created_at", "timestamptz", (col) => + col.notNull().defaultTo(sql`now()`), + ) + .addColumn("hyperboard_id", "uuid", (col) => + col.notNull().references("hyperboards.id").onDelete("cascade"), + ) + .addColumn("collection_id", "uuid", (col) => + col.notNull().references("collections.id").onDelete("cascade"), + ) + .addColumn("label", "text") + .addColumn("render_method", "text") + .addUniqueConstraint("hyperboard_collections_pkey", [ + "hyperboard_id", + "collection_id", + ]) + .execute(); + + // Create hyperboard_admins table + await db.schema + .createTable("hyperboard_admins") + .addColumn("created_at", "timestamptz", (col) => + col.notNull().defaultTo(sql`now()`), + ) + .addColumn("user_id", "uuid", (col) => + col.notNull().references("users.id").onDelete("cascade"), + ) + .addColumn("hyperboard_id", "uuid", (col) => + col.notNull().references("hyperboards.id").onDelete("cascade"), + ) + .addUniqueConstraint("hyperboard_admins_pkey", ["user_id", "hyperboard_id"]) + .execute(); + + // Create hyperboards_with_admins view + await db.schema + .createView("hyperboards_with_admins") + .orReplace() + .as( + db + .selectFrom("hyperboards") + .innerJoin( + "hyperboard_admins", + "hyperboards.id", + "hyperboard_admins.hyperboard_id", + ) + .innerJoin("users", "hyperboard_admins.user_id", "users.id") + .select([ + "hyperboards.id as id", + "hyperboards.created_at as created_at", + "hyperboards.name as name", + "hyperboards.background_image as background_image", + "hyperboards.grayscale_images as grayscale_images", + "hyperboards.tile_border_color as tile_border_color", + "hyperboards.chain_ids as chain_ids", + "users.address as admin_address", + "users.chain_id as admin_chain_id", + 
"users.avatar as avatar", + "users.display_name as display_name", + ]), + ) + .execute(); + + await db.schema + .createTable("signature_requests") + .addColumn("safe_address", "varchar", (col) => col.notNull()) + .addColumn("message_hash", "text", (col) => col.notNull()) + .addColumn("chain_id", "integer", (col) => col.notNull()) + .addColumn("timestamp", "integer", (col) => col.notNull()) + .addColumn("message", "jsonb", (col) => col.notNull()) + .addColumn("purpose", "varchar", (col) => + col.notNull().check(sql`purpose IN ('update_user_data')`), + ) + .addColumn("status", "varchar", (col) => + col.notNull().check(sql`status IN ('pending', 'executed', 'canceled')`), + ) + .addUniqueConstraint("signature_requests_pkey", [ + "safe_address", + "message_hash", + ]) + .execute(); + // Allow caller to setup additional schema + if (setupSchema) { + await setupSchema(db); + } + + return { db, mem }; +} + +/** + * Creates a test database instance with the given schema + * @param setupSchema - Optional function to setup additional schema beyond the base tables + * @returns Object containing database instance and memory db instance + */ +export async function createTestCachingDatabase( + setupSchema?: (db: Kysely) => Promise, +) { + const mem = newDb(); + const db = mem.adapters.createKysely() as Kysely; + + // Create base tables that are commonly needed + await db.schema + .createTable("contracts") + .addColumn("id", "varchar", (b) => b.primaryKey()) + .addColumn("chain_id", "integer") + .addColumn("contract_address", "varchar") + .addColumn("start_block", "integer") + .execute(); + + await db.schema + .createTable("claims") + .addColumn("id", "integer", (b) => b.primaryKey()) + .addColumn("contracts_id", "varchar") + .execute(); + + await db.schema + .createTable("fractions_view") + .addColumn("id", "varchar", (b) => b.primaryKey()) + .addColumn("claims_id", "varchar") + .addColumn("hypercert_id", "varchar") + .addColumn("fraction_id", "varchar") + 
.addColumn("owner_address", "varchar") + .addColumn("units", "integer") + .execute(); + + // Allow caller to setup additional schema + if (setupSchema) { + await setupSchema(db); + } + + return { db, mem }; +} + +export function generateChainId(): bigint { + return 11155111n; +} + +export function generateCurrency(): string { + const currency = currenciesByNetwork[11155111]["WETH"]; + if (!currency) { + throw new Error("Currency not found"); + } + return currency.address; +} + +/** + * Generates a mock Ethereum address using faker and viem's getAddress + * @returns A checksummed Ethereum address + */ +export function generateMockAddress(): string { + return getAddress(faker.finance.ethereumAddress()); +} + +// TODO can be more specific meeting constraint of claims/fraction token ids +export function generateTokenId(): bigint { + return faker.number.bigInt(); +} + +// chain_id-contract_address-fraction_id +export function generateFractionId(): string { + return `${generateChainId()}-${generateMockAddress()}-${generateTokenId().toString()}`; +} + +// TODO filter on allowed values for claim_id and fraction_id +// chain_id-contract_address-claim_id +export function generateHypercertId(): string { + return `${generateChainId()}-${generateMockAddress()}-${generateTokenId().toString()}`; +} + +/** + * Generates a mock contract record + * @returns A mock contract record + */ +export function generateMockContract() { + return { + id: faker.string.uuid(), + chain_id: faker.number.int({ min: 1, max: 100000 }), + contract_address: generateMockAddress(), + start_block: faker.number.int({ min: 1, max: 1000000 }), + }; +} + +/** + * Generates a mock fraction record + * @returns A mock fraction record + */ +export function generateMockFraction() { + return { + id: faker.string.uuid(), + claims_id: faker.string.uuid(), + hypercert_id: generateHypercertId(), + fraction_id: generateFractionId(), + owner_address: generateMockAddress(), + units: faker.number.bigInt({ min: 100000n, max: 
100000000000n }), + }; +} + +/** + * Generates a mock metadata record with all required fields + * @returns A mock metadata record with realistic test data + */ +export function generateMockMetadata() { + return { + id: faker.string.uuid(), + name: faker.commerce.productName(), + description: faker.lorem.paragraph(), + uri: `ipfs://${faker.string.alphanumeric(46)}`, + external_url: faker.internet.url(), + work_scope: faker.lorem.sentence(), + work_timeframe_from: faker.date.past().toISOString(), + work_timeframe_to: faker.date.future().toISOString(), + impact_scope: faker.lorem.sentence(), + impact_timeframe_from: faker.date.past().toISOString(), + impact_timeframe_to: faker.date.future().toISOString(), + contributors: [faker.internet.userName(), faker.internet.userName()], + rights: faker.lorem.sentence(), + properties: {}, + allow_list_uri: null, + parsed: true, + }; +} + +/** + * Generates a minimal mock metadata record with only required fields + * Useful for testing specific fields or error cases + * @returns A minimal mock metadata record + */ +export function generateMinimalMockMetadata() { + return { + id: faker.string.uuid(), + uri: `ipfs://${faker.string.alphanumeric(46)}`, + }; +} + +/** + * Generates a mock blueprint record + * @returns A mock blueprint record with realistic test data + */ +export function generateMockBlueprint() { + return { + id: faker.number.int({ min: 1, max: 100000 }), + created_at: faker.date.past().toISOString(), + form_values: { + name: faker.commerce.productName(), + description: faker.lorem.paragraph(), + contributors: [faker.person.fullName(), faker.internet.username()], + work_scope: faker.lorem.sentence(), + impact_scope: faker.lorem.sentence(), + rights: faker.lorem.sentence(), + }, + minter_address: generateMockAddress(), + minted: faker.datatype.boolean(), + hypercert_ids: [generateHypercertId(), generateHypercertId()], + }; +} + +/** + * Generates a mock user record + * @returns A mock user record with realistic test 
data + */ +export function generateMockUser( + overrides?: Partial<{ + id: string; + address: string; + chain_id: number; + display_name: string; + avatar: string; + }>, +) { + const defaultUser = { + id: faker.string.uuid(), + address: generateMockAddress(), + chain_id: faker.number.int({ min: 1, max: 100000 }), + display_name: faker.internet.username(), + avatar: faker.image.avatar(), + created_at: faker.date.past().toISOString(), + }; + + return { + ...defaultUser, + ...overrides, + }; +} + +/** + * Generates a mock signature request + * @param overrides Optional overrides for the generated data + * @returns A mock signature request with realistic test data + */ +export function generateMockSignatureRequest() { + return { + safe_address: generateMockAddress(), + message_hash: `0x${Array.from({ length: 64 }, () => Math.floor(Math.random() * 16).toString(16)).join("")}`, + chain_id: faker.number.int({ min: 1, max: 100000 }), + timestamp: Math.floor(Date.now() / 1000), + message: JSON.stringify({ test: "data" }), + purpose: "update_user_data" as const, + status: "pending" as const, + }; +} + +export function generateMockHypercert() { + return { + chain_id: generateChainId(), + hypercert_id: generateHypercertId(), + units: faker.number.bigInt({ min: 100000n, max: 100000000000n }), + owner_address: generateMockAddress(), + created_at: faker.date.past().toISOString(), + contracts_id: generateMockContract().id, + token_id: generateTokenId(), + uri: `ipfs://${faker.string.alphanumeric(46)}`, + creation_block_number: faker.number.int({ min: 1, max: 1000000 }), + creation_block_timestamp: faker.date.past().toISOString(), + last_update_block_number: faker.number.int({ min: 1, max: 1000000 }), + last_update_block_timestamp: faker.date.past().toISOString(), + attestations_count: faker.number.int({ min: 0, max: 100 }), + sales_count: faker.number.int({ min: 0, max: 100 }), + }; +} + +export function generateMockCollection() { + return { + id: faker.string.uuid(), + 
created_at: faker.date.past().toISOString(), + name: faker.commerce.productName(), + description: faker.lorem.paragraph(), + chain_ids: [generateChainId()], + hidden: faker.datatype.boolean(), + }; +} + +/** + * Generates a mock marketplace order record + * @returns A mock marketplace order record + */ +export function generateMockOrder( + overrides?: Partial<{ + id: string; + createdAt: string; + quoteType: bigint; + globalNonce: string; + orderNonce: string; + strategyId: bigint; + collectionType: bigint; + collection: string; + currency: string; + signer: string; + startTime: bigint; + endTime: bigint; + price: string; + signature: string; + additionalParameters: string; + chainId: bigint; + subsetNonce: bigint; + itemIds: string[]; + amounts: bigint[]; + invalidated: boolean; + validator_codes: number[]; + hypercert_id: string; + }>, +) { + const defaultOrder = { + id: faker.string.uuid(), + createdAt: new Date().toISOString(), + quoteType: faker.number.bigInt({ min: 1n, max: 100n }), + globalNonce: faker.string.alphanumeric(10), + orderNonce: faker.string.alphanumeric(10), + strategyId: faker.number.bigInt({ min: 1n, max: 100n }), + collectionType: faker.number.bigInt({ min: 1n, max: 100n }), + collection: generateMockAddress(), + currency: generateCurrency(), + signer: generateMockAddress(), + startTime: faker.number.bigInt({ min: 1n, max: 100000n }), + endTime: faker.number.bigInt({ min: 100001n, max: 200000n }), + price: faker.number.bigInt({ min: 1000000n, max: 1000000000n }).toString(), + signature: faker.string.hexadecimal({ length: 130 }), + additionalParameters: faker.string.alphanumeric(10), + chainId: generateChainId(), + subsetNonce: faker.number.bigInt({ min: 1n, max: 100n }), + itemIds: [generateTokenId().toString(), generateTokenId().toString()], + amounts: [ + faker.number.bigInt({ min: 1n, max: 1000n }), + faker.number.bigInt({ min: 1n, max: 1000n }), + ], + invalidated: faker.datatype.boolean(), + validator_codes: [faker.number.int({ min: 1, 
max: 100 })], + hypercert_id: generateHypercertId(), + }; + + return { + ...defaultOrder, + ...overrides, + } as unknown as MarketplaceOrderSelect; +} + +export function generateMockHyperboard() { + const mockUser = generateMockUser(); + const mockCollection = generateMockCollection(); + + return { + id: faker.string.uuid(), + name: faker.commerce.productName(), + chain_ids: [faker.number.bigInt()], + background_image: faker.image.url(), + grayscale_images: faker.datatype.boolean(), + tile_border_color: faker.color.rgb(), + admins: { + data: [mockUser], + count: 1, + }, + sections: { + data: [ + { + label: faker.commerce.department(), + collections: [mockCollection], + entries: [ + { + id: faker.string.uuid(), + is_blueprint: faker.datatype.boolean(), + percentage_of_section: faker.number.float({ + min: 0, + max: 100, + fractionDigits: 2, + }), + display_size: faker.number.float({ + min: 1, + max: 10, + fractionDigits: 2, + }), + name: faker.commerce.productName(), + total_units: faker.number.bigInt({ min: 1000n, max: 1000000n }), + owners: { + data: [ + { + ...mockUser, + percentage: faker.number.float({ + min: 0, + max: 100, + fractionDigits: 2, + }), + units: faker.number.bigInt({ min: 1n, max: 1000n }), + }, + ], + count: 1, + }, + }, + ], + owners: { + data: [ + { + ...mockUser, + percentage_owned: faker.number.float({ + min: 0, + max: 100, + fractionDigits: 2, + }), + }, + ], + count: 1, + }, + }, + ], + count: 1, + }, + }; +} + +// Check similarity of mock and returned object. The createdAt field is a timestamp and will be different. Its value in seconds should be the same. +// Bigints and numbers are compared as strings. 
+export const checkSimilarity = (obj1: unknown, obj2: unknown) => { + // Extract all timestamp fields (both regular and timezone-aware) + const timestampFields = ["createdAt", "created_at"]; + const timestamps1: Record = {}; + const timestamps2: Record = {}; + const rest1: Record = {}; + const rest2: Record = {}; + + // Separate timestamp fields from other fields + Object.entries(obj1 || {}).forEach(([key, value]) => { + if (timestampFields.includes(key)) { + timestamps1[key] = value as string; + } else { + rest1[key] = value; + } + }); + + Object.entries(obj2 || {}).forEach(([key, value]) => { + if (timestampFields.includes(key)) { + timestamps2[key] = value as string; + } else { + rest2[key] = value; + } + }); + + // Compare non-timestamp fields + for (const key in rest1) { + if (typeof rest1[key] === "bigint" || typeof rest1[key] === "number") { + expect(rest1[key].toString()).toEqual(rest2[key]?.toString()); + } else if (Array.isArray(rest1[key])) { + for (let i = 0; i < rest1[key].length; i++) { + checkSimilarity(rest1[key][i], rest2[key]?.[i]); + } + } else { + expect(rest1[key]).toEqual(rest2[key]); + } + } + + // Compare timestamp fields + for (const key in timestamps1) { + if (timestamps1[key] && timestamps2[key]) { + const date1 = new Date(timestamps1[key]); + const date2 = new Date(timestamps2[key]); + expect(date1.getTime()).toEqual(date2.getTime()); + } + } +}; diff --git a/tsconfig.json b/tsconfig.json index 77a84298..5e05fafc 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -12,13 +12,11 @@ "moduleResolution": "NodeNext", "resolveJsonModule": true, "isolatedModules": true, -// "verbatimModuleSyntax": true, + // "verbatimModuleSyntax": true, "useDefineForClassFields": true, "incremental": true, "outDir": "./dist", - "rootDirs": [ - "src" - ], + "rootDirs": ["src"], "plugins": [ { "name": "@0no-co/graphqlsp", @@ -27,14 +25,8 @@ } ] }, - "include": [ - "src/**/*.ts", - "src/**/*.d.ts", - "src/**/*.json" - ], - "exclude": [ - "node_modules" - ], + 
"include": ["src/**/*.ts", "src/**/*.d.ts", "src/**/*.json"], + "exclude": ["node_modules"], "ts-node": { "swc": true } diff --git a/tsoa.json b/tsoa.json index 158dc009..e7722ede 100644 --- a/tsoa.json +++ b/tsoa.json @@ -16,7 +16,11 @@ "routesDir": "src/__generated__/routes", "esm": true, "middleware": { - "v1/upload": [{ "name": "upload.array", "args": ["files", 5] }] - } + "v2/upload": [{ "name": "upload.array", "args": ["files", 5] }] + }, + "iocModule": "src/lib/tsoa/iocContainer.ts", + "useNamedParameters": true, + "useMethodParameters": true, + "ioc": "tsyringe" } } diff --git a/vitest.config.ts b/vitest.config.ts index 3699c2df..cbee162a 100644 --- a/vitest.config.ts +++ b/vitest.config.ts @@ -1,9 +1,13 @@ import { resolve } from "node:path"; +import swc from "unplugin-swc"; import { configDefaults, defineConfig } from "vitest/config"; export default defineConfig({ test: { setupFiles: ["./test/setup-env.ts"], + globals: true, + environment: "node", + pool: "threads", exclude: [...configDefaults.exclude, "./lib/**/*"], coverage: { // you can include other reporters, but 'json-summary' is required, json is recommended @@ -11,10 +15,10 @@ export default defineConfig({ // If you want a coverage reports even if your tests are failing, include the reportOnFailure option reportOnFailure: true, thresholds: { - lines: 24, - branches: 72, - functions: 58, - statements: 24, + statements: 58, + branches: 38, + functions: 25, + lines: 58, }, include: ["src/**/*.ts"], exclude: [ @@ -23,7 +27,6 @@ export default defineConfig({ "**/types.ts", "src/__generated__/**/*", "src/graphql/**/*", - "src/services/**/*", "src/types/**/*", "src/abis/**/*", "./lib/**/*", @@ -34,4 +37,32 @@ export default defineConfig({ resolve: { alias: [{ find: "@", replacement: resolve(__dirname, "./src") }], }, + plugins: [ + // This is required to build the test files with SWC + swc.vite({ + sourceMaps: "inline", + jsc: { + target: "es2022", + externalHelpers: true, + keepClassNames: true, + 
parser: { + syntax: "typescript", + tsx: true, + decorators: true, + dynamicImport: true, + }, + transform: { + legacyDecorator: true, + decoratorMetadata: true, + }, + }, + module: { + type: "es6", + strictMode: true, + lazy: false, + noInterop: false, + }, + isModule: true, + }), + ], });