From c491617ceca7949344328fb4f8fdb3c1b11a7253 Mon Sep 17 00:00:00 2001 From: jaipaljadeja Date: Wed, 29 Apr 2026 13:47:10 +0530 Subject: [PATCH 01/12] flight: refactor and add record batch with metadata --- packages/flight/src/arrow-flight-client.ts | 168 ++++++++++++++++++ .../flight/src/arrow-flight-sql-client.ts | 147 +++++++++++++++ packages/flight/src/arrow-flight-sql.ts | 98 ---------- packages/flight/src/arrow-flight.ts | 126 ------------- packages/flight/src/arrow-utils.ts | 3 +- packages/flight/src/flight-client-error.ts | 27 +++ packages/flight/src/index.ts | 14 +- packages/flight/src/record-batch-decoder.ts | 20 ++- .../flight/src/record-batch-with-metadata.ts | 6 + packages/flight/test/arrow-flight-sql.test.ts | 151 +++++++--------- 10 files changed, 444 insertions(+), 316 deletions(-) create mode 100644 packages/flight/src/arrow-flight-client.ts create mode 100644 packages/flight/src/arrow-flight-sql-client.ts delete mode 100644 packages/flight/src/arrow-flight-sql.ts delete mode 100644 packages/flight/src/arrow-flight.ts create mode 100644 packages/flight/src/flight-client-error.ts create mode 100644 packages/flight/src/record-batch-with-metadata.ts diff --git a/packages/flight/src/arrow-flight-client.ts b/packages/flight/src/arrow-flight-client.ts new file mode 100644 index 0000000..6c7930b --- /dev/null +++ b/packages/flight/src/arrow-flight-client.ts @@ -0,0 +1,168 @@ +import type { RecordBatch, Schema } from "apache-arrow"; + +import { Config, Context, Effect, Layer, Scope, Stream } from "effect"; +import { + type CallOptions, + createClient, + type DefaultCallOptions, + type NormalizedServiceDefinition, +} from "nice-grpc"; + +import type { RecordBatchWithMetadata } from "./record-batch-with-metadata"; + +import { decodeFlightDataStream, decodeSchemaFromFlightInfo } from "./arrow-utils"; +import { + FlightDecodeError, + type FlightClientErrorLike, + FlightProtocolError, + FlightTransportError, +} from "./flight-client-error"; +import { type 
ClientOptions, createChannelFromConfig, type HostOrChannel } from "./proto-utils"; +import { + type FlightData, + type FlightDescriptor, + type FlightInfo, + type FlightServiceClient, + FlightServiceDefinition, + type HandshakeRequest, + type HandshakeResponse, + type PutResult, + type Ticket, +} from "./proto/Flight"; + +export type ArrowFlightClientOptions = HostOrChannel & { + readonly defaultCallOptions?: DefaultCallOptions< + NormalizedServiceDefinition + >; +}; + +export interface ArrowFlightClientService { + readonly handshake: ( + request: AsyncIterable, + options?: CallOptions, + ) => Stream.Stream; + readonly getFlightInfo: ( + request: FlightDescriptor, + options?: CallOptions, + ) => Effect.Effect; + readonly doGet: ( + request: Ticket, + options: { readonly schema: Schema } & CallOptions, + ) => Stream.Stream; + readonly doPut: ( + request: AsyncIterable, + options?: CallOptions, + ) => AsyncIterable; + readonly executeFlightInfo: ( + info: FlightInfo, + options?: CallOptions, + ) => Stream.Stream; +} + +export class ArrowFlightClient extends Context.Service< + ArrowFlightClient, + ArrowFlightClientService +>()("@useairfoil/flight/ArrowFlightClient") {} + +const mapTransportError = (message: string, cause: unknown) => + new FlightTransportError({ + message, + cause, + }); + +const mapDecodeError = (message: string, cause: unknown) => + new FlightDecodeError({ + message, + cause, + }); + +const streamFromAsyncIterable = ( + iterable: AsyncIterable, + message: string, +): Stream.Stream => + Stream.fromAsyncIterable(iterable, (cause) => mapTransportError(message, cause)); + +export const make = Effect.fnUntraced(function* ( + options: ArrowFlightClientOptions, +): Effect.fn.Return { + const ownsChannel = "host" in options; + const channel = createChannelFromConfig(options); + const client: FlightServiceClient = createClient( + FlightServiceDefinition, + channel, + options.defaultCallOptions, + ); + + if (ownsChannel) { + const scope = yield* Scope.Scope; 
+ yield* Scope.addFinalizer( + scope, + Effect.sync(() => { + channel.close(); + }), + ); + } + + const doGet = ( + request: Ticket, + options: { readonly schema: Schema } & CallOptions, + ): Stream.Stream => { + const { schema: expectedSchema } = options; + return Stream.fromAsyncIterable( + decodeFlightDataStream(client.doGet(request, options), { expectedSchema }), + (cause) => mapDecodeError("Flight batch decoding failed", cause), + ); + }; + + const executeFlightInfo = ( + info: FlightInfo, + options?: CallOptions, + ): Stream.Stream => { + const schema = decodeSchemaFromFlightInfo(info); + if (!schema) { + return Stream.fail( + new FlightProtocolError({ + message: "FlightInfo must include a schema", + }), + ); + } + + return Stream.fromIterable(info.endpoint).pipe( + Stream.flatMap((endpoint) => + endpoint.ticket === undefined + ? Stream.empty + : doGet(endpoint.ticket, { + schema, + ...options, + }), + ), + ); + }; + + return ArrowFlightClient.of({ + handshake: (request, options) => + streamFromAsyncIterable(client.handshake(request, options), "Flight handshake failed"), + getFlightInfo: (request, options) => + Effect.tryPromise({ + try: () => client.getFlightInfo(request, options), + catch: (cause) => mapTransportError("Flight getFlightInfo failed", cause), + }), + doGet, + doPut: (request, options) => client.doPut(request, options), + executeFlightInfo, + }); +}); + +export const layer = (options: ArrowFlightClientOptions): Layer.Layer => + Layer.effect(ArrowFlightClient, make(options)); + +export const layerConfig = (options: Config.Wrap) => + Layer.effect( + ArrowFlightClient, + Effect.gen(function* () { + const resolved = yield* Config.unwrap(options); + return yield* make(resolved); + }), + ); + +export type { CallOptions, ClientOptions, HostOrChannel, RecordBatch, RecordBatchWithMetadata }; diff --git a/packages/flight/src/arrow-flight-sql-client.ts b/packages/flight/src/arrow-flight-sql-client.ts new file mode 100644 index 0000000..f37bcd6 --- 
/dev/null +++ b/packages/flight/src/arrow-flight-sql-client.ts @@ -0,0 +1,147 @@ +import type { CallOptions } from "nice-grpc"; + +import { Config, Context, Effect, Layer, Scope, Stream } from "effect"; + +import type { FlightClientErrorLike } from "./flight-client-error"; +import type { RemoveTypeUrl } from "./proto-utils"; +import type { RecordBatchWithMetadata } from "./record-batch-with-metadata"; + +import { + make as makeArrowFlightClient, + type ArrowFlightClientOptions, + type ArrowFlightClientService, +} from "./arrow-flight-client"; +import { Any } from "./proto/any"; +import { FlightDescriptor, FlightDescriptor_DescriptorType, type FlightInfo } from "./proto/Flight"; +import { + CommandGetCatalogs, + CommandGetDbSchemas, + CommandGetTables, + CommandGetTableTypes, + CommandStatementQuery, +} from "./proto/FlightSql"; + +export type ArrowFlightSqlClientOptions = ArrowFlightClientOptions; + +export interface ArrowFlightSqlClientService { + readonly executeFlightInfo: ( + info: FlightInfo, + options?: CallOptions, + ) => Stream.Stream; + readonly getCatalogs: ( + request: RemoveTypeUrl, + options?: CallOptions, + ) => Effect.Effect; + readonly getDbSchemas: ( + request: RemoveTypeUrl, + options?: CallOptions, + ) => Effect.Effect; + readonly getTables: ( + request: RemoveTypeUrl, + options?: CallOptions, + ) => Effect.Effect; + readonly getTableTypes: ( + request: RemoveTypeUrl, + options?: CallOptions, + ) => Effect.Effect; + readonly executeQuery: ( + request: RemoveTypeUrl, + options?: CallOptions, + ) => Effect.Effect; +} + +export class ArrowFlightSqlClient extends Context.Service< + ArrowFlightSqlClient, + ArrowFlightSqlClientService +>()("@useairfoil/flight/ArrowFlightSqlClient") {} + +export const make = Effect.fnUntraced(function* ( + options: ArrowFlightSqlClientOptions, +): Effect.fn.Return { + const inner: ArrowFlightClientService = yield* makeArrowFlightClient(options); + + return ArrowFlightSqlClient.of({ + executeFlightInfo: (info, options) 
=> inner.executeFlightInfo(info, options), + getCatalogs: (request, options) => + inner.getFlightInfo( + createCommandDescriptor( + CommandGetCatalogs.$type, + CommandGetCatalogs.encode({ + $type: CommandGetCatalogs.$type, + ...request, + }).finish(), + ), + options, + ), + getDbSchemas: (request, options) => + inner.getFlightInfo( + createCommandDescriptor( + CommandGetDbSchemas.$type, + CommandGetDbSchemas.encode({ + $type: CommandGetDbSchemas.$type, + ...request, + }).finish(), + ), + options, + ), + getTables: (request, options) => + inner.getFlightInfo( + createCommandDescriptor( + CommandGetTables.$type, + CommandGetTables.encode({ + $type: CommandGetTables.$type, + ...request, + }).finish(), + ), + options, + ), + getTableTypes: (request, options) => + inner.getFlightInfo( + createCommandDescriptor( + CommandGetTableTypes.$type, + CommandGetTableTypes.encode({ + $type: CommandGetTableTypes.$type, + ...request, + }).finish(), + ), + options, + ), + executeQuery: (request, options) => + inner.getFlightInfo( + createCommandDescriptor( + CommandStatementQuery.$type, + CommandStatementQuery.encode({ + $type: CommandStatementQuery.$type, + ...request, + }).finish(), + ), + options, + ), + }); +}); + +export const layer = (options: ArrowFlightSqlClientOptions): Layer.Layer => + Layer.effect(ArrowFlightSqlClient, make(options)); + +export const layerConfig = (options: Config.Wrap) => + Layer.effect( + ArrowFlightSqlClient, + Effect.gen(function* () { + const resolved = yield* Config.unwrap(options); + return yield* make(resolved); + }), + ); + +function createCommandDescriptor(typeUrl: string, value: Uint8Array): FlightDescriptor { + const cmd = Any.create({ + typeUrl: `type.googleapis.com/${typeUrl}`, + value, + }); + + return FlightDescriptor.create({ + type: FlightDescriptor_DescriptorType.CMD, + cmd: Any.encode(cmd).finish(), + }); +} + +export type { CallOptions, FlightInfo, RecordBatchWithMetadata }; diff --git a/packages/flight/src/arrow-flight-sql.ts 
b/packages/flight/src/arrow-flight-sql.ts deleted file mode 100644 index 4262cfc..0000000 --- a/packages/flight/src/arrow-flight-sql.ts +++ /dev/null @@ -1,98 +0,0 @@ -import type { CallOptions } from "nice-grpc"; - -import type { ClientOptions, HostOrChannel, RemoveTypeUrl } from "./proto-utils"; - -import { ArrowFlightClient } from "./arrow-flight"; -import { Any } from "./proto/any"; -import { - FlightDescriptor, - FlightDescriptor_DescriptorType, - type FlightInfo, - type FlightServiceDefinition, -} from "./proto/Flight"; -import { - CommandGetCatalogs, - CommandGetDbSchemas, - CommandGetTables, - CommandGetTableTypes, - CommandStatementQuery, -} from "./proto/FlightSql"; - -export class ArrowFlightSqlClient { - private inner: ArrowFlightClient; - - constructor(config: HostOrChannel, options: ClientOptions = {}) { - this.inner = new ArrowFlightClient(config, options); - } - - executeFlightInfo(request: FlightInfo, options?: CallOptions) { - return this.inner.executeFlightInfo(request, options); - } - - async getCatalogs(request: RemoveTypeUrl, options?: CallOptions) { - const descriptor = createCommandDescriptor( - CommandGetCatalogs.$type, - CommandGetCatalogs.encode({ - $type: CommandGetCatalogs.$type, - ...request, - }).finish(), - ); - return this.inner.getFlightInfo(descriptor, options); - } - - async getDbSchemas(request: RemoveTypeUrl, options?: CallOptions) { - const descriptor = createCommandDescriptor( - CommandGetDbSchemas.$type, - CommandGetDbSchemas.encode({ - $type: CommandGetDbSchemas.$type, - ...request, - }).finish(), - ); - return this.inner.getFlightInfo(descriptor, options); - } - - async getTables(request: RemoveTypeUrl, options?: CallOptions) { - const descriptor = createCommandDescriptor( - CommandGetTables.$type, - CommandGetTables.encode({ - $type: CommandGetTables.$type, - ...request, - }).finish(), - ); - return this.inner.getFlightInfo(descriptor, options); - } - - async getTableTypes(request: RemoveTypeUrl, options?: CallOptions) { 
- const descriptor = createCommandDescriptor( - CommandGetTableTypes.$type, - CommandGetTableTypes.encode({ - $type: CommandGetTableTypes.$type, - ...request, - }).finish(), - ); - return this.inner.getFlightInfo(descriptor, options); - } - - async executeQuery(request: RemoveTypeUrl, options?: CallOptions) { - const descriptor = createCommandDescriptor( - CommandStatementQuery.$type, - CommandStatementQuery.encode({ - $type: CommandStatementQuery.$type, - ...request, - }).finish(), - ); - return this.inner.getFlightInfo(descriptor, options); - } -} - -function createCommandDescriptor(typeUrl: string, value: Uint8Array): FlightDescriptor { - const cmd = Any.create({ - typeUrl: `type.googleapis.com/${typeUrl}`, - value, - }); - - return FlightDescriptor.create({ - type: FlightDescriptor_DescriptorType.CMD, - cmd: Any.encode(cmd).finish(), - }); -} diff --git a/packages/flight/src/arrow-flight.ts b/packages/flight/src/arrow-flight.ts deleted file mode 100644 index 76dbdfb..0000000 --- a/packages/flight/src/arrow-flight.ts +++ /dev/null @@ -1,126 +0,0 @@ -import type { RecordBatch, Schema } from "apache-arrow"; - -import { type CallOptions, createClient } from "nice-grpc"; - -import { decodeFlightDataStream, decodeSchemaFromFlightInfo } from "./arrow-utils"; -import { type ClientOptions, createChannelFromConfig, type HostOrChannel } from "./proto-utils"; -import { - type FlightData, - type FlightDescriptor, - type FlightInfo, - type FlightServiceClient, - FlightServiceDefinition, - type HandshakeRequest, - type HandshakeResponse, - type PutResult, - type Ticket, -} from "./proto/Flight"; - -export class ArrowFlightClient { - private client: FlightServiceClient; - - constructor(config: HostOrChannel, options: ClientOptions = {}) { - const channel = createChannelFromConfig(config); - - this.client = createClient(FlightServiceDefinition, channel, options.defaultCallOptions); - } - - executeFlightInfo(info: FlightInfo, options?: CallOptions): AsyncGenerator { - const 
schema = decodeSchemaFromFlightInfo(info); - if (!schema) { - throw new Error("FlightInfo must have a schema"); - } - - // oxlint-disable-next-line no-this-alias - const client = this; - - return (async function* () { - for (const endpoint of info.endpoint) { - if (endpoint.ticket === undefined) { - continue; - } - - yield* client.doGet(endpoint.ticket, { schema, ...options }); - } - })(); - } - - /** - * Handshake between client and server. - * - * Depending on the server, the handshake may be required to determine the - * token that should be used for future operations. Both request and response - * are streams to allow multiple round-trips depending on auth mechanism. - */ - handshake( - request: AsyncIterable, - options?: CallOptions, - ): AsyncIterable { - return this.client.handshake(request, options); - } - - /** Get a list of available streams given a particular criteria. */ - // listFlights( - // request: proto.arrow_flight.Criteria, - // options?: ClientCallOptions, - // ): AsyncIterable; - - getFlightInfo(request: FlightDescriptor, options?: CallOptions): Promise { - return this.client.getFlightInfo(request, options); - } - - // /** Start a query and get information to poll its execution status. */ - // pollFlightInfo( - // request: proto.arrow_flight.FlightDescriptor, - // options?: ClientCallOptions, - // ): Promise; - - // /** Get the schema for a given FlightDescriptor. */ - // getSchema( - // request: proto.arrow_flight.FlightDescriptor, - // options?: ClientCallOptions, - // ): Promise; - - /** - * Retrieve a single stream associated with a particular descriptor - * associated with the referenced ticket. A Flight can be composed of one or - * more streams where each stream can be retrieved using a separate opaque - * ticket that the flight service uses for managing a collection of streams. 
- */ - doGet(request: Ticket, options: { schema: Schema } & CallOptions): AsyncIterable { - const { schema: expectedSchema } = options; - return decodeFlightDataStream(this.client.doGet(request, options), { - expectedSchema, - }); - } - - /** - * Push a stream to the flight service associated with a particular - * flight stream. This allows a client of a flight service to upload a stream - * of data. Depending on the particular flight service, a client consumer - * could be allowed to upload a single stream per descriptor or an unlimited - * number. In the latter, the service might implement a 'seal' action that - * can be applied to a descriptor once all streams are uploaded. - */ - doPut(request: AsyncIterable, options?: CallOptions): AsyncIterable { - return this.client.doPut(request, options); - } - - // /** Open a bidirectional data channel for a given descriptor. */ - // doExchange( - // request: AsyncIterable, - // options?: ClientCallOptions, - // ): AsyncIterable; - - // /** Execute a specific action against the flight service. */ - // doAction( - // request: proto.arrow_flight.Action, - // options?: ClientCallOptions, - // ): AsyncIterable; - - // /** Get all available action types. 
*/ - // listActions( - // request: proto.arrow_flight.Empty, - // options?: ClientCallOptions, - // ): AsyncIterable; -} diff --git a/packages/flight/src/arrow-utils.ts b/packages/flight/src/arrow-utils.ts index bdccfb6..c583bee 100644 --- a/packages/flight/src/arrow-utils.ts +++ b/packages/flight/src/arrow-utils.ts @@ -1,6 +1,7 @@ import { Message, type Schema } from "apache-arrow"; import type { FlightData, FlightInfo } from "./proto/Flight"; +import type { RecordBatchWithMetadata } from "./record-batch-with-metadata"; import { RecordBatchStreamReaderFromFlightData } from "./record-batch-decoder"; @@ -24,7 +25,7 @@ export function getMessageSchema(message: Message): Schema | undefined { export function decodeFlightDataStream( stream: AsyncIterable, { expectedSchema: _expectedSchema }: { expectedSchema: Schema }, -) { +): AsyncIterable { // TODO: we want to validate the schema of the stream? return new RecordBatchStreamReaderFromFlightData(stream); } diff --git a/packages/flight/src/flight-client-error.ts b/packages/flight/src/flight-client-error.ts new file mode 100644 index 0000000..2352ea1 --- /dev/null +++ b/packages/flight/src/flight-client-error.ts @@ -0,0 +1,27 @@ +import { Data } from "effect"; + +export class FlightClientError extends Data.TaggedError("FlightClientError")<{ + readonly message: string; + readonly cause?: unknown; +}> {} + +export class FlightTransportError extends Data.TaggedError("FlightTransportError")<{ + readonly message: string; + readonly cause?: unknown; +}> {} + +export class FlightDecodeError extends Data.TaggedError("FlightDecodeError")<{ + readonly message: string; + readonly cause?: unknown; +}> {} + +export class FlightProtocolError extends Data.TaggedError("FlightProtocolError")<{ + readonly message: string; + readonly cause?: unknown; +}> {} + +export type FlightClientErrorLike = + | FlightClientError + | FlightTransportError + | FlightDecodeError + | FlightProtocolError; diff --git a/packages/flight/src/index.ts 
b/packages/flight/src/index.ts index 6ed23f5..9337fef 100644 --- a/packages/flight/src/index.ts +++ b/packages/flight/src/index.ts @@ -1,12 +1,20 @@ -export { Metadata } from "nice-grpc"; -export { ArrowFlightClient } from "./arrow-flight"; -export { ArrowFlightSqlClient } from "./arrow-flight-sql"; +export * as ArrowFlightClient from "./arrow-flight-client"; +export * as ArrowFlightSqlClient from "./arrow-flight-sql-client"; +export * as FlightClientError from "./flight-client-error"; +export type { ArrowFlightClientOptions, ArrowFlightClientService } from "./arrow-flight-client"; +export type { + ArrowFlightSqlClientOptions, + ArrowFlightSqlClientService, +} from "./arrow-flight-sql-client"; +export type { FlightClientErrorLike } from "./flight-client-error"; export { FlightDataEncoder } from "./flight-data-encoder"; export { FlightData, FlightDescriptor, FlightDescriptor_DescriptorType, + FlightInfo, PutResult, Ticket, } from "./proto/Flight"; export { type ClientOptions, createChannelFromConfig, type HostOrChannel } from "./proto-utils"; +export type { RecordBatchWithMetadata } from "./record-batch-with-metadata"; diff --git a/packages/flight/src/record-batch-decoder.ts b/packages/flight/src/record-batch-decoder.ts index 260d8d8..92460af 100644 --- a/packages/flight/src/record-batch-decoder.ts +++ b/packages/flight/src/record-batch-decoder.ts @@ -49,6 +49,7 @@ import { CompressedVectorLoader, VectorLoader } from "apache-arrow/visitor/vecto import * as flatbuffers from "flatbuffers"; import type { FlightData } from "./proto/Flight"; +import type { RecordBatchWithMetadata } from "./record-batch-with-metadata"; const invalidMessageType = (type: MessageHeader) => `Expected ${MessageHeader[type]} Message in stream, but was null or length 0.`; @@ -240,7 +241,7 @@ abstract class RecordBatchReaderImpl implements RecordB export class RecordBatchStreamReaderFromFlightData extends RecordBatchReaderImpl - implements AsyncIterableIterator> + implements 
AsyncIterableIterator> { protected _reader: AsyncIterator; @@ -261,8 +262,8 @@ export class RecordBatchStreamReaderFromFlightData // return true; // } - public [Symbol.asyncIterator](): AsyncIterableIterator> { - return this as AsyncIterableIterator>; + public [Symbol.asyncIterator](): AsyncIterableIterator> { + return this as AsyncIterableIterator>; } /* @@ -320,7 +321,13 @@ export class RecordBatchStreamReaderFromFlightData this._recordBatchIndex++; const header = message.header(); const recordBatch = this._loadRecordBatch(header, flight.dataBody); - return { done: false, value: recordBatch }; + return { + done: false, + value: { + batch: recordBatch, + appMetadata: flight.appMetadata, + }, + }; } else if (message.isDictionaryBatch()) { this._dictionaryIndex++; const header = message.header(); @@ -333,7 +340,10 @@ export class RecordBatchStreamReaderFromFlightData this._recordBatchIndex++; return { done: false, - value: new _InternalEmptyPlaceholderRecordBatch(this.schema), + value: { + batch: new _InternalEmptyPlaceholderRecordBatch(this.schema), + appMetadata: new Uint8Array(0), + }, }; } diff --git a/packages/flight/src/record-batch-with-metadata.ts b/packages/flight/src/record-batch-with-metadata.ts new file mode 100644 index 0000000..fe29236 --- /dev/null +++ b/packages/flight/src/record-batch-with-metadata.ts @@ -0,0 +1,6 @@ +import type { RecordBatch, TypeMap } from "apache-arrow"; + +export type RecordBatchWithMetadata = { + readonly batch: RecordBatch; + readonly appMetadata: Uint8Array; +}; diff --git a/packages/flight/test/arrow-flight-sql.test.ts b/packages/flight/test/arrow-flight-sql.test.ts index f38e66d..ca96ca4 100644 --- a/packages/flight/test/arrow-flight-sql.test.ts +++ b/packages/flight/test/arrow-flight-sql.test.ts @@ -1,47 +1,61 @@ -import { describe, expect, it } from "@effect/vitest"; +import { expect, layer } from "@effect/vitest"; import { TestWings } from "@useairfoil/wings-testing"; -import { Effect } from "effect"; +import { Effect, 
Layer, Stream } from "effect"; import { ChannelCredentials } from "nice-grpc"; import { Metadata } from "nice-grpc-common"; import type { FlightInfo } from "../src/proto/Flight"; -import { ArrowFlightSqlClient } from "../src/arrow-flight-sql"; +import { ArrowFlightSqlClient } from "../src"; -const wingsLayer = TestWings.container; +const sqlClientLayer = Layer.effect(ArrowFlightSqlClient.ArrowFlightSqlClient)( + Effect.gen(function* () { + const wings = yield* TestWings.Instance; + const host = yield* wings.grpcHostAndPort; + + return yield* ArrowFlightSqlClient.make({ + host, + credentials: ChannelCredentials.createInsecure(), + defaultCallOptions: { + "*": { + metadata: Metadata({ + "x-wings-namespace": "tenants/default/namespaces/default", + }), + }, + }, + }); + }), +).pipe(Layer.provide(TestWings.container)); -describe("ArrowFlightSqlClient", () => { +layer(sqlClientLayer, { timeout: "30 seconds" })("ArrowFlightSqlClient", (it) => { it.effect("get catalogs", () => - it.flakyTest( - Effect.gen(function* () { - const client = yield* createClient(); + Effect.gen(function* () { + const client = yield* ArrowFlightSqlClient.ArrowFlightSqlClient; + + const flightInfo = yield* client.getCatalogs({}); + const data = yield* executeFlightInfo(client, flightInfo); - const flightInfo = yield* Effect.promise(() => client.getCatalogs({})); - const data = yield* executeFlightInfo(client, flightInfo); - expect(data).toMatchInlineSnapshot(` + expect(data).toMatchInlineSnapshot(` [ { "catalog_name": "wings", }, ] `); - }).pipe(Effect.provide(wingsLayer), Effect.scoped), - "30 second", - ), + }), ); it.effect("get db schema", () => - it.flakyTest( - Effect.gen(function* () { - const client = yield* createClient(); - const flightInfo = yield* Effect.promise(() => - client.getDbSchemas({ - catalog: "wings", - }), - ); + Effect.gen(function* () { + const client = yield* ArrowFlightSqlClient.ArrowFlightSqlClient; - const data = yield* executeFlightInfo(client, flightInfo); - 
expect(data).toMatchInlineSnapshot(` + const flightInfo = yield* client.getDbSchemas({ + catalog: "wings", + }); + + const data = yield* executeFlightInfo(client, flightInfo); + + expect(data).toMatchInlineSnapshot(` [ { "catalog_name": "wings", @@ -53,25 +67,22 @@ describe("ArrowFlightSqlClient", () => { }, ] `); - }).pipe(Effect.provide(wingsLayer), Effect.scoped), - "30 second", - ), + }), ); it.effect("get tables", () => - it.flakyTest( - Effect.gen(function* () { - const client = yield* createClient(); - const flightInfo = yield* Effect.promise(() => - client.getTables({ - catalog: "wings", - includeSchema: false, - tableTypes: [], - }), - ); + Effect.gen(function* () { + const client = yield* ArrowFlightSqlClient.ArrowFlightSqlClient; - const data = yield* executeFlightInfo(client, flightInfo); - expect(data).toMatchInlineSnapshot(` + const flightInfo = yield* client.getTables({ + catalog: "wings", + includeSchema: false, + tableTypes: [], + }); + + const data = yield* executeFlightInfo(client, flightInfo); + + expect(data).toMatchInlineSnapshot(` [ { "catalog_name": "wings", @@ -111,54 +122,28 @@ describe("ArrowFlightSqlClient", () => { }, ] `); - }).pipe(Effect.provide(wingsLayer), Effect.scoped), - "30 second", - ), + }), ); it.effect("sql query", () => - it.flakyTest( - Effect.gen(function* () { - const client = yield* createClient(); - const flightInfo = yield* Effect.promise(() => - client.executeQuery({ - query: "show tables", - }), - ); + Effect.gen(function* () { + const client = yield* ArrowFlightSqlClient.ArrowFlightSqlClient; - const data = yield* executeFlightInfo(client, flightInfo); - expect(data).toHaveLength(13); - }).pipe(Effect.provide(wingsLayer), Effect.scoped), - "30 second", - ), + const flightInfo = yield* client.executeQuery({ + query: "show tables", + }); + + const data = yield* executeFlightInfo(client, flightInfo); + expect(data).toHaveLength(13); + }), ); }); -const createClient = () => - Effect.gen(function* () { - const wings = 
yield* TestWings.Instance; - const host = yield* wings.grpcHostAndPort; - return new ArrowFlightSqlClient( - { - host, - credentials: ChannelCredentials.createInsecure(), - }, - { - defaultCallOptions: { - "*": { - metadata: Metadata({ - "x-wings-namespace": "tenants/default/namespaces/default", - }), - }, - }, - }, - ); - }); - -const executeFlightInfo = (client: ArrowFlightSqlClient, info: FlightInfo) => - Effect.promise(async () => { - const data = (await Array.fromAsync(client.executeFlightInfo(info))).flatMap((batch) => - batch.toArray(), - ); - return data; - }); +const executeFlightInfo = ( + client: ArrowFlightSqlClient.ArrowFlightSqlClientService, + info: FlightInfo, +) => + client.executeFlightInfo(info).pipe( + Stream.runCollect, + Effect.map((batches) => Array.from(batches).flatMap(({ batch }) => batch.toArray())), + ); From f1076940df934c5c731dcffbfb6b2dc2042069d9 Mon Sep 17 00:00:00 2001 From: jaipaljadeja Date: Wed, 29 Apr 2026 13:47:28 +0530 Subject: [PATCH 02/12] wings: good refactor --- packages/wings/README.md | 249 ++++++++++++++++++ packages/wings/package.json | 40 +++ packages/wings/src/arrow/index.ts | 1 + .../config.ts | 4 +- packages/wings/src/cluster-client/index.ts | 114 ++++++++ packages/wings/src/cluster-client/layer.ts | 233 ++++++++++++++++ .../service.ts | 58 ++-- packages/wings/src/cluster-metadata/index.ts | 204 -------------- packages/wings/src/cluster-metadata/layer.ts | 230 ---------------- packages/wings/src/cluster/arrow-type.ts | 3 +- packages/wings/src/cluster/data-lake.ts | 9 +- packages/wings/src/cluster/namespace.ts | 3 +- packages/wings/src/cluster/object-store.ts | 9 +- packages/wings/src/cluster/topic.ts | 9 +- packages/wings/src/data-plane/fetcher.ts | 36 ++- packages/wings/src/data-plane/index.ts | 4 +- packages/wings/src/data-plane/layer.ts | 83 +++--- packages/wings/src/data-plane/publisher.ts | 15 +- packages/wings/src/data-plane/service.ts | 25 +- packages/wings/src/errors.ts | 24 -- 
.../wings/src/errors/cluster-client-error.ts | 7 + packages/wings/src/errors/config-error.ts | 6 + packages/wings/src/errors/grpc-error.ts | 7 + packages/wings/src/errors/index.ts | 5 + .../wings/src/errors/wings-client-error.ts | 7 + .../wings/src/errors/wings-decode-error.ts | 9 + packages/wings/src/index.ts | 11 +- packages/wings/src/partition-value.ts | 9 + packages/wings/src/schema/annotations.ts | 86 ++++++ packages/wings/src/schema/convert-schema.ts | 218 +++++++++++++++ packages/wings/src/schema/index.ts | 28 +- packages/wings/src/schema/types.ts | 185 +++++++++++++ packages/wings/src/topic.ts | 26 +- packages/wings/test/cluster-metadata.test.ts | 96 +++---- packages/wings/test/fetcher.test.ts | 56 ++-- packages/wings/test/publisher.test.ts | 34 +-- packages/wings/test/setup.ts | 3 + packages/wings/test/wings-converter.test.ts | 116 ++++---- packages/wings/tsdown.config.ts | 12 +- packages/wings/vitest.config.ts | 2 + 40 files changed, 1497 insertions(+), 779 deletions(-) create mode 100644 packages/wings/src/arrow/index.ts rename packages/wings/src/{cluster-metadata => cluster-client}/config.ts (75%) create mode 100644 packages/wings/src/cluster-client/index.ts create mode 100644 packages/wings/src/cluster-client/layer.ts rename packages/wings/src/{cluster-metadata => cluster-client}/service.ts (83%) delete mode 100644 packages/wings/src/cluster-metadata/index.ts delete mode 100644 packages/wings/src/cluster-metadata/layer.ts delete mode 100644 packages/wings/src/errors.ts create mode 100644 packages/wings/src/errors/cluster-client-error.ts create mode 100644 packages/wings/src/errors/config-error.ts create mode 100644 packages/wings/src/errors/grpc-error.ts create mode 100644 packages/wings/src/errors/index.ts create mode 100644 packages/wings/src/errors/wings-client-error.ts create mode 100644 packages/wings/src/errors/wings-decode-error.ts create mode 100644 packages/wings/src/schema/annotations.ts create mode 100644 
packages/wings/src/schema/convert-schema.ts create mode 100644 packages/wings/src/schema/types.ts create mode 100644 packages/wings/test/setup.ts diff --git a/packages/wings/README.md b/packages/wings/README.md index bc4e9d3..2775a95 100644 --- a/packages/wings/README.md +++ b/packages/wings/README.md @@ -1 +1,250 @@ # @useairfoil/wings + +Effect-first TypeScript client toolkit for working with Airfoil cluster metadata, +topic schemas, and the Wings data plane. + +## Modules + +The package root is intentionally module-first: + +```ts +import { + Arrow, + Cluster, + ClusterClient, + Partition, + Schema, + Topic, + WingsClient, + ClusterClientError, + WingsError, +} from "@useairfoil/wings"; +``` + +Lowercase subpath exports are also available: + +```ts +import { Types, convertSchema, FieldId, TimeUnit } from "@useairfoil/wings/schema"; +import * as ClusterClient from "@useairfoil/wings/cluster-client"; +import * as WingsClient from "@useairfoil/wings/wings-client"; +import * as Arrow from "@useairfoil/wings/arrow"; +import * as Topic from "@useairfoil/wings/topic"; +import * as Partition from "@useairfoil/wings/partition"; +``` + +## Schema + +Schema helpers are exposed from `@useairfoil/wings/schema`. + +```ts +import { + FieldId, + FieldMetadata, + SchemaMetadata, + TimeUnit, + Types, + convertSchema, +} from "@useairfoil/wings/schema"; + +const Customer = Types.Struct({ + id: Types.String.annotate({ + [FieldId]: 1n, + [FieldMetadata]: { pii: "true" }, + }), + active: Types.NullOr(Types.Bool).annotate({ + [FieldId]: 2n, + }), + createdAt: Types.Timestamp(TimeUnit.MILLISECOND, "UTC").annotate({ + [FieldId]: 3n, + }), +}).annotate({ + [SchemaMetadata]: { source: "example" }, +}); + +const arrowSchema = convertSchema(Customer); +``` + +Available type builders: `String`, `Bool`, `Binary`, `Int8/16/32/64`, `UInt8/16/32/64`, `Float16/32/64`, `Date32/64`, `Timestamp(unit, tz?)`, `Duration(unit)`, `List(item)`, `Struct(fields)`, `NullOr(schema)`. 
+ +`TimeUnit` values: `SECOND`, `MILLISECOND`, `MICROSECOND`, `NANOSECOND`. + +## Cluster Client + +`ClusterClient` is the Effect service for cluster metadata operations — tenants, namespaces, topics, object stores, and data lakes. + +```ts +import { Config, Effect } from "effect"; +import { ClusterClient } from "@useairfoil/wings"; + +const clusterLayer = ClusterClient.layer({ host: "localhost:7000" }); + +const clusterConfigLayer = ClusterClient.layerConfig({ + host: Config.string("WINGS_CLUSTER_URL").pipe(Config.withDefault("localhost:7000")), +}); +``` + +Example operations: + +```ts +const program = Effect.gen(function* () { + const topic = yield* ClusterClient.createTopic({ + parent: "tenants/default/namespaces/default", + topicId: "users", + fields: [{ name: "id", dataType: "Int32", nullable: false, id: 1n }], + compaction: { + freshnessSeconds: 60n, + ttlSeconds: undefined, + targetFileSizeBytes: 1024n * 1024n, + }, + }); + + const { topics } = yield* ClusterClient.listTopics({ + parent: "tenants/default/namespaces/default", + }); +}).pipe(Effect.provide(clusterLayer)); +``` + +All entity types expose `create`, `get`, `list`, and `delete` operations. + +## Wings Client + +`WingsClient` is the Effect service for data-plane fetch and publish operations. + +```ts +import { Config, Effect } from "effect"; +import { WingsClient } from "@useairfoil/wings"; + +const wingsLayer = WingsClient.layer({ + host: "localhost:7777", + namespace: "tenants/default/namespaces/default", +}); + +const wingsConfigLayer = WingsClient.layerConfig({ + host: Config.string("WINGS_URL").pipe(Config.withDefault("localhost:7777")), + namespace: Config.string("WINGS_NAMESPACE").pipe( + Config.withDefault("tenants/default/namespaces/default"), + ), +}); +``` + +### Fetch + +`WingsClient.fetch` returns a stream that continuously polls a topic for new data. 
+ +```ts +import { Effect, Stream } from "effect"; +import { WingsClient } from "@useairfoil/wings"; + +const program = Effect.gen(function* () { + const stream = yield* WingsClient.fetch({ + topic, + offset: 0n, + minBatchSize: 1, + maxBatchSize: 100, + }); + + yield* stream.pipe(Stream.take(10), Stream.runDrain); +}).pipe(Effect.provide(wingsLayer)); +``` + +`FetchOptions`: + +| Field | Type | Default | +| ---------------- | ---------------- | -------- | +| `topic` | `Topic` | required | +| `partitionValue` | `PartitionValue` | — | +| `offset` | `bigint` | `0n` | +| `minBatchSize` | `number` | `1` | +| `maxBatchSize` | `number` | `100` | + +### Publish + +`WingsClient.publisher` creates a publisher bound to the `WingsClient` layer lifetime. + +```ts +const program = Effect.gen(function* () { + const pub = yield* WingsClient.publisher({ topic }); + const committed = yield* pub.push({ batch }); +}).pipe(Effect.provide(wingsLayer)); +``` + +Pass a `partitionValue` at publisher creation or override it per push: + +```ts +const pub = + yield * + WingsClient.publisher({ + topic, + partitionValue: Partition.PV.stringValue("tenant-a"), + }); + +yield * pub.push({ batch, partitionValue: Partition.PV.stringValue("tenant-b") }); +``` + +### Accessors + +```ts +const clusterClient = yield * WingsClient.clusterClient; +const flightClient = yield * WingsClient.flightClient; +``` + +## Topic Helpers + +```ts +import { Topic } from "@useairfoil/wings"; + +const schema = yield * Topic.topicSchema(topic); // Effect, safe +const schema = Topic.topicSchemaUnsafe(topic); // throws on invalid schema +const bytes = Topic.encodeTopicSchema(schema); +``` + +## Partition Helpers + +```ts +import { Partition } from "@useairfoil/wings"; + +Partition.PV.int32(42); +Partition.PV.int64(999n); +Partition.PV.stringValue("tenant-a"); +Partition.PV.bytesValue(new Uint8Array([1, 2, 3])); +Partition.PV.boolValue(true); +Partition.PV.null(); +// also: int8, int16, uint8, uint16, uint32, uint64 +``` + 
+## Arrow Helpers + +```ts +import { Arrow } from "@useairfoil/wings"; + +const table = Arrow.tableFromJSON([{ id: 1, name: "Alice" }]); +const table = Arrow.recordBatchToTable(batches); +const { rows, columns } = Arrow.arrowTableToRowColumns(table); + +const bytes = Arrow.serializeFieldsToSchemaBytes(fields); +const schema = Arrow.deserializeSchemaBytesToSchema(bytes); +``` + +## Errors + +All error classes extend Effect's `Data.TaggedError` and are exported from the package root. + +```ts +import { + ClusterClientError, + WingsError, + WingsDecodeError, + ConfigError, + GrpcError, +} from "@useairfoil/wings"; +``` + +| Class | When thrown | +| -------------------- | -------------------------------- | +| `ClusterClientError` | Cluster metadata operations | +| `WingsError` | Data-plane fetch/publish | +| `WingsDecodeError` | Schema or type decoding | +| `ConfigError` | Invalid or missing configuration | +| `GrpcError` | gRPC communication | + +Each error has `message`, optional `code`, and optional `cause`. 
diff --git a/packages/wings/package.json b/packages/wings/package.json index 53123f5..cdaf0da 100644 --- a/packages/wings/package.json +++ b/packages/wings/package.json @@ -21,10 +21,50 @@ "import": "./dist/cluster/index.js", "default": "./dist/cluster/index.js" }, + "./cluster": { + "types": "./dist/cluster/index.d.ts", + "import": "./dist/cluster/index.js", + "default": "./dist/cluster/index.js" + }, + "./cluster-client": { + "types": "./dist/cluster-client/index.d.ts", + "import": "./dist/cluster-client/index.js", + "default": "./dist/cluster-client/index.js" + }, "./Schema": { "types": "./dist/schema/index.d.ts", "import": "./dist/schema/index.js", "default": "./dist/schema/index.js" + }, + "./schema": { + "types": "./dist/schema/index.d.ts", + "import": "./dist/schema/index.js", + "default": "./dist/schema/index.js" + }, + "./wings-client": { + "types": "./dist/data-plane/index.d.ts", + "import": "./dist/data-plane/index.js", + "default": "./dist/data-plane/index.js" + }, + "./arrow": { + "types": "./dist/arrow/index.d.ts", + "import": "./dist/arrow/index.js", + "default": "./dist/arrow/index.js" + }, + "./errors": { + "types": "./dist/errors/index.d.ts", + "import": "./dist/errors/index.js", + "default": "./dist/errors/index.js" + }, + "./partition": { + "types": "./dist/partition-value.d.ts", + "import": "./dist/partition-value.js", + "default": "./dist/partition-value.js" + }, + "./topic": { + "types": "./dist/topic.d.ts", + "import": "./dist/topic.js", + "default": "./dist/topic.js" } }, "scripts": { diff --git a/packages/wings/src/arrow/index.ts b/packages/wings/src/arrow/index.ts new file mode 100644 index 0000000..ec6e905 --- /dev/null +++ b/packages/wings/src/arrow/index.ts @@ -0,0 +1 @@ +export * from "../lib/arrow"; diff --git a/packages/wings/src/cluster-metadata/config.ts b/packages/wings/src/cluster-client/config.ts similarity index 75% rename from packages/wings/src/cluster-metadata/config.ts rename to packages/wings/src/cluster-client/config.ts 
index 3a5b70c..cdb86a9 100644 --- a/packages/wings/src/cluster-metadata/config.ts +++ b/packages/wings/src/cluster-client/config.ts @@ -1,9 +1,9 @@ import type { CallOptions } from "nice-grpc"; /** - * Configuration shape for ClusterMetadata client + * Configuration shape for ClusterClient */ -export interface ClusterMetadataParams { +export interface ClusterClientOptions { /** * The gRPC host address * @example "localhost:7777" diff --git a/packages/wings/src/cluster-client/index.ts b/packages/wings/src/cluster-client/index.ts new file mode 100644 index 0000000..93c2a4c --- /dev/null +++ b/packages/wings/src/cluster-client/index.ts @@ -0,0 +1,114 @@ +import { ClusterClient, type ClusterClientService } from "./service"; + +export type { ClusterClientOptions } from "./config"; + +export { layer, layerConfig, make } from "./layer"; + +export { ClusterClient, type ClusterClientService } from "./service"; + +type ClusterClientFnParams = Parameters< + ClusterClientService[T] +>; + +/** + * Access the underlying protobuf client. + * Most applications should prefer the higher-level ClusterClient helpers. + */ +export const getProtobufClient = () => + ClusterClient.useSync((service) => service.getProtobufClient()); + +/** + * Creates a new tenant. + */ +export const createTenant = (...args: ClusterClientFnParams<"createTenant">) => + ClusterClient.use((service) => service.createTenant(...args)); + +/** + * Gets a tenant by name. + */ +export const getTenant = (...args: ClusterClientFnParams<"getTenant">) => + ClusterClient.use((service) => service.getTenant(...args)); + +/** + * Lists tenants with pagination. + */ +export const listTenants = (...args: ClusterClientFnParams<"listTenants">) => + ClusterClient.use((service) => service.listTenants(...args)); + +/** + * Deletes a tenant. + */ +export const deleteTenant = (...args: ClusterClientFnParams<"deleteTenant">) => + ClusterClient.use((service) => service.deleteTenant(...args)); + +/** + * Creates a new namespace. 
+ */ +export const createNamespace = (...args: ClusterClientFnParams<"createNamespace">) => + ClusterClient.use((service) => service.createNamespace(...args)); + +/** + * Gets a namespace by name. + */ +export const getNamespace = (...args: ClusterClientFnParams<"getNamespace">) => + ClusterClient.use((service) => service.getNamespace(...args)); + +/** + * Lists namespaces with pagination. + */ +export const listNamespaces = (...args: ClusterClientFnParams<"listNamespaces">) => + ClusterClient.use((service) => service.listNamespaces(...args)); + +/** + * Deletes a namespace. + */ +export const deleteNamespace = (...args: ClusterClientFnParams<"deleteNamespace">) => + ClusterClient.use((service) => service.deleteNamespace(...args)); + +/** Creates a new topic. */ +export const createTopic = (...args: ClusterClientFnParams<"createTopic">) => + ClusterClient.use((service) => service.createTopic(...args)); + +/** Gets a topic by name. */ +export const getTopic = (...args: ClusterClientFnParams<"getTopic">) => + ClusterClient.use((service) => service.getTopic(...args)); + +/** Lists topics with pagination. */ +export const listTopics = (...args: ClusterClientFnParams<"listTopics">) => + ClusterClient.use((service) => service.listTopics(...args)); + +/** Deletes a topic. */ +export const deleteTopic = (...args: ClusterClientFnParams<"deleteTopic">) => + ClusterClient.use((service) => service.deleteTopic(...args)); + +/** Creates a new object store. */ +export const createObjectStore = (...args: ClusterClientFnParams<"createObjectStore">) => + ClusterClient.use((service) => service.createObjectStore(...args)); + +/** Gets an object store by name. */ +export const getObjectStore = (...args: ClusterClientFnParams<"getObjectStore">) => + ClusterClient.use((service) => service.getObjectStore(...args)); + +/** Lists object stores with pagination. 
*/ +export const listObjectStores = (...args: ClusterClientFnParams<"listObjectStores">) => + ClusterClient.use((service) => service.listObjectStores(...args)); + +/** Deletes an object store. */ +export const deleteObjectStore = (...args: ClusterClientFnParams<"deleteObjectStore">) => + ClusterClient.use((service) => service.deleteObjectStore(...args)); + +/** Creates a new data lake. */ +export const createDataLake = (...args: ClusterClientFnParams<"createDataLake">) => + ClusterClient.use((service) => service.createDataLake(...args)); + +/** Gets a data lake by name. */ +export const getDataLake = (...args: ClusterClientFnParams<"getDataLake">) => + ClusterClient.use((service) => service.getDataLake(...args)); + +/** Lists data lakes with pagination. */ +export const listDataLakes = (...args: ClusterClientFnParams<"listDataLakes">) => + ClusterClient.use((service) => service.listDataLakes(...args)); + +/** Deletes a data lake. */ +export const deleteDataLake = (...args: ClusterClientFnParams<"deleteDataLake">) => + ClusterClient.use((service) => service.deleteDataLake(...args)); diff --git a/packages/wings/src/cluster-client/layer.ts b/packages/wings/src/cluster-client/layer.ts new file mode 100644 index 0000000..055cf90 --- /dev/null +++ b/packages/wings/src/cluster-client/layer.ts @@ -0,0 +1,233 @@ +import { createChannelFromConfig } from "@useairfoil/flight"; +import { Config, Effect, Layer, Scope } from "effect"; +import { type CallOptions, createClient } from "nice-grpc"; + +import type { ClusterClientOptions } from "./config"; + +import * as ClusterSchema from "../cluster"; +import { ClusterClientError } from "../errors"; +import { + type ClusterMetadataServiceClient, + ClusterMetadataServiceDefinition, +} from "../proto/wings/v1/cluster_metadata"; +import { ClusterClient, type ClusterClientService } from "./service"; + +/** + * Creates the ClusterClient service implementation from config. 
+ * + * @example + * ```typescript + * const clusterClient = yield* ClusterClient.make({ + * host: "localhost:7000" + * }); + * ``` + */ +export const make = Effect.fnUntraced(function* ( + config: ClusterClientOptions, +): Effect.fn.Return { + const channel = createChannelFromConfig({ host: config.host }); + const grpcClient: ClusterMetadataServiceClient = createClient( + ClusterMetadataServiceDefinition, + channel, + ); + + const scope = yield* Scope.Scope; + yield* Scope.addFinalizer( + scope, + Effect.sync(() => { + channel.close(); + }), + ); + + const makeGrpcCall = + ( + grpcMethod: (req: ProtoReq, options?: CallOptions) => Promise, + toProto: (req: Req) => ProtoReq, + fromProto: (res: ProtoRes) => Res, + ) => + (req: Req, options?: CallOptions) => + Effect.tryPromise({ + try: async () => { + const protoReq = toProto(req); + const mergedOptions = + config.callOptions || options ? { ...config.callOptions, ...options } : undefined; + const protoRes = await grpcMethod.call(grpcClient, protoReq, mergedOptions); + return fromProto(protoRes); + }, + catch: handleGrpcError, + }); + + const service: ClusterClientService = { + getProtobufClient: () => grpcClient, + + createTenant: makeGrpcCall( + grpcClient.createTenant, + ClusterSchema.Tenant.Codec.CreateTenantRequest.toProto, + ClusterSchema.Tenant.Codec.Tenant.fromProto, + ), + + getTenant: makeGrpcCall( + grpcClient.getTenant, + ClusterSchema.Tenant.Codec.GetTenantRequest.toProto, + ClusterSchema.Tenant.Codec.Tenant.fromProto, + ), + + listTenants: makeGrpcCall( + grpcClient.listTenants, + ClusterSchema.Tenant.Codec.ListTenantsRequest.toProto, + ClusterSchema.Tenant.Codec.ListTenantsResponse.fromProto, + ), + + deleteTenant: makeGrpcCall( + grpcClient.deleteTenant, + ClusterSchema.Tenant.Codec.DeleteTenantRequest.toProto, + () => undefined, + ), + + createNamespace: makeGrpcCall( + grpcClient.createNamespace, + ClusterSchema.Namespace.Codec.CreateNamespaceRequest.toProto, + 
ClusterSchema.Namespace.Codec.Namespace.fromProto, + ), + + getNamespace: makeGrpcCall( + grpcClient.getNamespace, + ClusterSchema.Namespace.Codec.GetNamespaceRequest.toProto, + ClusterSchema.Namespace.Codec.Namespace.fromProto, + ), + + listNamespaces: makeGrpcCall( + grpcClient.listNamespaces, + ClusterSchema.Namespace.Codec.ListNamespacesRequest.toProto, + ClusterSchema.Namespace.Codec.ListNamespacesResponse.fromProto, + ), + + deleteNamespace: makeGrpcCall( + grpcClient.deleteNamespace, + ClusterSchema.Namespace.Codec.DeleteNamespaceRequest.toProto, + () => undefined, + ), + + createTopic: makeGrpcCall( + grpcClient.createTopic, + ClusterSchema.Topic.Codec.CreateTopicRequest.toProto, + ClusterSchema.Topic.Codec.Topic.fromProto, + ), + + getTopic: makeGrpcCall( + grpcClient.getTopic, + ClusterSchema.Topic.Codec.GetTopicRequest.toProto, + ClusterSchema.Topic.Codec.Topic.fromProto, + ), + + listTopics: makeGrpcCall( + grpcClient.listTopics, + ClusterSchema.Topic.Codec.ListTopicsRequest.toProto, + ClusterSchema.Topic.Codec.ListTopicsResponse.fromProto, + ), + + deleteTopic: makeGrpcCall( + grpcClient.deleteTopic, + ClusterSchema.Topic.Codec.DeleteTopicRequest.toProto, + () => undefined, + ), + + createObjectStore: makeGrpcCall( + grpcClient.createObjectStore, + ClusterSchema.ObjectStore.Codec.CreateObjectStoreRequest.toProto, + ClusterSchema.ObjectStore.Codec.ObjectStore.fromProto, + ), + + getObjectStore: makeGrpcCall( + grpcClient.getObjectStore, + ClusterSchema.ObjectStore.Codec.GetObjectStoreRequest.toProto, + ClusterSchema.ObjectStore.Codec.ObjectStore.fromProto, + ), + + listObjectStores: makeGrpcCall( + grpcClient.listObjectStores, + ClusterSchema.ObjectStore.Codec.ListObjectStoresRequest.toProto, + ClusterSchema.ObjectStore.Codec.ListObjectStoresResponse.fromProto, + ), + + deleteObjectStore: makeGrpcCall( + grpcClient.deleteObjectStore, + ClusterSchema.ObjectStore.Codec.DeleteObjectStoreRequest.toProto, + () => undefined, + ), + + createDataLake: 
makeGrpcCall( + grpcClient.createDataLake, + ClusterSchema.DataLake.Codec.CreateDataLakeRequest.toProto, + ClusterSchema.DataLake.Codec.DataLake.fromProto, + ), + + getDataLake: makeGrpcCall( + grpcClient.getDataLake, + ClusterSchema.DataLake.Codec.GetDataLakeRequest.toProto, + ClusterSchema.DataLake.Codec.DataLake.fromProto, + ), + + listDataLakes: makeGrpcCall( + grpcClient.listDataLakes, + ClusterSchema.DataLake.Codec.ListDataLakesRequest.toProto, + ClusterSchema.DataLake.Codec.ListDataLakesResponse.fromProto, + ), + + deleteDataLake: makeGrpcCall( + grpcClient.deleteDataLake, + ClusterSchema.DataLake.Codec.DeleteDataLakeRequest.toProto, + () => undefined, + ), + }; + + return ClusterClient.of(service); +}); + +/** + * Creates a ClusterClient Layer directly from config values. + * + * @param config - The cluster metadata configuration + * + * @example + * ```typescript + * import { ClusterClient } from "@useairfoil/wings"; + * + * const LocalClusterClient = ClusterClient.layer({ + * host: "localhost:7000" + * }); + * ``` + */ +export const layer = (config: ClusterClientOptions) => Layer.effect(ClusterClient)(make(config)); + +/** + * Creates a ClusterClient Layer using Effect's Config module. + * + * Reads configuration from environment variables using the Config module. + * + * @param config - The cluster metadata configuration wrapped in Config + * + * @example + * ```typescript + * import { Config } from "effect"; + * import { ClusterClient } from "@useairfoil/wings"; + * + * const LocalClusterClient = ClusterClient.layerConfig({ + * host: Config.string("WINGS_URL").pipe(Config.withDefault("localhost:7000")), + * }); + * ``` + */ +export const layerConfig = (config: Config.Wrap) => + Layer.effect( + ClusterClient, + Effect.gen(function* () { + const params = yield* Config.unwrap(config); + return yield* make(params); + }), + ); + +const handleGrpcError = (error: unknown) => + new ClusterClientError({ + message: error instanceof Error ? 
error.message : String(error), + cause: error, + }); diff --git a/packages/wings/src/cluster-metadata/service.ts b/packages/wings/src/cluster-client/service.ts similarity index 83% rename from packages/wings/src/cluster-metadata/service.ts rename to packages/wings/src/cluster-client/service.ts index 2d030f6..322652b 100644 --- a/packages/wings/src/cluster-metadata/service.ts +++ b/packages/wings/src/cluster-client/service.ts @@ -3,13 +3,13 @@ import type { CallOptions } from "nice-grpc"; import { Context, type Effect } from "effect"; import type * as ClusterSchema from "../cluster"; -import type { ClusterMetadataError } from "../errors"; +import type { ClusterClientError } from "../errors"; import type { ClusterMetadataServiceClient } from "../proto/wings/v1/cluster_metadata"; /** - * ClusterMetadata Service Interface + * ClusterClient Service Interface */ -export interface ClusterMetadataService { +export interface ClusterClientService { /** * Returns the underlying gRPC client that works with protobuf types. * @@ -22,13 +22,13 @@ export interface ClusterMetadataService { * @example * ```typescript * const program = Effect.gen(function* () { - * const service = yield* ClusterMetadata; + * const service = yield* ClusterClient; * const protobufClient = service.getProtobufClient(); * * // Use protobuf client to get protobuf Topic type * const topic = yield* Effect.tryPromise({ * try: () => protobufClient.getTopic({ name: "..." }), - * catch: (e) => new ClusterMetadataError({ message: String(e), cause: e }) + * catch: (e) => new ClusterClientError({ message: String(e), cause: e }) * }); * }); * ``` @@ -43,7 +43,7 @@ export interface ClusterMetadataService { readonly createTenant: ( req: ClusterSchema.Tenant.CreateTenantRequest, options?: CallOptions, - ) => Effect.Effect; + ) => Effect.Effect; /** * Gets a tenant by name. 
@@ -54,7 +54,7 @@ export interface ClusterMetadataService { readonly getTenant: ( req: ClusterSchema.Tenant.GetTenantRequest, options?: CallOptions, - ) => Effect.Effect; + ) => Effect.Effect; /** * Lists tenants with pagination. @@ -65,7 +65,7 @@ export interface ClusterMetadataService { readonly listTenants: ( req: ClusterSchema.Tenant.ListTenantsRequest, options?: CallOptions, - ) => Effect.Effect; + ) => Effect.Effect; /** * Deletes a tenant. @@ -76,7 +76,7 @@ export interface ClusterMetadataService { readonly deleteTenant: ( req: ClusterSchema.Tenant.DeleteTenantRequest, options?: CallOptions, - ) => Effect.Effect; + ) => Effect.Effect; /** * Creates a new namespace. @@ -87,7 +87,7 @@ export interface ClusterMetadataService { readonly createNamespace: ( req: ClusterSchema.Namespace.CreateNamespaceRequest, options?: CallOptions, - ) => Effect.Effect; + ) => Effect.Effect; /** * Gets a namespace by name. @@ -98,7 +98,7 @@ export interface ClusterMetadataService { readonly getNamespace: ( req: ClusterSchema.Namespace.GetNamespaceRequest, options?: CallOptions, - ) => Effect.Effect; + ) => Effect.Effect; /** * Lists namespaces with pagination. @@ -109,7 +109,7 @@ export interface ClusterMetadataService { readonly listNamespaces: ( req: ClusterSchema.Namespace.ListNamespacesRequest, options?: CallOptions, - ) => Effect.Effect; + ) => Effect.Effect; /** * Deletes a namespace. @@ -120,7 +120,7 @@ export interface ClusterMetadataService { readonly deleteNamespace: ( req: ClusterSchema.Namespace.DeleteNamespaceRequest, options?: CallOptions, - ) => Effect.Effect; + ) => Effect.Effect; /** * Creates a new topic. @@ -131,7 +131,7 @@ export interface ClusterMetadataService { readonly createTopic: ( req: ClusterSchema.Topic.CreateTopicRequest, options?: CallOptions, - ) => Effect.Effect; + ) => Effect.Effect; /** * Gets a topic by name. 
@@ -142,7 +142,7 @@ export interface ClusterMetadataService { readonly getTopic: ( req: ClusterSchema.Topic.GetTopicRequest, options?: CallOptions, - ) => Effect.Effect; + ) => Effect.Effect; /** * Lists topics with pagination. @@ -153,7 +153,7 @@ export interface ClusterMetadataService { readonly listTopics: ( req: ClusterSchema.Topic.ListTopicsRequest, options?: CallOptions, - ) => Effect.Effect; + ) => Effect.Effect; /** * Deletes a topic. @@ -164,7 +164,7 @@ export interface ClusterMetadataService { readonly deleteTopic: ( req: ClusterSchema.Topic.DeleteTopicRequest, options?: CallOptions, - ) => Effect.Effect; + ) => Effect.Effect; /** * Creates a new object store. @@ -175,7 +175,7 @@ export interface ClusterMetadataService { readonly createObjectStore: ( req: ClusterSchema.ObjectStore.CreateObjectStoreRequest, options?: CallOptions, - ) => Effect.Effect; + ) => Effect.Effect; /** * Gets an object store by name. @@ -186,7 +186,7 @@ export interface ClusterMetadataService { readonly getObjectStore: ( req: ClusterSchema.ObjectStore.GetObjectStoreRequest, options?: CallOptions, - ) => Effect.Effect; + ) => Effect.Effect; /** * Lists object stores with pagination. @@ -197,7 +197,7 @@ export interface ClusterMetadataService { readonly listObjectStores: ( req: ClusterSchema.ObjectStore.ListObjectStoresRequest, options?: CallOptions, - ) => Effect.Effect; + ) => Effect.Effect; /** * Deletes an object store. @@ -208,7 +208,7 @@ export interface ClusterMetadataService { readonly deleteObjectStore: ( req: ClusterSchema.ObjectStore.DeleteObjectStoreRequest, options?: CallOptions, - ) => Effect.Effect; + ) => Effect.Effect; /** * Creates a new data lake. @@ -219,7 +219,7 @@ export interface ClusterMetadataService { readonly createDataLake: ( req: ClusterSchema.DataLake.CreateDataLakeRequest, options?: CallOptions, - ) => Effect.Effect; + ) => Effect.Effect; /** * Gets a data lake by name. 
@@ -230,7 +230,7 @@ export interface ClusterMetadataService { readonly getDataLake: ( req: ClusterSchema.DataLake.GetDataLakeRequest, options?: CallOptions, - ) => Effect.Effect; + ) => Effect.Effect; /** * Lists data lakes with pagination. @@ -241,7 +241,7 @@ export interface ClusterMetadataService { readonly listDataLakes: ( req: ClusterSchema.DataLake.ListDataLakesRequest, options?: CallOptions, - ) => Effect.Effect; + ) => Effect.Effect; /** * Deletes a data lake. @@ -252,14 +252,14 @@ export interface ClusterMetadataService { readonly deleteDataLake: ( req: ClusterSchema.DataLake.DeleteDataLakeRequest, options?: CallOptions, - ) => Effect.Effect; + ) => Effect.Effect; } /** - * ClusterMetadata Service Tag + * ClusterClient Service Tag * - * Used to provide and access the ClusterMetadata service in the Effect context. + * Used to provide and access the ClusterClient service in the Effect context. */ -export class ClusterMetadata extends Context.Service()( - "@useairfoil/wings/ClusterMetadata", +export class ClusterClient extends Context.Service()( + "@useairfoil/wings/ClusterClient", ) {} diff --git a/packages/wings/src/cluster-metadata/index.ts b/packages/wings/src/cluster-metadata/index.ts deleted file mode 100644 index d0d0f17..0000000 --- a/packages/wings/src/cluster-metadata/index.ts +++ /dev/null @@ -1,204 +0,0 @@ -import { ClusterMetadata, type ClusterMetadataService } from "./service"; - -export type { ClusterMetadataParams } from "./config"; - -export { layer, layerConfig, make } from "./layer"; - -export { ClusterMetadata, type ClusterMetadataService } from "./service"; - -type ClusterMetadataFnParams = Parameters< - ClusterMetadataService[T] ->; - -/** - * Access the underlying gRPC client that works with protobuf types. - * - * Use this for advanced use cases when you need direct access to the protobuf - * client instead of the Effect Schema-based API. Note: for most use cases, - * the Effect Schema API (like `getTopic`, `createTopic`, etc.) 
is preferred. - * - * @returns Effect that provides access to the protobuf ClusterMetadataServiceClient - * - */ -export const getProtobufClient = () => - ClusterMetadata.useSync((service) => service.getProtobufClient()); - -/** - * Creates a new tenant. - * @param req - The create tenant request - * @param options - Optional gRPC call options - * @returns Effect that resolves to the created tenant - */ -export const createTenant = (...args: ClusterMetadataFnParams<"createTenant">) => - ClusterMetadata.use((service) => service.createTenant(...args)); - -/** - * Gets a tenant by name. - * @param req - The get tenant request - * @param options - Optional gRPC call options - * @returns Effect that resolves to the tenant - */ -export const getTenant = (...args: ClusterMetadataFnParams<"getTenant">) => - ClusterMetadata.use((service) => service.getTenant(...args)); - -/** - * Lists tenants with pagination. - * @param req - The list tenants request - * @param options - Optional gRPC call options - * @returns Effect that resolves to the list of tenants - */ -export const listTenants = (...args: ClusterMetadataFnParams<"listTenants">) => - ClusterMetadata.use((service) => service.listTenants(...args)); - -/** - * Deletes a tenant. - * @param req - The delete tenant request - * @param options - Optional gRPC call options - * @returns Effect that resolves when deletion is complete - */ -export const deleteTenant = (...args: ClusterMetadataFnParams<"deleteTenant">) => - ClusterMetadata.use((service) => service.deleteTenant(...args)); - -/** - * Creates a new namespace. - * @param req - The create namespace request - * @param options - Optional gRPC call options - * @returns Effect that resolves to the created namespace - */ -export const createNamespace = (...args: ClusterMetadataFnParams<"createNamespace">) => - ClusterMetadata.use((service) => service.createNamespace(...args)); - -/** - * Gets a namespace by name. 
- * @param req - The get namespace request - * @param options - Optional gRPC call options - * @returns Effect that resolves to the namespace - */ -export const getNamespace = (...args: ClusterMetadataFnParams<"getNamespace">) => - ClusterMetadata.use((service) => service.getNamespace(...args)); - -/** - * Lists namespaces with pagination. - * @param req - The list namespaces request - * @param options - Optional gRPC call options - * @returns Effect that resolves to the list of namespaces - */ -export const listNamespaces = (...args: ClusterMetadataFnParams<"listNamespaces">) => - ClusterMetadata.use((service) => service.listNamespaces(...args)); - -/** - * Deletes a namespace. - * @param req - The delete namespace request - * @param options - Optional gRPC call options - * @returns Effect that resolves when deletion is complete - */ -export const deleteNamespace = (...args: ClusterMetadataFnParams<"deleteNamespace">) => - ClusterMetadata.use((service) => service.deleteNamespace(...args)); - -/** - * Creates a new topic. - * @param req - The create topic request - * @param options - Optional gRPC call options - * @returns Effect that resolves to the created topic - */ -export const createTopic = (...args: ClusterMetadataFnParams<"createTopic">) => - ClusterMetadata.use((service) => service.createTopic(...args)); - -/** - * Gets a topic by name. - * @param req - The get topic request - * @param options - Optional gRPC call options - * @returns Effect that resolves to the topic - */ -export const getTopic = (...args: ClusterMetadataFnParams<"getTopic">) => - ClusterMetadata.use((service) => service.getTopic(...args)); - -/** - * Lists topics with pagination. 
- * @param req - The list topics request - * @param options - Optional gRPC call options - * @returns Effect that resolves to the list of topics - */ -export const listTopics = (...args: ClusterMetadataFnParams<"listTopics">) => - ClusterMetadata.use((service) => service.listTopics(...args)); - -/** - * Deletes a topic. - * @param req - The delete topic request - * @param options - Optional gRPC call options - * @returns Effect that resolves when deletion is complete - */ -export const deleteTopic = (...args: ClusterMetadataFnParams<"deleteTopic">) => - ClusterMetadata.use((service) => service.deleteTopic(...args)); - -/** - * Creates a new object store. - * @param req - The create object store request - * @param options - Optional gRPC call options - * @returns Effect that resolves to the created object store - */ -export const createObjectStore = (...args: ClusterMetadataFnParams<"createObjectStore">) => - ClusterMetadata.use((service) => service.createObjectStore(...args)); - -/** - * Gets an object store by name. - * @param req - The get object store request - * @param options - Optional gRPC call options - * @returns Effect that resolves to the object store - */ -export const getObjectStore = (...args: ClusterMetadataFnParams<"getObjectStore">) => - ClusterMetadata.use((service) => service.getObjectStore(...args)); - -/** - * Lists object stores with pagination. - * @param req - The list object stores request - * @param options - Optional gRPC call options - * @returns Effect that resolves to the list of object stores - */ -export const listObjectStores = (...args: ClusterMetadataFnParams<"listObjectStores">) => - ClusterMetadata.use((service) => service.listObjectStores(...args)); - -/** - * Deletes an object store. 
- * @param req - The delete object store request - * @param options - Optional gRPC call options - * @returns Effect that resolves when deletion is complete - */ -export const deleteObjectStore = (...args: ClusterMetadataFnParams<"deleteObjectStore">) => - ClusterMetadata.use((service) => service.deleteObjectStore(...args)); - -/** - * Creates a new data lake. - * @param req - The create data lake request - * @param options - Optional gRPC call options - * @returns Effect that resolves to the created data lake - */ -export const createDataLake = (...args: ClusterMetadataFnParams<"createDataLake">) => - ClusterMetadata.use((service) => service.createDataLake(...args)); - -/** - * Gets a data lake by name. - * @param req - The get data lake request - * @param options - Optional gRPC call options - * @returns Effect that resolves to the data lake - */ -export const getDataLake = (...args: ClusterMetadataFnParams<"getDataLake">) => - ClusterMetadata.use((service) => service.getDataLake(...args)); - -/** - * Lists data lakes with pagination. - * @param req - The list data lakes request - * @param options - Optional gRPC call options - * @returns Effect that resolves to the list of data lakes - */ -export const listDataLakes = (...args: ClusterMetadataFnParams<"listDataLakes">) => - ClusterMetadata.use((service) => service.listDataLakes(...args)); - -/** - * Deletes a data lake. 
- * @param req - The delete data lake request - * @param options - Optional gRPC call options - * @returns Effect that resolves when deletion is complete - */ -export const deleteDataLake = (...args: ClusterMetadataFnParams<"deleteDataLake">) => - ClusterMetadata.use((service) => service.deleteDataLake(...args)); diff --git a/packages/wings/src/cluster-metadata/layer.ts b/packages/wings/src/cluster-metadata/layer.ts deleted file mode 100644 index 762582e..0000000 --- a/packages/wings/src/cluster-metadata/layer.ts +++ /dev/null @@ -1,230 +0,0 @@ -import { createChannelFromConfig } from "@useairfoil/flight"; -import { Config, Effect, Layer } from "effect"; -import { type CallOptions, createClient } from "nice-grpc"; - -import type { ClusterMetadataParams } from "./config"; - -import * as ClusterSchema from "../cluster"; -import { ClusterMetadataError } from "../errors"; -import { - type ClusterMetadataServiceClient, - ClusterMetadataServiceDefinition, -} from "../proto/wings/v1/cluster_metadata"; -import { ClusterMetadata, type ClusterMetadataService } from "./service"; - -/** - * Creates the ClusterMetadata service implementation from config. 
- * - * @example - * ```typescript - * const clusterMetadata = yield* WingsClusterMetadata.make({ - * host: "localhost:7000" - * }); - * ``` - */ -export const make = (config: ClusterMetadataParams) => - Effect.gen(function* () { - const channel = createChannelFromConfig({ host: config.host }); - const grpcClient: ClusterMetadataServiceClient = createClient( - ClusterMetadataServiceDefinition, - channel, - ); - - yield* Effect.addFinalizer(() => - Effect.sync(() => { - channel.close(); - }), - ); - - const makeGrpcCall = - ( - grpcMethod: (req: ProtoReq, options?: CallOptions) => Promise, - toProto: (req: Req) => ProtoReq, - fromProto: (res: ProtoRes) => Res, - ) => - (req: Req, options?: CallOptions) => - Effect.tryPromise({ - try: async () => { - const protoReq = toProto(req); - const mergedOptions = - config.callOptions || options ? { ...config.callOptions, ...options } : undefined; - const protoRes = await grpcMethod.call(grpcClient, protoReq, mergedOptions); - return fromProto(protoRes); - }, - catch: handleGrpcError, - }); - - const service: ClusterMetadataService = { - getProtobufClient: () => grpcClient, - - createTenant: makeGrpcCall( - grpcClient.createTenant, - ClusterSchema.Tenant.Codec.CreateTenantRequest.toProto, - ClusterSchema.Tenant.Codec.Tenant.fromProto, - ), - - getTenant: makeGrpcCall( - grpcClient.getTenant, - ClusterSchema.Tenant.Codec.GetTenantRequest.toProto, - ClusterSchema.Tenant.Codec.Tenant.fromProto, - ), - - listTenants: makeGrpcCall( - grpcClient.listTenants, - ClusterSchema.Tenant.Codec.ListTenantsRequest.toProto, - ClusterSchema.Tenant.Codec.ListTenantsResponse.fromProto, - ), - - deleteTenant: makeGrpcCall( - grpcClient.deleteTenant, - ClusterSchema.Tenant.Codec.DeleteTenantRequest.toProto, - () => undefined, - ), - - createNamespace: makeGrpcCall( - grpcClient.createNamespace, - ClusterSchema.Namespace.Codec.CreateNamespaceRequest.toProto, - ClusterSchema.Namespace.Codec.Namespace.fromProto, - ), - - getNamespace: makeGrpcCall( - 
grpcClient.getNamespace, - ClusterSchema.Namespace.Codec.GetNamespaceRequest.toProto, - ClusterSchema.Namespace.Codec.Namespace.fromProto, - ), - - listNamespaces: makeGrpcCall( - grpcClient.listNamespaces, - ClusterSchema.Namespace.Codec.ListNamespacesRequest.toProto, - ClusterSchema.Namespace.Codec.ListNamespacesResponse.fromProto, - ), - - deleteNamespace: makeGrpcCall( - grpcClient.deleteNamespace, - ClusterSchema.Namespace.Codec.DeleteNamespaceRequest.toProto, - () => undefined, - ), - - createTopic: makeGrpcCall( - grpcClient.createTopic, - ClusterSchema.Topic.Codec.CreateTopicRequest.toProto, - ClusterSchema.Topic.Codec.Topic.fromProto, - ), - - getTopic: makeGrpcCall( - grpcClient.getTopic, - ClusterSchema.Topic.Codec.GetTopicRequest.toProto, - ClusterSchema.Topic.Codec.Topic.fromProto, - ), - - listTopics: makeGrpcCall( - grpcClient.listTopics, - ClusterSchema.Topic.Codec.ListTopicsRequest.toProto, - ClusterSchema.Topic.Codec.ListTopicsResponse.fromProto, - ), - - deleteTopic: makeGrpcCall( - grpcClient.deleteTopic, - ClusterSchema.Topic.Codec.DeleteTopicRequest.toProto, - () => undefined, - ), - - createObjectStore: makeGrpcCall( - grpcClient.createObjectStore, - ClusterSchema.ObjectStore.Codec.CreateObjectStoreRequest.toProto, - ClusterSchema.ObjectStore.Codec.ObjectStore.fromProto, - ), - - getObjectStore: makeGrpcCall( - grpcClient.getObjectStore, - ClusterSchema.ObjectStore.Codec.GetObjectStoreRequest.toProto, - ClusterSchema.ObjectStore.Codec.ObjectStore.fromProto, - ), - - listObjectStores: makeGrpcCall( - grpcClient.listObjectStores, - ClusterSchema.ObjectStore.Codec.ListObjectStoresRequest.toProto, - ClusterSchema.ObjectStore.Codec.ListObjectStoresResponse.fromProto, - ), - - deleteObjectStore: makeGrpcCall( - grpcClient.deleteObjectStore, - ClusterSchema.ObjectStore.Codec.DeleteObjectStoreRequest.toProto, - () => undefined, - ), - - createDataLake: makeGrpcCall( - grpcClient.createDataLake, - 
ClusterSchema.DataLake.Codec.CreateDataLakeRequest.toProto, - ClusterSchema.DataLake.Codec.DataLake.fromProto, - ), - - getDataLake: makeGrpcCall( - grpcClient.getDataLake, - ClusterSchema.DataLake.Codec.GetDataLakeRequest.toProto, - ClusterSchema.DataLake.Codec.DataLake.fromProto, - ), - - listDataLakes: makeGrpcCall( - grpcClient.listDataLakes, - ClusterSchema.DataLake.Codec.ListDataLakesRequest.toProto, - ClusterSchema.DataLake.Codec.ListDataLakesResponse.fromProto, - ), - - deleteDataLake: makeGrpcCall( - grpcClient.deleteDataLake, - ClusterSchema.DataLake.Codec.DeleteDataLakeRequest.toProto, - () => undefined, - ), - }; - - return ClusterMetadata.of(service); - }); - -/** - * Creates a ClusterMetadata Layer directly from config values. - * - * @param config - The cluster metadata configuration - * - * @example - * ```typescript - * import { ClusterMetadata } from "@useairfoil/wings"; - * - * const LocalClusterMetadata = ClusterMetadata.layer({ - * host: "localhost:7000" - * }); - * ``` - */ -export const layer = (config: ClusterMetadataParams) => Layer.effect(ClusterMetadata)(make(config)); - -/** - * Creates a ClusterMetadata Layer using Effect's Config module. - * - * Reads configuration from environment variables using the Config module. - * - * @param config - The cluster metadata configuration wrapped in Config - * - * @example - * ```typescript - * import { Config } from "effect"; - * import { ClusterMetadata } from "@useairfoil/wings"; - * - * const LocalClusterMetadata = ClusterMetadata.layerConfig({ - * host: Config.string("WINGS_URL").pipe(Config.withDefault("localhost:7000")), - * }); - * ``` - */ -export const layerConfig = (config: Config.Wrap) => - Layer.effect( - ClusterMetadata, - Effect.gen(function* () { - const params = yield* Config.unwrap(config); - return yield* make(params); - }), - ); - -const handleGrpcError = (error: unknown) => - new ClusterMetadataError({ - message: error instanceof Error ? 
error.message : String(error), - cause: error, - }); diff --git a/packages/wings/src/cluster/arrow-type.ts b/packages/wings/src/cluster/arrow-type.ts index e77cf95..fe70ed0 100644 --- a/packages/wings/src/cluster/arrow-type.ts +++ b/packages/wings/src/cluster/arrow-type.ts @@ -3,6 +3,7 @@ import * as SchemaTransformation from "effect/SchemaTransformation"; import type * as Proto from "../proto/schema/arrow_type"; +import { WingsDecodeError } from "../errors"; import { TimeUnit } from "../proto/schema/arrow_type"; export { TimeUnit }; @@ -298,7 +299,7 @@ function arrowTypeToProto(value: ArrowType): Proto.ArrowType { function arrowTypeFromProto(value: Proto.ArrowType): ArrowType { const e = value.arrowTypeEnum; - if (!e) throw new Error("ArrowType.arrowTypeEnum is undefined"); + if (!e) throw new WingsDecodeError("ArrowType.arrowTypeEnum is undefined"); switch (e.$case) { case "none": return { _tag: "none" }; diff --git a/packages/wings/src/cluster/data-lake.ts b/packages/wings/src/cluster/data-lake.ts index 9df034c..34ffee4 100644 --- a/packages/wings/src/cluster/data-lake.ts +++ b/packages/wings/src/cluster/data-lake.ts @@ -1,5 +1,6 @@ import { Schema, SchemaTransformation } from "effect"; +import { WingsDecodeError } from "../errors"; // ███████████ ███████████ ███████ ███████████ ███████ // ░░███░░░░░███░░███░░░░░███ ███░░░░░███ ░█░░░███░░░█ ███░░░░░███ // ░███ ░███ ░███ ░███ ███ ░░███░ ░███ ░ ███ ░░███ @@ -257,7 +258,7 @@ export const DataLakeConfig = DataLakeConfigProto.pipe( delta: { objectStore: proto.delta.objectStore }, }; default: - throw new Error("Unsupported data lake config"); + throw new WingsDecodeError("Unsupported data lake config"); } }, encode: (app): DataLakeConfigProto => { @@ -285,7 +286,7 @@ export const DataLakeConfig = DataLakeConfigProto.pipe( }, }; default: - throw new Error("Unsupported data lake config"); + throw new WingsDecodeError("Unsupported data lake config"); } }, }), @@ -300,7 +301,7 @@ export const DataLake = 
DataLakeProto.pipe( SchemaTransformation.transform({ decode: (proto): DataLakeApp => { if (!proto.dataLakeConfig) { - throw new Error("DataLake config is undefined"); + throw new WingsDecodeError("DataLake config is undefined"); } return { name: proto.name, @@ -324,7 +325,7 @@ export const CreateDataLakeRequest = CreateDataLakeRequestProto.pipe( SchemaTransformation.transform({ decode: (proto): CreateDataLakeRequestApp => { if (!proto.dataLake?.dataLakeConfig) { - throw new Error("DataLake metadata is undefined"); + throw new WingsDecodeError("DataLake metadata is undefined"); } return { parent: proto.parent, diff --git a/packages/wings/src/cluster/namespace.ts b/packages/wings/src/cluster/namespace.ts index b0e5f10..10eff30 100644 --- a/packages/wings/src/cluster/namespace.ts +++ b/packages/wings/src/cluster/namespace.ts @@ -1,5 +1,6 @@ import { Schema, SchemaTransformation } from "effect"; +import { WingsDecodeError } from "../errors"; // ███████████ ███████████ ███████ ███████████ ███████ // ░░███░░░░░███░░███░░░░░███ ███░░░░░███ ░█░░░███░░░█ ███░░░░░███ // ░███ ░███ ░███ ░███ ███ ░░███░ ░███ ░ ███ ░░███ @@ -175,7 +176,7 @@ export const CreateNamespaceRequest = CreateNamespaceRequestProto.pipe( SchemaTransformation.transform({ decode: (proto): CreateNamespaceRequestApp => { if (proto.namespace === undefined) { - throw new Error("Namespace metadata is undefined"); + throw new WingsDecodeError("Namespace metadata is undefined"); } return { parent: proto.parent, diff --git a/packages/wings/src/cluster/object-store.ts b/packages/wings/src/cluster/object-store.ts index 14f260e..af7c83c 100644 --- a/packages/wings/src/cluster/object-store.ts +++ b/packages/wings/src/cluster/object-store.ts @@ -1,5 +1,6 @@ import { Schema, SchemaTransformation } from "effect"; +import { WingsDecodeError } from "../errors"; // ███████████ ███████████ ███████ ███████████ ███████ // ░░███░░░░░███░░███░░░░░███ ███░░░░░███ ░█░░░███░░░█ ███░░░░░███ // ░███ ░███ ░███ ░███ ███ ░░███░ ░███ ░ 
███ ░░███ @@ -231,7 +232,7 @@ export const ObjectStoreConfig = ObjectStoreConfigProto.pipe( }, }; default: - throw new Error("Unsupported object store config"); + throw new WingsDecodeError("Unsupported object store config"); } }, encode: (app): ObjectStoreConfigProto => { @@ -285,7 +286,7 @@ export const ObjectStoreConfig = ObjectStoreConfigProto.pipe( }, }; default: - throw new Error("Unsupported object store config"); + throw new WingsDecodeError("Unsupported object store config"); } }, }), @@ -307,7 +308,7 @@ export const ObjectStore = ObjectStoreProto.pipe( SchemaTransformation.transform({ decode: (proto): ObjectStoreApp => { if (!proto.objectStoreConfig) { - throw new Error("ObjectStore config is undefined"); + throw new WingsDecodeError("ObjectStore config is undefined"); } return { name: proto.name, @@ -413,7 +414,7 @@ export const CreateObjectStoreRequest = CreateObjectStoreRequestProto.pipe( SchemaTransformation.transform({ decode: (proto): CreateObjectStoreRequestApp => { if (!proto.objectStore?.objectStoreConfig) { - throw new Error("ObjectStore metadata is undefined"); + throw new WingsDecodeError("ObjectStore metadata is undefined"); } return { parent: proto.parent, diff --git a/packages/wings/src/cluster/topic.ts b/packages/wings/src/cluster/topic.ts index 24851a3..8d94f76 100644 --- a/packages/wings/src/cluster/topic.ts +++ b/packages/wings/src/cluster/topic.ts @@ -1,6 +1,7 @@ import { Schema as ApacheArrowSchema } from "apache-arrow"; import { Schema, SchemaTransformation } from "effect"; +import { WingsDecodeError } from "../errors"; import { arrowFieldToFieldConfig, arrowSchemaFromProto, @@ -287,10 +288,10 @@ export const Topic = TopicProto.pipe( SchemaTransformation.transform({ decode: (proto): TopicApp => { if (!proto.schema) { - throw new Error("Topic schema is undefined"); + throw new WingsDecodeError("Topic schema is undefined"); } if (!proto.compaction) { - throw new Error("Topic compaction is undefined"); + throw new 
WingsDecodeError("Topic compaction is undefined"); } return { name: proto.name, @@ -381,10 +382,10 @@ export const CreateTopicRequest = CreateTopicRequestProto.pipe( SchemaTransformation.transform({ decode: (proto): CreateTopicRequestApp => { if (!proto.topic?.compaction) { - throw new Error("Topic metadata is undefined"); + throw new WingsDecodeError("Topic metadata is undefined"); } if (!proto.topic.schema) { - throw new Error("Topic schema is undefined"); + throw new WingsDecodeError("Topic schema is undefined"); } const schema = arrowSchemaFromProto(proto.topic.schema); return { diff --git a/packages/wings/src/data-plane/fetcher.ts b/packages/wings/src/data-plane/fetcher.ts index 46804da..b37e011 100644 --- a/packages/wings/src/data-plane/fetcher.ts +++ b/packages/wings/src/data-plane/fetcher.ts @@ -1,4 +1,4 @@ -import type { ArrowFlightClient } from "@useairfoil/flight"; +import type { ArrowFlightClientService } from "@useairfoil/flight"; import type { RecordBatch } from "apache-arrow"; import { Effect, Ref, Stream } from "effect"; @@ -48,10 +48,10 @@ import { FetchTicket } from "../proto/utils"; * ) */ export const fetch = ( - client: ArrowFlightClient, + client: ArrowFlightClientService, options: FetchOptions, ): Effect.Effect, never> => - Effect.gen(function* () { + Effect.fnUntraced(function* () { const schema = arrowSchemaFromProto(ArrowTypeCodec.ArrowSchema.toProto(options.topic.schema)); // let currentOffset = options.offset ?? 0n; const currentOffsetRef = yield* Ref.make(options.offset ?? 0n); @@ -69,23 +69,17 @@ export const fetch = ( maxBatchSize: options.maxBatchSize ?? 
100, }); - const batches: RecordBatch[] = yield* Effect.tryPromise({ - try: async () => { - const response = client.doGet(createTicket(ticket), { schema }); - const result: RecordBatch[] = []; - - for await (const batch of response) { - result.push(batch); - } - - return result; - }, - catch: (error) => - new WingsError({ - message: "Failed to fetch data", - cause: error, - }), - }); + const batches: RecordBatch[] = yield* client.doGet(createTicket(ticket), { schema }).pipe( + Stream.runCollect, + Effect.map((results) => Array.from(results, ({ batch }) => batch)), + Effect.mapError( + (error) => + new WingsError({ + message: "Failed to fetch data", + cause: error, + }), + ), + ); // Update offset. if (batches.length > 0) { @@ -100,4 +94,4 @@ export const fetch = ( return batches; }), ).pipe(Stream.flatMap((batches) => Stream.fromIterable(batches))); - }); + })(); diff --git a/packages/wings/src/data-plane/index.ts b/packages/wings/src/data-plane/index.ts index 4a60bfc..0a217f1 100644 --- a/packages/wings/src/data-plane/index.ts +++ b/packages/wings/src/data-plane/index.ts @@ -1,7 +1,7 @@ -export { layer, layerConfig, make, type WingsClientParams } from "./layer"; +export { layer, layerConfig, make, type WingsClientOptions } from "./layer"; export type { Publisher, PushOptions } from "./publisher"; export { - clusterMetadata, + clusterClient, type FetchOptions, fetch, flightClient, diff --git a/packages/wings/src/data-plane/layer.ts b/packages/wings/src/data-plane/layer.ts index 21cbcf8..82cb604 100644 --- a/packages/wings/src/data-plane/layer.ts +++ b/packages/wings/src/data-plane/layer.ts @@ -1,12 +1,12 @@ import type { CallOptions } from "nice-grpc-common"; -import { ArrowFlightClient, createChannelFromConfig } from "@useairfoil/flight"; +import { ArrowFlightClient } from "@useairfoil/flight"; import { Config, Effect, Layer, Scope } from "effect"; import { Metadata } from "nice-grpc"; -import type { ClusterMetadataParams } from "../cluster-metadata/config"; 
+import type { ClusterClientOptions } from "../cluster-client/config"; -import { make as makeClusterMetadata } from "../cluster-metadata/layer"; +import { make as makeClusterClient } from "../cluster-client/layer"; import * as FetcherModule from "./fetcher"; import * as PublisherModule from "./publisher"; import { WingsClient, type WingsClientService } from "./service"; @@ -14,7 +14,7 @@ import { WingsClient, type WingsClientService } from "./service"; /** * Configuration for WingsClient */ -export interface WingsClientParams { +export interface WingsClientOptions { /** * The gRPC host address * @example "localhost:7777" @@ -45,56 +45,45 @@ export interface WingsClientParams { * }); * ``` */ -export const make = ( - config: WingsClientParams, -): Effect.Effect => - Effect.gen(function* () { - const channel = createChannelFromConfig({ host: config.host }); - - const metadata = Metadata({ - "x-wings-namespace": config.namespace, - }); - - const mergedCallOptions: CallOptions = { - ...config.callOptions, - metadata, - }; +export const make = Effect.fnUntraced(function* ( + config: WingsClientOptions, +): Effect.fn.Return { + const metadata = Metadata({ + "x-wings-namespace": config.namespace, + }); - const flightClient = new ArrowFlightClient( - { channel }, - { - defaultCallOptions: { - "*": mergedCallOptions, - }, - }, - ); + const mergedCallOptions: CallOptions = { + ...config.callOptions, + metadata, + }; - const clusterMetadataConfig: ClusterMetadataParams = { - host: config.host, - callOptions: config.callOptions, - }; + const flightClient = yield* ArrowFlightClient.make({ + host: config.host, + defaultCallOptions: { + "*": mergedCallOptions, + }, + }); - const clusterMetadata = yield* makeClusterMetadata(clusterMetadataConfig); + const clusterClientConfig: ClusterClientOptions = { + host: config.host, + callOptions: config.callOptions, + }; - const layerScope = yield* Effect.scope; + const clusterClient = yield* makeClusterClient(clusterClientConfig); - yield* 
Effect.addFinalizer(() => - Effect.sync(() => { - channel.close(); - }), - ); + const layerScope = yield* Effect.scope; - return { - flightClient, - clusterMetadata, - fetch: (options) => FetcherModule.fetch(flightClient, options), - publisher: (options) => - PublisherModule.makePublisher(flightClient, options).pipe(Scope.provide(layerScope)), - }; - }); + return { + flightClient, + clusterClient, + fetch: (options) => FetcherModule.fetch(flightClient, options), + publisher: (options) => + PublisherModule.makePublisher(flightClient, options).pipe(Scope.provide(layerScope)), + }; +}); /** Create layer with direct config values */ -export const layer = (config: WingsClientParams): Layer.Layer => +export const layer = (config: WingsClientOptions): Layer.Layer => Layer.effect(WingsClient, make(config)); /** @@ -106,7 +95,7 @@ export const layer = (config: WingsClientParams): Layer.Layer => * namespace: Config.string("WINGS_NAMESPACE") * }) */ -export const layerConfig = (config: Config.Wrap) => +export const layerConfig = (config: Config.Wrap) => Layer.effect( WingsClient, Effect.gen(function* () { diff --git a/packages/wings/src/data-plane/publisher.ts b/packages/wings/src/data-plane/publisher.ts index 0764a69..c0254c0 100644 --- a/packages/wings/src/data-plane/publisher.ts +++ b/packages/wings/src/data-plane/publisher.ts @@ -1,5 +1,5 @@ import { - type ArrowFlightClient, + type ArrowFlightClientService, type FlightData, FlightDataEncoder, FlightDescriptor, @@ -34,13 +34,13 @@ export interface Publisher { * The fiber lifecycle is tied to the provided scope (typically the WingsClient layer). 
*/ export const makePublisher = ( - client: ArrowFlightClient, + client: ArrowFlightClientService, options: { readonly topic: ClusterSchema.Topic.Topic; readonly partitionValue?: PartitionValue; }, ): Effect.Effect => - Effect.gen(function* () { + Effect.fnUntraced(function* (): Effect.fn.Return { const channel = new Channel(); const { topic, partitionValue: defaultPartitionValue } = options; @@ -105,7 +105,7 @@ export const makePublisher = ( ); // Background fiber that processes responses - const responseLoop = Effect.gen(function* () { + const processResponses = Effect.gen(function* () { while (true) { const result = yield* Effect.tryPromise({ try: () => responseIterator.next(), @@ -117,7 +117,6 @@ export const makePublisher = ( }); if (result.done) { - // Stream closed - fail all pending requests const pending = yield* Ref.get(pendingRef); for (const [requestId, deferred] of pending.entries()) { yield* Deferred.fail( @@ -161,11 +160,11 @@ export const makePublisher = ( } }); - const fiber = yield* Effect.forkScoped(responseLoop); + const responseFiber = yield* Effect.forkScoped(processResponses); yield* Effect.addFinalizer(() => Effect.gen(function* () { - yield* Fiber.interrupt(fiber); + yield* Fiber.interrupt(responseFiber); // Fail all pending requests const pending = yield* Ref.get(pendingRef); @@ -216,4 +215,4 @@ export const makePublisher = ( }; return publisher; - }); + })(); diff --git a/packages/wings/src/data-plane/service.ts b/packages/wings/src/data-plane/service.ts index 1088db8..5a59e84 100644 --- a/packages/wings/src/data-plane/service.ts +++ b/packages/wings/src/data-plane/service.ts @@ -1,10 +1,10 @@ -import type { ArrowFlightClient } from "@useairfoil/flight"; +import type { ArrowFlightClientService } from "@useairfoil/flight"; import type { RecordBatch } from "apache-arrow"; import { Context, type Effect, type Stream } from "effect"; import type * as ClusterSchema from "../cluster"; -import type { ClusterMetadataService } from 
"../cluster-metadata/service"; +import type { ClusterClientService } from "../cluster-client/service"; import type { WingsError } from "../errors"; import type { PartitionValue } from "../partition-value"; import type { Publisher } from "./publisher"; @@ -23,20 +23,18 @@ export interface PublisherOptions { } /** - * WingsClient Service Interface + * Main service for working with the Wings data plane. */ export interface WingsClientService { /** - * Low-level Arrow Flight client used by fetcher and publisher. - * Exposed for advanced use cases where you need direct Flight access. + * Low-level Flight client for advanced integrations. */ - readonly flightClient: ArrowFlightClient; + readonly flightClient: ArrowFlightClientService; /** - * Effect-based ClusterMetadata service for managing tenants, namespaces, - * topics, object stores and data lakes. + * Cluster metadata client that shares the same runtime configuration. */ - readonly clusterMetadata: ClusterMetadataService; + readonly clusterClient: ClusterClientService; readonly fetch: ( options: FetchOptions, @@ -58,15 +56,14 @@ export const fetch = ( ): Effect.Effect, WingsError, WingsClient> => WingsClient.use((service) => service.fetch(options)); -export const clusterMetadata = (): Effect.Effect => - WingsClient.useSync((service) => service.clusterMetadata); +export const clusterClient: Effect.Effect = + WingsClient.useSync((service) => service.clusterClient); -export const flightClient = (): Effect.Effect => +export const flightClient: Effect.Effect = WingsClient.useSync((service) => service.flightClient); /** - * Creates a publisher for pushing data to a topic. - * The publisher's background fiber is supervised by the WingsClient layer. + * Creates a publisher for pushing batches into a topic. 
*/ export const publisher = ( options: PublisherOptions, diff --git a/packages/wings/src/errors.ts b/packages/wings/src/errors.ts deleted file mode 100644 index b763369..0000000 --- a/packages/wings/src/errors.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { Data } from "effect"; - -export class ClusterMetadataError extends Data.TaggedError("ClusterMetadataError")<{ - readonly message: string; - readonly code?: string; - readonly cause?: unknown; -}> {} - -export class WingsError extends Data.TaggedError("WingsError")<{ - readonly message: string; - readonly code?: string; - readonly cause?: unknown; -}> {} - -export class GrpcError extends Data.TaggedError("GrpcError")<{ - readonly message: string; - readonly status: number; - readonly cause?: unknown; -}> {} - -export class ConfigError extends Data.TaggedError("ConfigError")<{ - readonly message: string; - readonly cause?: unknown; -}> {} diff --git a/packages/wings/src/errors/cluster-client-error.ts b/packages/wings/src/errors/cluster-client-error.ts new file mode 100644 index 0000000..69a134d --- /dev/null +++ b/packages/wings/src/errors/cluster-client-error.ts @@ -0,0 +1,7 @@ +import { Data } from "effect"; + +export class ClusterClientError extends Data.TaggedError("ClusterClientError")<{ + readonly message: string; + readonly code?: string; + readonly cause?: unknown; +}> {} diff --git a/packages/wings/src/errors/config-error.ts b/packages/wings/src/errors/config-error.ts new file mode 100644 index 0000000..35503b4 --- /dev/null +++ b/packages/wings/src/errors/config-error.ts @@ -0,0 +1,6 @@ +import { Data } from "effect"; + +export class ConfigError extends Data.TaggedError("ConfigError")<{ + readonly message: string; + readonly cause?: unknown; +}> {} diff --git a/packages/wings/src/errors/grpc-error.ts b/packages/wings/src/errors/grpc-error.ts new file mode 100644 index 0000000..c84a765 --- /dev/null +++ b/packages/wings/src/errors/grpc-error.ts @@ -0,0 +1,7 @@ +import { Data } from "effect"; + +export class 
GrpcError extends Data.TaggedError("GrpcError")<{ + readonly message: string; + readonly status: number; + readonly cause?: unknown; +}> {} diff --git a/packages/wings/src/errors/index.ts b/packages/wings/src/errors/index.ts new file mode 100644 index 0000000..93e992c --- /dev/null +++ b/packages/wings/src/errors/index.ts @@ -0,0 +1,5 @@ +export { ClusterClientError } from "./cluster-client-error"; +export { ConfigError } from "./config-error"; +export { GrpcError } from "./grpc-error"; +export { WingsDecodeError } from "./wings-decode-error"; +export { WingsError } from "./wings-client-error"; diff --git a/packages/wings/src/errors/wings-client-error.ts b/packages/wings/src/errors/wings-client-error.ts new file mode 100644 index 0000000..bcc45a8 --- /dev/null +++ b/packages/wings/src/errors/wings-client-error.ts @@ -0,0 +1,7 @@ +import { Data } from "effect"; + +export class WingsError extends Data.TaggedError("WingsError")<{ + readonly message: string; + readonly code?: string; + readonly cause?: unknown; +}> {} diff --git a/packages/wings/src/errors/wings-decode-error.ts b/packages/wings/src/errors/wings-decode-error.ts new file mode 100644 index 0000000..bbe5368 --- /dev/null +++ b/packages/wings/src/errors/wings-decode-error.ts @@ -0,0 +1,9 @@ +export class WingsDecodeError extends Error { + readonly cause?: unknown; + + constructor(message: string, options?: { readonly cause?: unknown }) { + super(message); + this.name = "WingsDecodeError"; + this.cause = options?.cause; + } +} diff --git a/packages/wings/src/index.ts b/packages/wings/src/index.ts index 169f31e..0d230dd 100644 --- a/packages/wings/src/index.ts +++ b/packages/wings/src/index.ts @@ -1,9 +1,8 @@ export * as Cluster from "./cluster"; -export * as WingsClusterMetadata from "./cluster-metadata"; +export * as ClusterClient from "./cluster-client"; export * as WingsClient from "./data-plane"; -export * from "./errors"; -export * from "./lib/arrow"; -export { tableFromJSON } from 
"./lib/arrow/helpers"; -export { PartitionValue, PV } from "./partition-value"; +export * as Arrow from "./arrow"; +export * as Partition from "./partition-value"; export * as Schema from "./schema"; -export { encodeTopicSchema, topicSchema } from "./topic"; +export * as Topic from "./topic"; +export * from "./errors"; diff --git a/packages/wings/src/partition-value.ts b/packages/wings/src/partition-value.ts index 01fdb3c..6ed1081 100644 --- a/packages/wings/src/partition-value.ts +++ b/packages/wings/src/partition-value.ts @@ -1,7 +1,16 @@ import { PartitionValue } from "./proto/wings/v1/log_metadata"; +/** Partition value type used by Wings partitioned topics. */ export { PartitionValue } from "./proto/wings/v1/log_metadata"; +/** + * Small helpers for building partition values. + * + * @example + * ```ts + * const partition = PV.int32(42) + * ``` + */ export const PV = { null(): PartitionValue { return PartitionValue.create({ diff --git a/packages/wings/src/schema/annotations.ts b/packages/wings/src/schema/annotations.ts new file mode 100644 index 0000000..fd9e33a --- /dev/null +++ b/packages/wings/src/schema/annotations.ts @@ -0,0 +1,86 @@ +import type * as Schema from "effect/Schema"; + +import type { TimeUnit } from "../cluster/arrow-type"; + +/** + * Field id annotation used to populate Arrow field ids. + */ +export const FieldId = Symbol.for("wings/fieldId"); +/** + * @internal + * Arrow type annotation used by Wings schema helpers. + */ +export const WingsType = Symbol.for("wings/arrowType"); +/** + * Field-level metadata annotation for Arrow fields. + */ +export const FieldMetadata = Symbol.for("wings/fieldMetadata"); +/** + * Schema-level metadata annotation for Arrow schemas. + */ +export const SchemaMetadata = Symbol.for("wings/schemaMetadata"); +/** + * @internal + * Nullable flag annotation used by the converter. 
+ */ +export const WingsNullable = Symbol.for("wings/nullable"); + +/** + * @internal + * Arrow primitive tags supported by the Wings schema mapper. + */ +export type PrimitiveArrowTypeTag = + | "bool" + | "uint8" + | "int8" + | "uint16" + | "int16" + | "uint32" + | "int32" + | "uint64" + | "int64" + | "float16" + | "float32" + | "float64" + | "utf8" + | "binary" + | "date32" + | "date64"; + +/** + * @internal + * Internal annotation that encodes Arrow type information. + */ +export type WingsTypeAnnotation = + | { + readonly _tag: "primitive"; + readonly type: PrimitiveArrowTypeTag; + } + | { + readonly _tag: "timestamp"; + readonly timeUnit: TimeUnit; + readonly timezone?: string; + } + | { + readonly _tag: "duration"; + readonly timeUnit: TimeUnit; + } + | { + readonly _tag: "list"; + readonly item: Schema.Top; + }; + +/** + * Declares the Wings schema annotations types on the Effect Schema namespace. + */ +declare module "effect/Schema" { + namespace Annotations { + interface Schema<_A> { + [FieldId]?: number | bigint; + [WingsType]?: WingsTypeAnnotation; + [FieldMetadata]?: Readonly>; + [SchemaMetadata]?: Readonly>; + [WingsNullable]?: boolean; + } + } +} diff --git a/packages/wings/src/schema/convert-schema.ts b/packages/wings/src/schema/convert-schema.ts new file mode 100644 index 0000000..cf658b2 --- /dev/null +++ b/packages/wings/src/schema/convert-schema.ts @@ -0,0 +1,218 @@ +import type * as Schema from "effect/Schema"; + +import * as SchemaAST from "effect/SchemaAST"; + +import type { ArrowSchema, ArrowType, Field } from "../cluster/arrow-type"; + +import { WingsDecodeError } from "../errors"; +import { + FieldId, + FieldMetadata, + type PrimitiveArrowTypeTag, + SchemaMetadata, + WingsNullable, + WingsType, + type WingsTypeAnnotation, +} from "./annotations"; + +/** + * Converts a Wings Struct schema into a Wings ArrowSchema. 
+ */ +export function convertSchema( + structSchema: Schema.Struct, +): ArrowSchema { + return { + fields: convertStructFields(structSchema.fields, "root"), + metadata: readSchemaMetadata(structSchema), + }; +} + +/** + * Converts a map of struct fields into Wings Arrow fields. + */ +function convertStructFields(fields: Schema.Struct.Fields, path: string): Field[] { + return Reflect.ownKeys(fields).map((key) => { + const schema = fields[key]; + return convertField(String(key), schema, path); + }); +} + +/** + * Converts a single Wings schema into an Wings Arrow field. + */ +function convertField(name: string, schema: Schema.Top, path: string): Field { + const id = readFieldId(schema, `${path}.${name}`); + const arrowType = mapEffectTypeToArrow(schema, `${path}.${name}`); + return { + name, + id, + arrowType, + nullable: readNullable(schema), + metadata: readFieldMetadata(schema), + }; +} + +/** + * Maps a Wings schema to the corresponding Wings Arrow type. + */ +function mapEffectTypeToArrow(schema: Schema.Top, path: string): ArrowType { + const annotation = readWingsTypeAnnotation(schema); + if (annotation) { + switch (annotation._tag) { + case "primitive": + return primitiveArrowType(annotation.type); + case "timestamp": + return { + _tag: "timestamp", + timestamp: { + timeUnit: annotation.timeUnit, + timezone: annotation.timezone ?? "", + }, + }; + case "duration": + return { _tag: "duration", duration: annotation.timeUnit }; + case "list": + return { + _tag: "list", + list: { + fieldType: convertListItem(annotation.item, `${path}.item`), + }, + }; + } + } + + if (isStructSchema(schema)) { + return { + _tag: "struct", + struct: { + subFieldTypes: convertStructFields(schema.fields, path), + }, + }; + } + + throw new WingsDecodeError(`Unsupported schema for "${path}". Use Wings types or Schema.Struct.`); +} + +/** + * Converts the list item schema into the Wings Arrow list field definition. 
+ */ +function convertListItem(itemSchema: Schema.Top, path: string): Field { + return { + name: "item", + id: readFieldId(itemSchema, path), + arrowType: mapEffectTypeToArrow(itemSchema, path), + nullable: readNullable(itemSchema), + metadata: readFieldMetadata(itemSchema), + }; +} + +/** + * Reads the FieldId annotation and normalizes it to bigint. + */ +function readFieldId(schema: Schema.Top, path: string): bigint { + const annotations = getAnnotations(schema); + const value = annotations[FieldId]; + if (value === undefined) { + throw new WingsDecodeError(`Missing FieldId annotation for "${path}".`); + } + if (typeof value === "bigint") { + return value; + } + if (typeof value === "number" && Number.isInteger(value)) { + return BigInt(value); + } + throw new WingsDecodeError(`Invalid FieldId annotation for "${path}".`); +} + +/** + * Reads the internal Arrow type annotation from a schema. + */ +function readWingsTypeAnnotation(schema: Schema.Top): WingsTypeAnnotation | undefined { + const annotations = getAnnotations(schema); + return annotations[WingsType] as WingsTypeAnnotation | undefined; +} + +/** + * Reads field-level metadata annotations. + */ +function readFieldMetadata(schema: Schema.Top): Readonly> { + const annotations = getAnnotations(schema); + const metadata = annotations[FieldMetadata] as Readonly> | undefined; + return metadata ?? {}; +} + +/** + * Reads schema-level metadata annotations. + */ +function readSchemaMetadata(schema: Schema.Top): Readonly> { + const annotations = getAnnotations(schema); + const metadata = annotations[SchemaMetadata] as Readonly> | undefined; + return metadata ?? {}; +} + +/** + * Reads whether a schema should be marked nullable for Wings Arrow. + */ +function readNullable(schema: Schema.Top): boolean { + const annotations = getAnnotations(schema); + return annotations[WingsNullable] === true; +} + +/** + * Returns the annotation map from a schema AST. 
+ */ +function getAnnotations(schema: Schema.Top): Record { + return (SchemaAST.resolve(schema.ast) ?? {}) as Record; +} + +/** + * Runtime check for struct schemas that expose a fields map. + */ +function isStructSchema(schema: Schema.Top): schema is Schema.Struct { + return ( + (typeof schema === "object" || typeof schema === "function") && + schema !== null && + "fields" in schema && + typeof schema.fields === "object" + ); +} + +/** + * Maps a primitive annotation to its Wings Arrow type tag. + */ +function primitiveArrowType(type: PrimitiveArrowTypeTag): ArrowType { + switch (type) { + case "bool": + return { _tag: "bool" }; + case "uint8": + return { _tag: "uint8" }; + case "int8": + return { _tag: "int8" }; + case "uint16": + return { _tag: "uint16" }; + case "int16": + return { _tag: "int16" }; + case "uint32": + return { _tag: "uint32" }; + case "int32": + return { _tag: "int32" }; + case "uint64": + return { _tag: "uint64" }; + case "int64": + return { _tag: "int64" }; + case "float16": + return { _tag: "float16" }; + case "float32": + return { _tag: "float32" }; + case "float64": + return { _tag: "float64" }; + case "utf8": + return { _tag: "utf8" }; + case "binary": + return { _tag: "binary" }; + case "date32": + return { _tag: "date32" }; + case "date64": + return { _tag: "date64" }; + } +} diff --git a/packages/wings/src/schema/index.ts b/packages/wings/src/schema/index.ts index d7c95f1..57df5c8 100644 --- a/packages/wings/src/schema/index.ts +++ b/packages/wings/src/schema/index.ts @@ -1,26 +1,4 @@ export { TimeUnit } from "../cluster/arrow-type"; -export { FieldId, FieldMetadata, SchemaMetadata } from "./wings-annotations"; -export { schemaConverter } from "./wings-converter"; -export { - WingsBinary, - WingsBool, - WingsDate32, - WingsDate64, - WingsDuration, - WingsFloat16, - WingsFloat32, - WingsFloat64, - WingsInt8, - WingsInt16, - WingsInt32, - WingsInt64, - WingsList, - WingsNullOr, - WingsString, - WingsStruct, - WingsTimestamp, - WingsUInt8, 
- WingsUInt16, - WingsUInt32, - WingsUInt64, -} from "./wings-types"; +export { FieldId, FieldMetadata, SchemaMetadata } from "./annotations"; +export { convertSchema } from "./convert-schema"; +export { Types } from "./types"; diff --git a/packages/wings/src/schema/types.ts b/packages/wings/src/schema/types.ts new file mode 100644 index 0000000..964c911 --- /dev/null +++ b/packages/wings/src/schema/types.ts @@ -0,0 +1,185 @@ +import * as Schema from "effect/Schema"; +import * as SchemaAST from "effect/SchemaAST"; + +import type { TimeUnit } from "../cluster/arrow-type"; + +import { WingsNullable, WingsType, type WingsTypeAnnotation } from "./annotations"; + +/** + * Attaches the internal Wings Arrow type annotation to a schema. + */ +const annotateWingsType = ( + schema: Schema.Schema, + annotation: WingsTypeAnnotation, +): Schema.Schema => schema.annotate({ [WingsType]: annotation }); + +/** + * Wraps a schema to accept null values and marks the Wings Arrow field nullable. + */ +export const WingsNullOr = (schema: Schema.Schema): Schema.Schema => { + const existingAnnotations = (SchemaAST.resolve(schema.ast) ?? {}) as Record; + const nullOr = Schema.NullOr(schema); + const nextAnnotations: Record = { + ...existingAnnotations, + [WingsNullable]: true, + }; + return nullOr.annotate(nextAnnotations); +}; + +/** + * Reads the Wings Arrow type annotation from a schema, if present. + */ +function _readWingsTypeAnnotation(schema: Schema.Top): WingsTypeAnnotation | undefined { + const annotations = (SchemaAST.resolve(schema.ast) ?? {}) as Record; + return annotations[WingsType] as WingsTypeAnnotation | undefined; +} + +/** Arrow UTF-8 string schema. */ +export const WingsString = annotateWingsType(Schema.String, { + _tag: "primitive", + type: "utf8", +}); + +/** Arrow boolean schema. */ +export const WingsBool = annotateWingsType(Schema.Boolean, { + _tag: "primitive", + type: "bool", +}); + +/** Arrow binary schema. 
*/ +export const WingsBinary = annotateWingsType(Schema.Uint8Array, { + _tag: "primitive", + type: "binary", +}); + +/** Arrow uint8 schema. */ +export const WingsUInt8 = annotateWingsType(Schema.Number, { + _tag: "primitive", + type: "uint8", +}); + +/** Arrow int8 schema. */ +export const WingsInt8 = annotateWingsType(Schema.Number, { + _tag: "primitive", + type: "int8", +}); + +/** Arrow uint16 schema. */ +export const WingsUInt16 = annotateWingsType(Schema.Number, { + _tag: "primitive", + type: "uint16", +}); + +/** Arrow int16 schema. */ +export const WingsInt16 = annotateWingsType(Schema.Number, { + _tag: "primitive", + type: "int16", +}); + +/** Arrow uint32 schema. */ +export const WingsUInt32 = annotateWingsType(Schema.Number, { + _tag: "primitive", + type: "uint32", +}); + +/** Arrow int32 schema. */ +export const WingsInt32 = annotateWingsType(Schema.Number, { + _tag: "primitive", + type: "int32", +}); + +/** Arrow uint64 schema. */ +export const WingsUInt64 = annotateWingsType(Schema.BigInt, { + _tag: "primitive", + type: "uint64", +}); + +/** Arrow int64 schema. */ +export const WingsInt64 = annotateWingsType(Schema.BigInt, { + _tag: "primitive", + type: "int64", +}); + +/** Arrow float16 schema. */ +export const WingsFloat16 = annotateWingsType(Schema.Number, { + _tag: "primitive", + type: "float16", +}); + +/** Arrow float32 schema. */ +export const WingsFloat32 = annotateWingsType(Schema.Number, { + _tag: "primitive", + type: "float32", +}); + +/** Arrow float64 schema. */ +export const WingsFloat64 = annotateWingsType(Schema.Number, { + _tag: "primitive", + type: "float64", +}); + +/** Arrow date32 schema. */ +export const WingsDate32 = annotateWingsType(Schema.Date, { + _tag: "primitive", + type: "date32", +}); + +/** Arrow date64 schema. */ +export const WingsDate64 = annotateWingsType(Schema.Date, { + _tag: "primitive", + type: "date64", +}); + +/** Arrow timestamp schema with time unit and timezone. 
*/ +export const WingsTimestamp = (timeUnit: TimeUnit, timezone?: string) => + annotateWingsType(Schema.Date, { + _tag: "timestamp", + timeUnit, + timezone, + }); + +/** Arrow duration schema with time unit. */ +export const WingsDuration = (timeUnit: TimeUnit) => + annotateWingsType(Schema.Number, { + _tag: "duration", + timeUnit, + }); + +/** + * Arrow list schema with a single item field definition. + * The item schema must include a FieldId annotation. + */ +export const WingsList = (item: Item) => + annotateWingsType(Schema.Array(item), { + _tag: "list", + item, + }); + +/** + * Convenience alias for defining nested Wings structs. + */ +export const WingsStruct = Schema.Struct; + +export const Types = { + Binary: WingsBinary, + Bool: WingsBool, + Date32: WingsDate32, + Date64: WingsDate64, + Duration: WingsDuration, + Float16: WingsFloat16, + Float32: WingsFloat32, + Float64: WingsFloat64, + Int8: WingsInt8, + Int16: WingsInt16, + Int32: WingsInt32, + Int64: WingsInt64, + List: WingsList, + NullOr: WingsNullOr, + String: WingsString, + Struct: WingsStruct, + Timestamp: WingsTimestamp, + UInt8: WingsUInt8, + UInt16: WingsUInt16, + UInt32: WingsUInt32, + UInt64: WingsUInt64, +} as const; diff --git a/packages/wings/src/topic.ts b/packages/wings/src/topic.ts index 37cac19..10903ff 100644 --- a/packages/wings/src/topic.ts +++ b/packages/wings/src/topic.ts @@ -1,18 +1,40 @@ import type { Schema } from "apache-arrow"; +import { Effect } from "effect"; + import type { Topic } from "./proto/wings/v1/cluster_metadata"; +import { WingsDecodeError, WingsError } from "./errors"; import { arrowSchemaFromProto, arrowSchemaToProto } from "./lib/arrow"; import { Schema as ProtoSchema } from "./proto/schema/arrow_type"; -export function topicSchema(topic: Topic): Schema { +/** + * Returns a topic's Arrow schema synchronously. + * Use this in places where missing schema data should fail immediately. 
+ */ +export function topicSchemaUnsafe(topic: Topic): Schema { if (!topic.schema) { - throw new Error("Topic schema is undefined"); + throw new WingsDecodeError("Topic schema is undefined"); } return arrowSchemaFromProto(topic.schema); } +/** + * Decodes a topic's Arrow schema into an `Effect`. + * This is the recommended entry point when the topic comes from external data. + */ +export const topicSchema = (topic: Topic) => + Effect.try({ + try: () => topicSchemaUnsafe(topic), + catch: (cause) => + new WingsError({ + message: "Failed to decode topic schema", + cause, + }), + }); + +/** Serializes an Arrow schema into the bytes used by Wings APIs. */ export function encodeTopicSchema(schema: Schema): Uint8Array { const protoSchema = arrowSchemaToProto(schema); return ProtoSchema.encode(protoSchema).finish(); diff --git a/packages/wings/test/cluster-metadata.test.ts b/packages/wings/test/cluster-metadata.test.ts index 72b6dab..3bb1626 100644 --- a/packages/wings/test/cluster-metadata.test.ts +++ b/packages/wings/test/cluster-metadata.test.ts @@ -3,16 +3,16 @@ import { TestWings } from "@useairfoil/wings-testing"; import { Effect, Exit, Layer } from "effect"; import { customAlphabet } from "nanoid"; -import { WingsClusterMetadata } from "../src"; +import { ClusterClient } from "../src"; import * as ClusterSchema from "../src/cluster"; const makeId = customAlphabet("abcdefghijklmnopqrstuvwxyz", 12); -const wingsLayer = Layer.effect(WingsClusterMetadata.ClusterMetadata)( +const wingsLayer = Layer.effect(ClusterClient.ClusterClient)( Effect.gen(function* () { const w = yield* TestWings.Instance; const host = yield* w.grpcHostAndPort; - return yield* WingsClusterMetadata.make({ + return yield* ClusterClient.make({ host, }); }), @@ -25,7 +25,7 @@ layer(testLayer, { timeout: "60 seconds" })("ClusterMetadata", (it) => { describe("Layer Configuration", () => { it.effect("should create layer with direct config", () => Effect.gen(function* () { - const result = yield* 
WingsClusterMetadata.listTenants({ + const result = yield* ClusterClient.listTenants({ pageSize: 10, }); @@ -40,7 +40,7 @@ layer(testLayer, { timeout: "60 seconds" })("ClusterMetadata", (it) => { Effect.gen(function* () { const tenantId = makeId(); - const tenant = yield* WingsClusterMetadata.createTenant({ + const tenant = yield* ClusterClient.createTenant({ tenantId, }); @@ -52,9 +52,9 @@ layer(testLayer, { timeout: "60 seconds" })("ClusterMetadata", (it) => { Effect.gen(function* () { const tenantId = makeId(); - yield* WingsClusterMetadata.createTenant({ tenantId }); + yield* ClusterClient.createTenant({ tenantId }); - const tenant = yield* WingsClusterMetadata.getTenant({ + const tenant = yield* ClusterClient.getTenant({ name: `tenants/${tenantId}`, }); @@ -64,7 +64,7 @@ layer(testLayer, { timeout: "60 seconds" })("ClusterMetadata", (it) => { it.effect("should list tenants", () => Effect.gen(function* () { - const response = yield* WingsClusterMetadata.listTenants({ + const response = yield* ClusterClient.listTenants({ pageSize: 100, }); @@ -77,14 +77,14 @@ layer(testLayer, { timeout: "60 seconds" })("ClusterMetadata", (it) => { Effect.gen(function* () { const tenantId = makeId(); - yield* WingsClusterMetadata.createTenant({ tenantId }); + yield* ClusterClient.createTenant({ tenantId }); - yield* WingsClusterMetadata.deleteTenant({ + yield* ClusterClient.deleteTenant({ name: `tenants/${tenantId}`, }); const exit = yield* Effect.exit( - WingsClusterMetadata.getTenant({ + ClusterClient.getTenant({ name: `tenants/${tenantId}`, }), ); @@ -96,7 +96,7 @@ layer(testLayer, { timeout: "60 seconds" })("ClusterMetadata", (it) => { it.effect("should handle tenant not found error", () => Effect.gen(function* () { const exit = yield* Effect.exit( - WingsClusterMetadata.getTenant({ + ClusterClient.getTenant({ name: "tenants/nonexistent", }), ); @@ -112,11 +112,11 @@ layer(testLayer, { timeout: "60 seconds" })("ClusterMetadata", (it) => { const tenantId = makeId(); const 
namespaceId = makeId(); - yield* WingsClusterMetadata.createTenant({ tenantId }); + yield* ClusterClient.createTenant({ tenantId }); const objectStoreId = makeId(); - yield* WingsClusterMetadata.createObjectStore({ + yield* ClusterClient.createObjectStore({ parent: `tenants/${tenantId}`, objectStoreId, objectStoreConfig: { @@ -133,7 +133,7 @@ layer(testLayer, { timeout: "60 seconds" })("ClusterMetadata", (it) => { }); const dataLakeId = makeId(); - yield* WingsClusterMetadata.createDataLake({ + yield* ClusterClient.createDataLake({ parent: `tenants/${tenantId}`, dataLakeId, dataLakeConfig: { @@ -142,7 +142,7 @@ layer(testLayer, { timeout: "60 seconds" })("ClusterMetadata", (it) => { }, }); - const namespace = yield* WingsClusterMetadata.createNamespace({ + const namespace = yield* ClusterClient.createNamespace({ parent: `tenants/${tenantId}`, namespaceId, flushSizeBytes: BigInt(1024 * 1024), @@ -160,10 +160,10 @@ layer(testLayer, { timeout: "60 seconds" })("ClusterMetadata", (it) => { const tenantId = makeId(); const namespaceId = makeId(); - yield* WingsClusterMetadata.createTenant({ tenantId }); + yield* ClusterClient.createTenant({ tenantId }); const objectStoreId = makeId(); - yield* WingsClusterMetadata.createObjectStore({ + yield* ClusterClient.createObjectStore({ parent: `tenants/${tenantId}`, objectStoreId, objectStoreConfig: { @@ -180,7 +180,7 @@ layer(testLayer, { timeout: "60 seconds" })("ClusterMetadata", (it) => { }); const dataLakeId = makeId(); - yield* WingsClusterMetadata.createDataLake({ + yield* ClusterClient.createDataLake({ parent: `tenants/${tenantId}`, dataLakeId, dataLakeConfig: { @@ -189,7 +189,7 @@ layer(testLayer, { timeout: "60 seconds" })("ClusterMetadata", (it) => { }, }); - yield* WingsClusterMetadata.createNamespace({ + yield* ClusterClient.createNamespace({ parent: `tenants/${tenantId}`, namespaceId, flushSizeBytes: BigInt(1024 * 1024), @@ -198,7 +198,7 @@ layer(testLayer, { timeout: "60 seconds" })("ClusterMetadata", (it) => { 
dataLake: `tenants/${tenantId}/data-lakes/${dataLakeId}`, }); - const namespace = yield* WingsClusterMetadata.getNamespace({ + const namespace = yield* ClusterClient.getNamespace({ name: `tenants/${tenantId}/namespaces/${namespaceId}`, }); @@ -208,7 +208,7 @@ layer(testLayer, { timeout: "60 seconds" })("ClusterMetadata", (it) => { it.effect("should list namespaces", () => Effect.gen(function* () { - const response = yield* WingsClusterMetadata.listNamespaces({ + const response = yield* ClusterClient.listNamespaces({ parent: "tenants/default", pageSize: 100, }); @@ -224,7 +224,7 @@ layer(testLayer, { timeout: "60 seconds" })("ClusterMetadata", (it) => { Effect.gen(function* () { const topicId = makeId(); - const topic = yield* WingsClusterMetadata.createTopic({ + const topic = yield* ClusterClient.createTopic({ parent: "tenants/default/namespaces/default", topicId, fields: [ @@ -246,7 +246,7 @@ layer(testLayer, { timeout: "60 seconds" })("ClusterMetadata", (it) => { Effect.gen(function* () { const topicId = makeId(); - yield* WingsClusterMetadata.createTopic({ + yield* ClusterClient.createTopic({ parent: "tenants/default/namespaces/default", topicId, fields: [{ name: "field1", dataType: "Int32", nullable: false, id: 1n }], @@ -257,7 +257,7 @@ layer(testLayer, { timeout: "60 seconds" })("ClusterMetadata", (it) => { }, }); - const topic = yield* WingsClusterMetadata.getTopic({ + const topic = yield* ClusterClient.getTopic({ name: `tenants/default/namespaces/default/topics/${topicId}`, }); @@ -268,7 +268,7 @@ layer(testLayer, { timeout: "60 seconds" })("ClusterMetadata", (it) => { it.effect("should list topics", () => Effect.gen(function* () { - const response = yield* WingsClusterMetadata.listTopics({ + const response = yield* ClusterClient.listTopics({ parent: "tenants/default/namespaces/default", pageSize: 100, }); @@ -282,7 +282,7 @@ layer(testLayer, { timeout: "60 seconds" })("ClusterMetadata", (it) => { Effect.gen(function* () { const topicId = makeId(); - yield* 
WingsClusterMetadata.createTopic({ + yield* ClusterClient.createTopic({ parent: "tenants/default/namespaces/default", topicId, fields: [{ name: "field1", dataType: "Int32", nullable: false, id: 1n }], @@ -293,13 +293,13 @@ layer(testLayer, { timeout: "60 seconds" })("ClusterMetadata", (it) => { }, }); - yield* WingsClusterMetadata.deleteTopic({ + yield* ClusterClient.deleteTopic({ name: `tenants/default/namespaces/default/topics/${topicId}`, force: true, }); const exit = yield* Effect.exit( - WingsClusterMetadata.getTopic({ + ClusterClient.getTopic({ name: `tenants/default/namespaces/default/topics/${topicId}`, }), ); @@ -315,9 +315,9 @@ layer(testLayer, { timeout: "60 seconds" })("ClusterMetadata", (it) => { const tenantId = makeId(); const objectStoreId = makeId(); - yield* WingsClusterMetadata.createTenant({ tenantId }); + yield* ClusterClient.createTenant({ tenantId }); - const objectStore = yield* WingsClusterMetadata.createObjectStore({ + const objectStore = yield* ClusterClient.createObjectStore({ parent: `tenants/${tenantId}`, objectStoreId, objectStoreConfig: { @@ -342,9 +342,9 @@ layer(testLayer, { timeout: "60 seconds" })("ClusterMetadata", (it) => { const tenantId = makeId(); const objectStoreId = makeId(); - yield* WingsClusterMetadata.createTenant({ tenantId }); + yield* ClusterClient.createTenant({ tenantId }); - yield* WingsClusterMetadata.createObjectStore({ + yield* ClusterClient.createObjectStore({ parent: `tenants/${tenantId}`, objectStoreId, objectStoreConfig: { @@ -360,7 +360,7 @@ layer(testLayer, { timeout: "60 seconds" })("ClusterMetadata", (it) => { }, }); - const objectStore = yield* WingsClusterMetadata.getObjectStore({ + const objectStore = yield* ClusterClient.getObjectStore({ name: `tenants/${tenantId}/object-stores/${objectStoreId}`, }); @@ -370,7 +370,7 @@ layer(testLayer, { timeout: "60 seconds" })("ClusterMetadata", (it) => { it.effect("should list object stores", () => Effect.gen(function* () { - const response = yield* 
WingsClusterMetadata.listObjectStores({ + const response = yield* ClusterClient.listObjectStores({ parent: "tenants/default", pageSize: 100, }); @@ -387,9 +387,9 @@ layer(testLayer, { timeout: "60 seconds" })("ClusterMetadata", (it) => { const tenantId = makeId(); const dataLakeId = makeId(); - yield* WingsClusterMetadata.createTenant({ tenantId }); + yield* ClusterClient.createTenant({ tenantId }); - const dataLake = yield* WingsClusterMetadata.createDataLake({ + const dataLake = yield* ClusterClient.createDataLake({ parent: `tenants/${tenantId}`, dataLakeId, dataLakeConfig: { @@ -407,9 +407,9 @@ layer(testLayer, { timeout: "60 seconds" })("ClusterMetadata", (it) => { const tenantId = makeId(); const dataLakeId = makeId(); - yield* WingsClusterMetadata.createTenant({ tenantId }); + yield* ClusterClient.createTenant({ tenantId }); - const dataLake = yield* WingsClusterMetadata.createDataLake({ + const dataLake = yield* ClusterClient.createDataLake({ parent: `tenants/${tenantId}`, dataLakeId, dataLakeConfig: { @@ -427,9 +427,9 @@ layer(testLayer, { timeout: "60 seconds" })("ClusterMetadata", (it) => { const tenantId = makeId(); const dataLakeId = makeId(); - yield* WingsClusterMetadata.createTenant({ tenantId }); + yield* ClusterClient.createTenant({ tenantId }); - yield* WingsClusterMetadata.createDataLake({ + yield* ClusterClient.createDataLake({ parent: `tenants/${tenantId}`, dataLakeId, dataLakeConfig: { @@ -438,7 +438,7 @@ layer(testLayer, { timeout: "60 seconds" })("ClusterMetadata", (it) => { }, }); - const dataLake = yield* WingsClusterMetadata.getDataLake({ + const dataLake = yield* ClusterClient.getDataLake({ name: `tenants/${tenantId}/data-lakes/${dataLakeId}`, }); @@ -448,7 +448,7 @@ layer(testLayer, { timeout: "60 seconds" })("ClusterMetadata", (it) => { it.effect("should list data lakes", () => Effect.gen(function* () { - const response = yield* WingsClusterMetadata.listDataLakes({ + const response = yield* ClusterClient.listDataLakes({ parent: 
"tenants/default", pageSize: 100, }); @@ -461,12 +461,12 @@ layer(testLayer, { timeout: "60 seconds" })("ClusterMetadata", (it) => { describe("Error Handling", () => { it.effect("should handle connection errors gracefully", () => { - const errorLayer = WingsClusterMetadata.layer({ + const errorLayer = ClusterClient.layer({ host: "localhost:9999", // Non-existent port }); return Effect.gen(function* () { const exit = yield* Effect.exit( - WingsClusterMetadata.listTenants({ + ClusterClient.listTenants({ pageSize: 10, }), ); @@ -475,12 +475,12 @@ layer(testLayer, { timeout: "60 seconds" })("ClusterMetadata", (it) => { }).pipe(Effect.provide(errorLayer)); }); - it.effect("should catch ClusterMetadataError with Effect.catchTag", () => + it.effect("should catch ClusterClientError with Effect.catchTag", () => Effect.gen(function* () { - const result = yield* WingsClusterMetadata.getTenant({ + const result = yield* ClusterClient.getTenant({ name: "tenants/nonexistent", }).pipe( - Effect.catchTag("ClusterMetadataError", (error) => + Effect.catchTag("ClusterClientError", (error) => Effect.succeed({ name: "fallback-tenant", error: error.message }), ), ); diff --git a/packages/wings/test/fetcher.test.ts b/packages/wings/test/fetcher.test.ts index f5c8592..4af2de2 100644 --- a/packages/wings/test/fetcher.test.ts +++ b/packages/wings/test/fetcher.test.ts @@ -1,9 +1,9 @@ -import { describe, expect, it } from "@effect/vitest"; +import { expect, layer } from "@effect/vitest"; import { TestWings } from "@useairfoil/wings-testing"; import { Effect, Layer, Stream } from "effect"; import { customAlphabet } from "nanoid"; -import { arrowTableToRowColumns, PV, recordBatchToTable, WingsClient } from "../src"; +import { Arrow, Partition, WingsClient } from "../src"; import { makeTestBatch } from "./helpers"; const makeTopicId = customAlphabet("abcdefghijklmnopqrstuvwxyz", 12); @@ -19,13 +19,15 @@ const wingsLayer = Layer.effect(WingsClient.WingsClient)( }), ); -describe("Fetcher", () => { 
+const testLayer = wingsLayer.pipe(Layer.provide(TestWings.container)); + +layer(testLayer, { timeout: "30 seconds" })("Fetcher", (it) => { it.effect("should fetch data without partition key", () => it.flakyTest( Effect.gen(function* () { const topicId = makeTopicId(); const topic = yield* Effect.gen(function* () { - const cm = yield* WingsClient.clusterMetadata(); + const cm = yield* WingsClient.clusterClient; return yield* cm.createTopic({ parent: "tenants/default/namespaces/default", @@ -51,8 +53,8 @@ describe("Fetcher", () => { const batches = yield* stream.pipe(Stream.take(2), Stream.runCollect); - const table = recordBatchToTable([...batches]); - const { columns, rows } = arrowTableToRowColumns(table); + const table = Arrow.recordBatchToTable([...batches]); + const { columns, rows } = Arrow.arrowTableToRowColumns(table); expect(rows).toMatchObject([ { __offset__: 0n, my_field: 1 }, @@ -74,7 +76,7 @@ describe("Fetcher", () => { name: "__offset__", type: "Uint64", }); - }).pipe(Effect.provide(wingsLayer), Effect.provide(TestWings.container)), + }), "30 second", ), ); @@ -85,7 +87,7 @@ describe("Fetcher", () => { Effect.gen(function* () { const topicId = makeTopicId(); const topic = yield* Effect.gen(function* () { - const cm = yield* WingsClient.clusterMetadata(); + const cm = yield* WingsClient.clusterClient; return yield* cm.createTopic({ parent: "tenants/default/namespaces/default", topicId, @@ -106,23 +108,23 @@ describe("Fetcher", () => { yield* publisher.push({ batch: makeTestBatch({ partitionValue: 1000 }), - partitionValue: PV.int32(1000), + partitionValue: Partition.PV.int32(1000), }); yield* publisher.push({ batch: makeTestBatch({ partitionValue: 2000 }), - partitionValue: PV.int32(2000), + partitionValue: Partition.PV.int32(2000), }); const streamP1 = yield* WingsClient.fetch({ topic, - partitionValue: PV.int32(1000), + partitionValue: Partition.PV.int32(1000), offset: 0n, }); const batchesP1 = yield* streamP1.pipe(Stream.take(1), Stream.runCollect); 
- const tableP1 = recordBatchToTable([...batchesP1]); - const { rows: rowsP1 } = arrowTableToRowColumns(tableP1); + const tableP1 = Arrow.recordBatchToTable([...batchesP1]); + const { rows: rowsP1 } = Arrow.arrowTableToRowColumns(tableP1); expect(rowsP1).toMatchObject([ { __offset__: 0n, my_field: 1, my_part: 1000 }, @@ -133,14 +135,14 @@ describe("Fetcher", () => { const streamP2 = yield* WingsClient.fetch({ topic, - partitionValue: PV.int32(2000), + partitionValue: Partition.PV.int32(2000), offset: 0n, }); const batchesP2 = yield* streamP2.pipe(Stream.take(1), Stream.runCollect); - const tableP2 = recordBatchToTable([...batchesP2]); - const { rows: rowsP2 } = arrowTableToRowColumns(tableP2); + const tableP2 = Arrow.recordBatchToTable([...batchesP2]); + const { rows: rowsP2 } = Arrow.arrowTableToRowColumns(tableP2); expect(rowsP2).toMatchObject([ { __offset__: 0n, my_field: 1, my_part: 2000 }, @@ -148,7 +150,7 @@ describe("Fetcher", () => { { __offset__: 2n, my_field: 3, my_part: 2000 }, { __offset__: 3n, my_field: 4, my_part: 2000 }, ]); - }).pipe(Effect.provide(wingsLayer), Effect.provide(TestWings.container)), + }), "30 second", ), ); @@ -158,7 +160,7 @@ describe("Fetcher", () => { Effect.gen(function* () { const topicId = makeTopicId(); const topic = yield* Effect.gen(function* () { - const cm = yield* WingsClient.clusterMetadata(); + const cm = yield* WingsClient.clusterClient; return yield* cm.createTopic({ parent: "tenants/default/namespaces/default", topicId, @@ -189,7 +191,7 @@ describe("Fetcher", () => { expect(batches.length).toBeGreaterThan(0); const firstBatch = batches[0]; expect(firstBatch.numRows).toBeGreaterThan(0); - }).pipe(Effect.provide(wingsLayer), Effect.provide(TestWings.container)), + }), "30 second", ), ); @@ -199,7 +201,7 @@ describe("Fetcher", () => { Effect.gen(function* () { const topicId = makeTopicId(); const topic = yield* Effect.gen(function* () { - const cm = yield* WingsClient.clusterMetadata(); + const cm = yield* 
WingsClient.clusterClient; return yield* cm.createTopic({ parent: "tenants/default/namespaces/default", topicId, @@ -224,12 +226,12 @@ describe("Fetcher", () => { const batches = yield* stream.pipe(Stream.take(1), Stream.runCollect); - const table = recordBatchToTable([...batches]); - const { rows } = arrowTableToRowColumns(table); + const table = Arrow.recordBatchToTable([...batches]); + const { rows } = Arrow.arrowTableToRowColumns(table); // Should start from offset 2 expect(rows[0].__offset__).toBeGreaterThanOrEqual(2n); - }).pipe(Effect.provide(wingsLayer), Effect.provide(TestWings.container)), + }), "30 second", ), ); @@ -240,7 +242,7 @@ describe("Fetcher", () => { const topicId = makeTopicId(); const topic = yield* Effect.gen(function* () { - const cm = yield* WingsClient.clusterMetadata(); + const cm = yield* WingsClient.clusterClient; return yield* cm.createTopic({ parent: "tenants/default/namespaces/default", topicId, @@ -267,12 +269,12 @@ describe("Fetcher", () => { expect(batches.length).toBe(2); - const table = recordBatchToTable([...batches]); - const { rows } = arrowTableToRowColumns(table); + const table = Arrow.recordBatchToTable([...batches]); + const { rows } = Arrow.arrowTableToRowColumns(table); // Should have 8 rows total (2 batches * 4 rows each) expect(rows).toHaveLength(8); - }).pipe(Effect.provide(wingsLayer), Effect.provide(TestWings.container)), + }), "30 second", ), ); diff --git a/packages/wings/test/publisher.test.ts b/packages/wings/test/publisher.test.ts index d5f7f43..c866448 100644 --- a/packages/wings/test/publisher.test.ts +++ b/packages/wings/test/publisher.test.ts @@ -1,9 +1,9 @@ -import { describe, expect, it } from "@effect/vitest"; +import { expect, layer } from "@effect/vitest"; import { TestWings } from "@useairfoil/wings-testing"; import { Effect, Layer } from "effect"; import { customAlphabet } from "nanoid"; -import { PV, WingsClient } from "../src"; +import { Partition, WingsClient } from "../src"; import { 
makeTestBatch } from "./helpers"; const makeTopicId = customAlphabet("abcdefghijklmnopqrstuvwxyz", 12); @@ -19,14 +19,16 @@ const wingsLayer = Layer.effect(WingsClient.WingsClient)( }), ); -describe("Publisher", () => { +const testLayer = wingsLayer.pipe(Layer.provide(TestWings.container)); + +layer(testLayer, { timeout: "30 seconds" })("Publisher", (it) => { it.effect("should push data without partition values", () => it.flakyTest( Effect.gen(function* () { const topicId = makeTopicId(); const results = yield* Effect.gen(function* () { const topic = yield* Effect.gen(function* () { - const cm = yield* WingsClient.clusterMetadata(); + const cm = yield* WingsClient.clusterClient; return yield* cm.createTopic({ parent: "tenants/default/namespaces/default", topicId, @@ -85,7 +87,7 @@ describe("Publisher", () => { }, }, }); - }).pipe(Effect.provide(wingsLayer), Effect.provide(TestWings.container)), + }), "30 second", ), ); @@ -97,7 +99,7 @@ describe("Publisher", () => { const results = yield* Effect.gen(function* () { const topic = yield* Effect.gen(function* () { - const cm = yield* WingsClient.clusterMetadata(); + const cm = yield* WingsClient.clusterClient; return yield* cm.createTopic({ parent: "tenants/default/namespaces/default", topicId, @@ -118,15 +120,15 @@ describe("Publisher", () => { const b0 = publisher.push({ batch: makeTestBatch({ partitionValue: 1000 }), - partitionValue: PV.int32(1000), + partitionValue: Partition.PV.int32(1000), }); const b1 = publisher.push({ batch: makeTestBatch({ partitionValue: 2000 }), - partitionValue: PV.int32(2000), + partitionValue: Partition.PV.int32(2000), }); const b2 = publisher.push({ batch: makeTestBatch({ partitionValue: 3000 }), - partitionValue: PV.int32(3000), + partitionValue: Partition.PV.int32(3000), }); return yield* Effect.all([b0, b1, b2], { @@ -169,7 +171,7 @@ describe("Publisher", () => { }, }, }); - }).pipe(Effect.provide(wingsLayer), Effect.provide(TestWings.container)), + }), "30 second", ), ); @@ -181,7 
+183,7 @@ describe("Publisher", () => { const results = yield* Effect.gen(function* () { const topic = yield* Effect.gen(function* () { - const cm = yield* WingsClient.clusterMetadata(); + const cm = yield* WingsClient.clusterClient; return yield* cm.createTopic({ parent: "tenants/default/namespaces/default", topicId, @@ -200,7 +202,7 @@ describe("Publisher", () => { const publisher = yield* WingsClient.publisher({ topic, - partitionValue: PV.int32(5000), + partitionValue: Partition.PV.int32(5000), }); const b0 = publisher.push({ @@ -209,7 +211,7 @@ describe("Publisher", () => { const b1 = publisher.push({ batch: makeTestBatch({ partitionValue: 6000 }), - partitionValue: PV.int32(6000), + partitionValue: Partition.PV.int32(6000), }); return yield* Effect.all([b0, b1], { concurrency: "unbounded" }); @@ -217,7 +219,7 @@ describe("Publisher", () => { expect(results[0].result?.$case).toBe("accepted"); expect(results[1].result?.$case).toBe("accepted"); - }).pipe(Effect.provide(wingsLayer), Effect.provide(TestWings.container)), + }), "30 second", ), ); @@ -229,7 +231,7 @@ describe("Publisher", () => { const results = yield* Effect.gen(function* () { const topic = yield* Effect.gen(function* () { - const cm = yield* WingsClient.clusterMetadata(); + const cm = yield* WingsClient.clusterClient; return yield* cm.createTopic({ parent: "tenants/default/namespaces/default", topicId, @@ -255,7 +257,7 @@ describe("Publisher", () => { for (const result of results) { expect(result.result?.$case).toBe("accepted"); } - }).pipe(Effect.provide(wingsLayer), Effect.provide(TestWings.container)), + }), "30 second", ), ); diff --git a/packages/wings/test/setup.ts b/packages/wings/test/setup.ts new file mode 100644 index 0000000..24c1917 --- /dev/null +++ b/packages/wings/test/setup.ts @@ -0,0 +1,3 @@ +import { afterEach } from "vitest"; + +afterEach(() => new Promise((resolve) => setTimeout(resolve, 200))); diff --git a/packages/wings/test/wings-converter.test.ts 
b/packages/wings/test/wings-converter.test.ts index a9687f1..58eb3a0 100644 --- a/packages/wings/test/wings-converter.test.ts +++ b/packages/wings/test/wings-converter.test.ts @@ -2,81 +2,79 @@ import { Schema } from "effect"; import { describe, expect, it } from "vitest"; import { TimeUnit } from "../src/cluster/arrow-type"; -import { - FieldId, - FieldMetadata, - SchemaMetadata, - schemaConverter, - WingsBinary, - WingsBool, - WingsDate32, - WingsDate64, - WingsDuration, - WingsFloat16, - WingsFloat32, - WingsFloat64, - WingsInt8, - WingsInt16, - WingsInt32, - WingsInt64, - WingsList, - WingsNullOr, - WingsString, - WingsStruct, - WingsTimestamp, - WingsUInt8, - WingsUInt16, - WingsUInt32, - WingsUInt64, -} from "../src/schema"; +import { FieldId, FieldMetadata, SchemaMetadata, convertSchema, Types } from "../src/schema"; -describe("schemaConverter", () => { +const { + Binary, + Bool, + Date32, + Date64, + Duration, + Float16, + Float32, + Float64, + Int8, + Int16, + Int32, + Int64, + List, + NullOr, + String, + Struct, + Timestamp, + UInt8, + UInt16, + UInt32, + UInt64, +} = Types; + +describe("convertSchema", () => { it("converts Wings schemas to ArrowSchema", () => { - const Customer = WingsStruct({ - id: WingsString.annotate({ + const Customer = Struct({ + id: String.annotate({ [FieldId]: 1n, [FieldMetadata]: { pii: "true" }, }), - active: WingsNullOr(WingsBool).annotate({ + active: NullOr(Bool).annotate({ [FieldId]: 18n, }), - activePre: WingsNullOr( - WingsBool.annotate({ + activePre: NullOr( + Bool.annotate({ [FieldId]: 118n, [FieldMetadata]: { source: "base" }, }), ), - payload: WingsBinary.annotate({ [FieldId]: 2n }), - u8: WingsUInt8.annotate({ [FieldId]: 3n }), - i8: WingsInt8.annotate({ [FieldId]: 4n }), - u16: WingsUInt16.annotate({ [FieldId]: 5n }), - i16: WingsInt16.annotate({ [FieldId]: 6n }), - u32: WingsUInt32.annotate({ [FieldId]: 7n }), - i32: WingsInt32.annotate({ [FieldId]: 8n }), - u64: WingsUInt64.annotate({ [FieldId]: 9n }), - i64: 
WingsInt64.annotate({ [FieldId]: 10n }), - f16: WingsFloat16.annotate({ [FieldId]: 11n }), - f32: WingsFloat32.annotate({ [FieldId]: 12n }), - f64: WingsFloat64.annotate({ [FieldId]: 13n }), - date32: WingsDate32.annotate({ [FieldId]: 14n }), - date64: WingsDate64.annotate({ [FieldId]: 15n }), - createdAt: WingsTimestamp(TimeUnit.MILLISECOND, "UTC").annotate({ + payload: Binary.annotate({ [FieldId]: 2n }), + u8: UInt8.annotate({ [FieldId]: 3n }), + i8: Int8.annotate({ [FieldId]: 4n }), + u16: UInt16.annotate({ [FieldId]: 5n }), + i16: Int16.annotate({ [FieldId]: 6n }), + u32: UInt32.annotate({ [FieldId]: 7n }), + i32: Int32.annotate({ [FieldId]: 8n }), + u64: UInt64.annotate({ [FieldId]: 9n }), + i64: Int64.annotate({ [FieldId]: 10n }), + f16: Float16.annotate({ [FieldId]: 11n }), + f32: Float32.annotate({ [FieldId]: 12n }), + f64: Float64.annotate({ [FieldId]: 13n }), + date32: Date32.annotate({ [FieldId]: 14n }), + date64: Date64.annotate({ [FieldId]: 15n }), + createdAt: Timestamp(TimeUnit.MILLISECOND, "UTC").annotate({ [FieldId]: 16n, }), - elapsed: WingsDuration(TimeUnit.SECOND).annotate({ [FieldId]: 17n }), - tags: WingsList( - WingsString.annotate({ + elapsed: Duration(TimeUnit.SECOND).annotate({ [FieldId]: 17n }), + tags: List( + String.annotate({ [FieldId]: 190n, [FieldMetadata]: { tag: "true" }, }), ).annotate({ [FieldId]: 19n }), - address: WingsStruct({ - city: WingsString.annotate({ [FieldId]: 20n }), - zip: WingsUInt32.annotate({ [FieldId]: 21n }), + address: Struct({ + city: String.annotate({ [FieldId]: 20n }), + zip: UInt32.annotate({ [FieldId]: 21n }), }).annotate({ [FieldId]: 22n }), }).annotate({ [SchemaMetadata]: { source: "test" } }); - const result = schemaConverter(Customer); + const result = convertSchema(Customer); expect(result).toMatchInlineSnapshot(` { @@ -323,11 +321,11 @@ describe("schemaConverter", () => { }); it("throws when FieldId is missing", () => { - const Missing = WingsStruct({ - name: WingsString, + const Missing = Struct({ + 
name: String, }); - expect(() => schemaConverter(Missing)).toThrow("Missing FieldId annotation"); + expect(() => convertSchema(Missing)).toThrow("Missing FieldId annotation"); }); it("throws on unsupported Effect schema", () => { @@ -335,6 +333,6 @@ describe("schemaConverter", () => { count: Schema.Number.annotate({ [FieldId]: 1n }), }); - expect(() => schemaConverter(Unsupported)).toThrow("Unsupported schema"); + expect(() => convertSchema(Unsupported)).toThrow("Unsupported schema"); }); }); diff --git a/packages/wings/tsdown.config.ts b/packages/wings/tsdown.config.ts index 62bbe54..f4dd5d8 100644 --- a/packages/wings/tsdown.config.ts +++ b/packages/wings/tsdown.config.ts @@ -1,7 +1,17 @@ import { defineConfig } from "tsdown"; export default defineConfig({ - entry: ["src/index.ts", "src/cluster/index.ts", "src/schema/index.ts"], + entry: [ + "src/index.ts", + "src/arrow/index.ts", + "src/cluster/index.ts", + "src/cluster-client/index.ts", + "src/data-plane/index.ts", + "src/errors/index.ts", + "src/partition-value.ts", + "src/schema/index.ts", + "src/topic.ts", + ], format: ["esm"], dts: true, sourcemap: true, diff --git a/packages/wings/vitest.config.ts b/packages/wings/vitest.config.ts index 50ca1af..64c9445 100644 --- a/packages/wings/vitest.config.ts +++ b/packages/wings/vitest.config.ts @@ -5,5 +5,7 @@ export default defineConfig({ fileParallelism: false, testTimeout: 60_000, hookTimeout: 60_000, + // Add a setup file to wait for the container to be ready and closed + setupFiles: ["./test/setup.ts"], }, }); From 53a711943f0c583d3eb850fc53d9b534285f8ef4 Mon Sep 17 00:00:00 2001 From: jaipaljadeja Date: Wed, 29 Apr 2026 16:08:15 +0530 Subject: [PATCH 03/12] cli: refactor --- packages/cli/package.json | 5 +- .../cluster/data-lake/create-iceberg.ts | 29 ++--- .../cluster/data-lake/create-parquet.ts | 29 ++--- .../src/commands/cluster/data-lake/create.ts | 4 +- .../src/commands/cluster/data-lake/delete.ts | 20 +-- .../cli/src/commands/cluster/data-lake/get.ts | 
18 +-- .../src/commands/cluster/data-lake/list.ts | 32 +++-- packages/cli/src/commands/cluster/index.ts | 40 +++--- .../src/commands/cluster/namespace/create.ts | 31 ++--- .../src/commands/cluster/namespace/delete.ts | 16 ++- .../cli/src/commands/cluster/namespace/get.ts | 18 +-- .../src/commands/cluster/namespace/list.ts | 44 +++---- .../cluster/object-store/create-aws.ts | 28 ++--- .../cluster/object-store/create-azure.ts | 27 ++-- .../cluster/object-store/create-google.ts | 18 +-- .../cluster/object-store/create-s3.ts | 25 +--- .../commands/cluster/object-store/create.ts | 8 +- .../commands/cluster/object-store/delete.ts | 20 +-- .../src/commands/cluster/object-store/get.ts | 20 ++- .../src/commands/cluster/object-store/list.ts | 32 +++-- .../cli/src/commands/cluster/tenant/create.ts | 22 ++-- .../cli/src/commands/cluster/tenant/delete.ts | 14 +-- .../cli/src/commands/cluster/tenant/get.ts | 18 +-- .../cli/src/commands/cluster/tenant/list.ts | 28 ++--- .../cli/src/commands/cluster/topic/create.ts | 35 ++---- .../cli/src/commands/cluster/topic/delete.ts | 24 ++-- .../cli/src/commands/cluster/topic/get.ts | 24 ++-- .../cli/src/commands/cluster/topic/list.ts | 28 +++-- packages/cli/src/commands/dev.ts | 26 ++-- packages/cli/src/commands/sql.ts | 34 ++--- packages/cli/src/index.ts | 5 +- packages/cli/src/utils/client.ts | 6 +- packages/cli/src/utils/docker.ts | 19 ++- packages/cli/src/utils/wings.ts | 32 ++--- packages/cli/test-commands.sh | 116 ++++++++++-------- 35 files changed, 411 insertions(+), 484 deletions(-) diff --git a/packages/cli/package.json b/packages/cli/package.json index bd82a83..3739b2d 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -11,7 +11,10 @@ "README.md" ], "type": "module", - "module": "index.ts", + "main": "./dist/index.js", + "exports": { + ".": "./dist/index.js" + }, "scripts": { "build": "tsdown", "typecheck": "tsc --noEmit", diff --git a/packages/cli/src/commands/cluster/data-lake/create-iceberg.ts 
b/packages/cli/src/commands/cluster/data-lake/create-iceberg.ts index 32aacb9..d6a9011 100644 --- a/packages/cli/src/commands/cluster/data-lake/create-iceberg.ts +++ b/packages/cli/src/commands/cluster/data-lake/create-iceberg.ts @@ -1,11 +1,11 @@ import * as p from "@clack/prompts"; -import { WingsClusterMetadata } from "@useairfoil/wings"; +import { ClusterClient } from "@useairfoil/wings"; import { printTable } from "console-table-printer"; import { Effect } from "effect"; import { Command, Flag } from "effect/unstable/cli"; -import { makeClusterMetadataLayer } from "../../../utils/client.js"; -import { hostOption, portOption } from "../../../utils/options.js"; +import { makeClusterClientLayer } from "../../../utils/client"; +import { hostOption, portOption } from "../../../utils/options"; const parentOption = Flag.string("parent").pipe( Flag.withDescription("Parent tenant in format: tenants/{tenant}"), @@ -23,37 +23,30 @@ export const createDataLakeIcebergCommand = Command.make( host: hostOption, port: portOption, }, - ({ parent, dataLakeId, host, port }) => + ({ parent, dataLakeId }) => Effect.gen(function* () { p.intro("🏞️ Create Iceberg Data Lake"); - const layer = makeClusterMetadataLayer(host, port); - const s = p.spinner(); s.start("Creating Iceberg data lake..."); - const result = yield* WingsClusterMetadata.createDataLake({ + const result = yield* ClusterClient.createDataLake({ parent, dataLakeId, dataLakeConfig: { _tag: "iceberg", iceberg: {}, }, - }).pipe( - Effect.provide(layer), - Effect.tapError(() => Effect.sync(() => s.stop("Failed to create data lake"))), - ); + }).pipe(Effect.tapError(() => Effect.sync(() => s.stop("Failed to create data lake")))); s.stop("Iceberg data lake created successfully"); yield* Effect.sync(() => { - printTable([ - { - name: result.name, - type: "Iceberg", - }, - ]); + printTable([{ name: result.name, type: "Iceberg" }]); p.outro("✓ Done"); }); }), -).pipe(Command.withDescription("Create a new Iceberg data lake")); 
+).pipe( + Command.withDescription("Create a new Iceberg data lake"), + Command.provide(({ host, port }) => makeClusterClientLayer(host, port)), +); diff --git a/packages/cli/src/commands/cluster/data-lake/create-parquet.ts b/packages/cli/src/commands/cluster/data-lake/create-parquet.ts index 8f83f36..75f652a 100644 --- a/packages/cli/src/commands/cluster/data-lake/create-parquet.ts +++ b/packages/cli/src/commands/cluster/data-lake/create-parquet.ts @@ -1,11 +1,11 @@ import * as p from "@clack/prompts"; -import { WingsClusterMetadata } from "@useairfoil/wings"; +import { ClusterClient } from "@useairfoil/wings"; import { printTable } from "console-table-printer"; import { Effect } from "effect"; import { Command, Flag } from "effect/unstable/cli"; -import { makeClusterMetadataLayer } from "../../../utils/client.js"; -import { hostOption, portOption } from "../../../utils/options.js"; +import { makeClusterClientLayer } from "../../../utils/client"; +import { hostOption, portOption } from "../../../utils/options"; const parentOption = Flag.string("parent").pipe( Flag.withDescription("Parent tenant in format: tenants/{tenant}"), @@ -23,37 +23,30 @@ export const createDataLakeParquetCommand = Command.make( host: hostOption, port: portOption, }, - ({ parent, dataLakeId, host, port }) => + ({ parent, dataLakeId }) => Effect.gen(function* () { p.intro("🏞️ Create Parquet Data Lake"); - const layer = makeClusterMetadataLayer(host, port); - const s = p.spinner(); s.start("Creating Parquet data lake..."); - const result = yield* WingsClusterMetadata.createDataLake({ + const result = yield* ClusterClient.createDataLake({ parent, dataLakeId, dataLakeConfig: { _tag: "parquet", parquet: {}, }, - }).pipe( - Effect.provide(layer), - Effect.tapError(() => Effect.sync(() => s.stop("Failed to create data lake"))), - ); + }).pipe(Effect.tapError(() => Effect.sync(() => s.stop("Failed to create data lake")))); s.stop("Parquet data lake created successfully"); yield* Effect.sync(() => { 
- printTable([ - { - name: result.name, - type: "Parquet", - }, - ]); + printTable([{ name: result.name, type: "Parquet" }]); p.outro("✓ Done"); }); }), -).pipe(Command.withDescription("Create a new Parquet data lake")); +).pipe( + Command.withDescription("Create a new Parquet data lake"), + Command.provide(({ host, port }) => makeClusterClientLayer(host, port)), +); diff --git a/packages/cli/src/commands/cluster/data-lake/create.ts b/packages/cli/src/commands/cluster/data-lake/create.ts index 83315b3..96ac287 100644 --- a/packages/cli/src/commands/cluster/data-lake/create.ts +++ b/packages/cli/src/commands/cluster/data-lake/create.ts @@ -1,8 +1,8 @@ import { Effect } from "effect"; import { Command } from "effect/unstable/cli"; -import { createDataLakeIcebergCommand } from "./create-iceberg.js"; -import { createDataLakeParquetCommand } from "./create-parquet.js"; +import { createDataLakeIcebergCommand } from "./create-iceberg"; +import { createDataLakeParquetCommand } from "./create-parquet"; export const createDataLakeCommand = Command.make("create-data-lake", {}, () => Effect.void).pipe( Command.withDescription("Create a new data lake"), diff --git a/packages/cli/src/commands/cluster/data-lake/delete.ts b/packages/cli/src/commands/cluster/data-lake/delete.ts index cbd2d32..6b6e65c 100644 --- a/packages/cli/src/commands/cluster/data-lake/delete.ts +++ b/packages/cli/src/commands/cluster/data-lake/delete.ts @@ -1,10 +1,10 @@ import * as p from "@clack/prompts"; -import { WingsClusterMetadata } from "@useairfoil/wings"; +import { ClusterClient } from "@useairfoil/wings"; import { Effect } from "effect"; import { Command, Flag } from "effect/unstable/cli"; -import { makeClusterMetadataLayer } from "../../../utils/client.js"; -import { forceOption, hostOption, portOption } from "../../../utils/options.js"; +import { makeClusterClientLayer } from "../../../utils/client"; +import { forceOption, hostOption, portOption } from "../../../utils/options"; const nameOption = 
Flag.string("name").pipe( Flag.withDescription("Data lake name in format: tenants/{tenant}/data-lakes/{data-lake}"), @@ -18,7 +18,7 @@ export const deleteDataLakeCommand = Command.make( host: hostOption, port: portOption, }, - ({ name, force, host, port }) => + ({ name, force }) => Effect.gen(function* () { p.intro("🗑️ Delete Data Lake"); @@ -34,21 +34,21 @@ export const deleteDataLakeCommand = Command.make( if (p.isCancel(confirm) || !confirm) { p.cancel("Deletion cancelled"); - process.exit(0); + return; } } - const layer = makeClusterMetadataLayer(host, port); - const s = p.spinner(); s.start("Deleting data lake..."); - yield* WingsClusterMetadata.deleteDataLake({ name }).pipe( - Effect.provide(layer), + yield* ClusterClient.deleteDataLake({ name }).pipe( Effect.tapError(() => Effect.sync(() => s.stop("Failed to delete data lake"))), ); s.stop("Data lake deleted successfully"); p.outro("✓ Done"); }), -).pipe(Command.withDescription("Delete a data lake from the cluster")); +).pipe( + Command.withDescription("Delete a data lake from the cluster"), + Command.provide(({ host, port }) => makeClusterClientLayer(host, port)), +); diff --git a/packages/cli/src/commands/cluster/data-lake/get.ts b/packages/cli/src/commands/cluster/data-lake/get.ts index f998576..8b0e12a 100644 --- a/packages/cli/src/commands/cluster/data-lake/get.ts +++ b/packages/cli/src/commands/cluster/data-lake/get.ts @@ -1,11 +1,11 @@ import * as p from "@clack/prompts"; -import { WingsClusterMetadata } from "@useairfoil/wings"; +import { ClusterClient } from "@useairfoil/wings"; import { printTable } from "console-table-printer"; import { Effect } from "effect"; import { Command, Flag } from "effect/unstable/cli"; -import { makeClusterMetadataLayer } from "../../../utils/client.js"; -import { hostOption, portOption } from "../../../utils/options.js"; +import { makeClusterClientLayer } from "../../../utils/client"; +import { hostOption, portOption } from "../../../utils/options"; const nameOption = 
Flag.string("name").pipe( Flag.withDescription("Data lake name in format: tenants/{tenant}/data-lakes/{data-lake}"), @@ -18,15 +18,12 @@ export const getDataLakeCommand = Command.make( host: hostOption, port: portOption, }, - ({ name, host, port }) => + ({ name }) => Effect.gen(function* () { - const layer = makeClusterMetadataLayer(host, port); - const s = p.spinner(); s.start("Fetching data lake..."); - const dataLake = yield* WingsClusterMetadata.getDataLake({ name }).pipe( - Effect.provide(layer), + const dataLake = yield* ClusterClient.getDataLake({ name }).pipe( Effect.tapError(() => Effect.sync(() => s.stop("Failed to get data lake"))), ); @@ -37,4 +34,7 @@ export const getDataLakeCommand = Command.make( p.outro("✓ Done"); }); }), -).pipe(Command.withDescription("Get details of a specific data lake")); +).pipe( + Command.withDescription("Get details of a specific data lake"), + Command.provide(({ host, port }) => makeClusterClientLayer(host, port)), +); diff --git a/packages/cli/src/commands/cluster/data-lake/list.ts b/packages/cli/src/commands/cluster/data-lake/list.ts index 5569bc1..b67e0d5 100644 --- a/packages/cli/src/commands/cluster/data-lake/list.ts +++ b/packages/cli/src/commands/cluster/data-lake/list.ts @@ -1,11 +1,11 @@ import * as p from "@clack/prompts"; -import { WingsClusterMetadata } from "@useairfoil/wings"; +import { ClusterClient } from "@useairfoil/wings"; import { printTable } from "console-table-printer"; import { Effect, Option } from "effect"; import { Command, Flag } from "effect/unstable/cli"; -import { makeClusterMetadataLayer } from "../../../utils/client.js"; -import { hostOption, pageSizeOption, pageTokenOption, portOption } from "../../../utils/options.js"; +import { makeClusterClientLayer } from "../../../utils/client"; +import { hostOption, pageSizeOption, pageTokenOption, portOption } from "../../../utils/options"; const parentOption = Flag.string("parent").pipe( Flag.withDescription("Parent tenant in format: 
tenants/{tenant}"), @@ -20,21 +20,16 @@ export const listDataLakesCommand = Command.make( host: hostOption, port: portOption, }, - ({ parent, pageSize, pageToken, host, port }) => + ({ parent, pageSize, pageToken }) => Effect.gen(function* () { - const layer = makeClusterMetadataLayer(host, port); - const s = p.spinner(); s.start("Fetching data lakes..."); - const response = yield* WingsClusterMetadata.listDataLakes({ + const response = yield* ClusterClient.listDataLakes({ parent, pageSize, pageToken: Option.getOrUndefined(pageToken), - }).pipe( - Effect.provide(layer), - Effect.tapError(() => Effect.sync(() => s.stop("Failed to list data lakes"))), - ); + }).pipe(Effect.tapError(() => Effect.sync(() => s.stop("Failed to list data lakes")))); s.stop(`Found ${response.dataLakes.length} data lake(s)`); @@ -43,12 +38,10 @@ export const listDataLakesCommand = Command.make( p.log.warn("No data lakes found"); } else { printTable( - response.dataLakes.map( - (dataLake: { name: string; dataLakeConfig: { _tag?: string | null } }) => ({ - name: dataLake.name, - type: dataLake.dataLakeConfig._tag || "-", - }), - ), + response.dataLakes.map((dataLake) => ({ + name: dataLake.name, + type: dataLake.dataLakeConfig._tag || "-", + })), ); } @@ -59,4 +52,7 @@ export const listDataLakesCommand = Command.make( p.outro("✓ Done"); }); }), -).pipe(Command.withDescription("List all data lakes belonging to a tenant")); +).pipe( + Command.withDescription("List all data lakes belonging to a tenant"), + Command.provide(({ host, port }) => makeClusterClientLayer(host, port)), +); diff --git a/packages/cli/src/commands/cluster/index.ts b/packages/cli/src/commands/cluster/index.ts index c0503f2..6e5d0c8 100644 --- a/packages/cli/src/commands/cluster/index.ts +++ b/packages/cli/src/commands/cluster/index.ts @@ -1,26 +1,26 @@ import { Effect } from "effect"; import { Command } from "effect/unstable/cli"; -import { createDataLakeCommand } from "./data-lake/create.js"; -import { 
deleteDataLakeCommand } from "./data-lake/delete.js"; -import { getDataLakeCommand } from "./data-lake/get.js"; -import { listDataLakesCommand } from "./data-lake/list.js"; -import { createNamespaceCommand } from "./namespace/create.js"; -import { deleteNamespaceCommand } from "./namespace/delete.js"; -import { getNamespaceCommand } from "./namespace/get.js"; -import { listNamespacesCommand } from "./namespace/list.js"; -import { createObjectStoreCommand } from "./object-store/create.js"; -import { deleteObjectStoreCommand } from "./object-store/delete.js"; -import { getObjectStoreCommand } from "./object-store/get.js"; -import { listObjectStoresCommand } from "./object-store/list.js"; -import { createTenantCommand } from "./tenant/create.js"; -import { deleteTenantCommand } from "./tenant/delete.js"; -import { getTenantCommand } from "./tenant/get.js"; -import { listTenantsCommand } from "./tenant/list.js"; -import { createTopicCommand } from "./topic/create.js"; -import { deleteTopicCommand } from "./topic/delete.js"; -import { getTopicCommand } from "./topic/get.js"; -import { listTopicsCommand } from "./topic/list.js"; +import { createDataLakeCommand } from "./data-lake/create"; +import { deleteDataLakeCommand } from "./data-lake/delete"; +import { getDataLakeCommand } from "./data-lake/get"; +import { listDataLakesCommand } from "./data-lake/list"; +import { createNamespaceCommand } from "./namespace/create"; +import { deleteNamespaceCommand } from "./namespace/delete"; +import { getNamespaceCommand } from "./namespace/get"; +import { listNamespacesCommand } from "./namespace/list"; +import { createObjectStoreCommand } from "./object-store/create"; +import { deleteObjectStoreCommand } from "./object-store/delete"; +import { getObjectStoreCommand } from "./object-store/get"; +import { listObjectStoresCommand } from "./object-store/list"; +import { createTenantCommand } from "./tenant/create"; +import { deleteTenantCommand } from "./tenant/delete"; +import { 
getTenantCommand } from "./tenant/get"; +import { listTenantsCommand } from "./tenant/list"; +import { createTopicCommand } from "./topic/create"; +import { deleteTopicCommand } from "./topic/delete"; +import { getTopicCommand } from "./topic/get"; +import { listTopicsCommand } from "./topic/list"; export const clusterCommand = Command.make("cluster", {}, () => Effect.void).pipe( Command.withDescription("Interact with the cluster metadata server"), diff --git a/packages/cli/src/commands/cluster/namespace/create.ts b/packages/cli/src/commands/cluster/namespace/create.ts index f83e525..7b77986 100644 --- a/packages/cli/src/commands/cluster/namespace/create.ts +++ b/packages/cli/src/commands/cluster/namespace/create.ts @@ -1,11 +1,11 @@ import * as p from "@clack/prompts"; -import { WingsClusterMetadata } from "@useairfoil/wings"; +import { ClusterClient } from "@useairfoil/wings"; import { printTable } from "console-table-printer"; import { Effect } from "effect"; import { Command, Flag } from "effect/unstable/cli"; -import { makeClusterMetadataLayer } from "../../../utils/client.js"; -import { hostOption, portOption } from "../../../utils/options.js"; +import { makeClusterClientLayer } from "../../../utils/client"; +import { hostOption, portOption } from "../../../utils/options"; const parentOption = Flag.string("parent").pipe( Flag.withDescription("Parent tenant in format: tenants/{tenant} (e.g., 'tenants/default')"), @@ -49,35 +49,21 @@ export const createNamespaceCommand = Command.make( host: hostOption, port: portOption, }, - ({ - parent, - namespaceId, - flushSizeBytes, - flushIntervalMillis, - objectStore, - dataLake, - host, - port, - }) => + ({ parent, namespaceId, flushSizeBytes, flushIntervalMillis, objectStore, dataLake }) => Effect.gen(function* () { p.intro("📁 Create Namespace"); - const layer = makeClusterMetadataLayer(host, port); - const s = p.spinner(); s.start("Creating namespace..."); - const namespace = yield* 
WingsClusterMetadata.createNamespace({ + const namespace = yield* ClusterClient.createNamespace({ parent, namespaceId, flushSizeBytes: BigInt(flushSizeBytes), flushIntervalMillis: BigInt(flushIntervalMillis), objectStore, dataLake, - }).pipe( - Effect.provide(layer), - Effect.tapError(() => Effect.sync(() => s.stop("Failed to create namespace"))), - ); + }).pipe(Effect.tapError(() => Effect.sync(() => s.stop("Failed to create namespace")))); s.stop("Namespace created successfully"); @@ -94,4 +80,7 @@ export const createNamespaceCommand = Command.make( p.outro("✓ Done"); }); }), -).pipe(Command.withDescription("Create a new namespace belonging to a tenant")); +).pipe( + Command.withDescription("Create a new namespace belonging to a tenant"), + Command.provide(({ host, port }) => makeClusterClientLayer(host, port)), +); diff --git a/packages/cli/src/commands/cluster/namespace/delete.ts b/packages/cli/src/commands/cluster/namespace/delete.ts index b21bbe3..0d4f901 100644 --- a/packages/cli/src/commands/cluster/namespace/delete.ts +++ b/packages/cli/src/commands/cluster/namespace/delete.ts @@ -1,10 +1,10 @@ import * as p from "@clack/prompts"; -import { WingsClusterMetadata } from "@useairfoil/wings"; +import { ClusterClient } from "@useairfoil/wings"; import { Effect } from "effect"; import { Command, Flag } from "effect/unstable/cli"; -import { makeClusterMetadataLayer } from "../../../utils/client.js"; -import { forceOption, hostOption, portOption } from "../../../utils/options.js"; +import { makeClusterClientLayer } from "../../../utils/client"; +import { forceOption, hostOption, portOption } from "../../../utils/options"; const nameOption = Flag.string("name").pipe( Flag.withDescription("Namespace name in format: tenants/{tenant}/namespaces/{namespace}"), @@ -18,7 +18,7 @@ export const deleteNamespaceCommand = Command.make( host: hostOption, port: portOption, }, - ({ name, force, host, port }) => + ({ name, force }) => Effect.gen(function* () { p.intro("🗑️ Delete 
Namespace"); @@ -34,17 +34,14 @@ export const deleteNamespaceCommand = Command.make( if (p.isCancel(confirm) || !confirm) { p.cancel("Deletion cancelled"); - process.exit(0); + return; } } - const layer = makeClusterMetadataLayer(host, port); - const s = p.spinner(); s.start("Deleting namespace..."); - yield* WingsClusterMetadata.deleteNamespace({ name }).pipe( - Effect.provide(layer), + yield* ClusterClient.deleteNamespace({ name }).pipe( Effect.tapError(() => Effect.sync(() => s.stop("Failed to delete namespace"))), ); @@ -55,4 +52,5 @@ export const deleteNamespaceCommand = Command.make( Command.withDescription( "Delete a namespace from the cluster (fails if namespace has any topics)", ), + Command.provide(({ host, port }) => makeClusterClientLayer(host, port)), ); diff --git a/packages/cli/src/commands/cluster/namespace/get.ts b/packages/cli/src/commands/cluster/namespace/get.ts index a101367..858d54e 100644 --- a/packages/cli/src/commands/cluster/namespace/get.ts +++ b/packages/cli/src/commands/cluster/namespace/get.ts @@ -1,11 +1,11 @@ import * as p from "@clack/prompts"; -import { WingsClusterMetadata } from "@useairfoil/wings"; +import { ClusterClient } from "@useairfoil/wings"; import { printTable } from "console-table-printer"; import { Effect } from "effect"; import { Command, Flag } from "effect/unstable/cli"; -import { makeClusterMetadataLayer } from "../../../utils/client.js"; -import { hostOption, portOption } from "../../../utils/options.js"; +import { makeClusterClientLayer } from "../../../utils/client"; +import { hostOption, portOption } from "../../../utils/options"; const nameOption = Flag.string("name").pipe( Flag.withDescription("Namespace name in format: tenants/{tenant}/namespaces/{namespace}"), @@ -18,15 +18,12 @@ export const getNamespaceCommand = Command.make( host: hostOption, port: portOption, }, - ({ name, host, port }) => + ({ name }) => Effect.gen(function* () { - const layer = makeClusterMetadataLayer(host, port); - const s = 
p.spinner(); s.start("Fetching namespace..."); - const namespace = yield* WingsClusterMetadata.getNamespace({ name }).pipe( - Effect.provide(layer), + const namespace = yield* ClusterClient.getNamespace({ name }).pipe( Effect.tapError(() => Effect.sync(() => s.stop("Failed to get namespace"))), ); @@ -45,4 +42,7 @@ export const getNamespaceCommand = Command.make( p.outro("✓ Done"); }); }), -).pipe(Command.withDescription("Get details of a specific namespace")); +).pipe( + Command.withDescription("Get details of a specific namespace"), + Command.provide(({ host, port }) => makeClusterClientLayer(host, port)), +); diff --git a/packages/cli/src/commands/cluster/namespace/list.ts b/packages/cli/src/commands/cluster/namespace/list.ts index 715e7b7..ad98fc0 100644 --- a/packages/cli/src/commands/cluster/namespace/list.ts +++ b/packages/cli/src/commands/cluster/namespace/list.ts @@ -1,11 +1,11 @@ import * as p from "@clack/prompts"; -import { WingsClusterMetadata } from "@useairfoil/wings"; +import { ClusterClient } from "@useairfoil/wings"; import { printTable } from "console-table-printer"; import { Effect, Option } from "effect"; import { Command, Flag } from "effect/unstable/cli"; -import { makeClusterMetadataLayer } from "../../../utils/client.js"; -import { hostOption, pageSizeOption, pageTokenOption, portOption } from "../../../utils/options.js"; +import { makeClusterClientLayer } from "../../../utils/client"; +import { hostOption, pageSizeOption, pageTokenOption, portOption } from "../../../utils/options"; const parentOption = Flag.string("parent").pipe( Flag.withDescription("Parent tenant in format: tenants/{tenant} (e.g., 'tenants/default')"), @@ -20,21 +20,16 @@ export const listNamespacesCommand = Command.make( host: hostOption, port: portOption, }, - ({ parent, pageSize, pageToken, host, port }) => + ({ parent, pageSize, pageToken }) => Effect.gen(function* () { - const layer = makeClusterMetadataLayer(host, port); - const s = p.spinner(); s.start("Fetching 
namespaces..."); - const response = yield* WingsClusterMetadata.listNamespaces({ + const response = yield* ClusterClient.listNamespaces({ parent, pageSize, pageToken: Option.getOrUndefined(pageToken), - }).pipe( - Effect.provide(layer), - Effect.tapError(() => Effect.sync(() => s.stop("Failed to list namespaces"))), - ); + }).pipe(Effect.tapError(() => Effect.sync(() => s.stop("Failed to list namespaces")))); s.stop(`Found ${response.namespaces.length} namespace(s)`); @@ -43,21 +38,13 @@ export const listNamespacesCommand = Command.make( p.log.warn("No namespaces found"); } else { printTable( - response.namespaces.map( - (namespace: { - name: string; - flushSizeBytes: bigint; - flushIntervalMillis: bigint; - objectStore?: string | null; - dataLake?: string | null; - }) => ({ - name: namespace.name, - flush_size_bytes: namespace.flushSizeBytes.toString(), - flush_interval_millis: namespace.flushIntervalMillis.toString(), - object_store: namespace.objectStore || "-", - data_lake: namespace.dataLake || "-", - }), - ), + response.namespaces.map((namespace) => ({ + name: namespace.name, + flush_size_bytes: namespace.flushSizeBytes.toString(), + flush_interval_millis: namespace.flushIntervalMillis.toString(), + object_store: namespace.objectStore || "-", + data_lake: namespace.dataLake || "-", + })), ); } @@ -68,4 +55,7 @@ export const listNamespacesCommand = Command.make( p.outro("✓ Done"); }); }), -).pipe(Command.withDescription("List all namespaces belonging to a tenant")); +).pipe( + Command.withDescription("List all namespaces belonging to a tenant"), + Command.provide(({ host, port }) => makeClusterClientLayer(host, port)), +); diff --git a/packages/cli/src/commands/cluster/object-store/create-aws.ts b/packages/cli/src/commands/cluster/object-store/create-aws.ts index f74aac8..25ab72e 100644 --- a/packages/cli/src/commands/cluster/object-store/create-aws.ts +++ b/packages/cli/src/commands/cluster/object-store/create-aws.ts @@ -1,11 +1,11 @@ import * as p from 
"@clack/prompts"; -import { WingsClusterMetadata } from "@useairfoil/wings"; +import { ClusterClient } from "@useairfoil/wings"; import { printTable } from "console-table-printer"; import { Effect, Option } from "effect"; import { Command, Flag } from "effect/unstable/cli"; -import { makeClusterMetadataLayer } from "../../../utils/client.js"; -import { hostOption, portOption } from "../../../utils/options.js"; +import { makeClusterClientLayer } from "../../../utils/client"; +import { hostOption, portOption } from "../../../utils/options"; const parentOption = Flag.string("parent").pipe( Flag.withDescription("Parent tenant in format: tenants/{tenant}"), @@ -50,26 +50,14 @@ export const createObjectStoreAwsCommand = Command.make( host: hostOption, port: portOption, }, - ({ - parent, - objectStoreId, - bucketName, - prefix, - accessKeyId, - secretAccessKey, - region, - host, - port, - }) => + ({ parent, objectStoreId, bucketName, prefix, accessKeyId, secretAccessKey, region }) => Effect.gen(function* () { p.intro("🗄️ Create AWS S3 Object Store"); - const layer = makeClusterMetadataLayer(host, port); - const s = p.spinner(); s.start("Creating AWS S3 object store..."); - const result = yield* WingsClusterMetadata.createObjectStore({ + const result = yield* ClusterClient.createObjectStore({ parent, objectStoreId, objectStoreConfig: { @@ -83,7 +71,6 @@ export const createObjectStoreAwsCommand = Command.make( }, }, }).pipe( - Effect.provide(layer), Effect.tapError(() => Effect.sync(() => s.stop("Failed to create AWS object store"))), ); @@ -101,4 +88,7 @@ export const createObjectStoreAwsCommand = Command.make( p.outro("✓ Done"); }); }), -).pipe(Command.withDescription("Create a new AWS S3 object store")); +).pipe( + Command.withDescription("Create a new AWS S3 object store"), + Command.provide(({ host, port }) => makeClusterClientLayer(host, port)), +); diff --git a/packages/cli/src/commands/cluster/object-store/create-azure.ts 
b/packages/cli/src/commands/cluster/object-store/create-azure.ts index 7c7d412..f4e1062 100644 --- a/packages/cli/src/commands/cluster/object-store/create-azure.ts +++ b/packages/cli/src/commands/cluster/object-store/create-azure.ts @@ -1,11 +1,11 @@ import * as p from "@clack/prompts"; -import { WingsClusterMetadata } from "@useairfoil/wings"; +import { ClusterClient } from "@useairfoil/wings"; import { printTable } from "console-table-printer"; import { Effect, Option } from "effect"; import { Command, Flag } from "effect/unstable/cli"; -import { makeClusterMetadataLayer } from "../../../utils/client.js"; -import { hostOption, portOption } from "../../../utils/options.js"; +import { makeClusterClientLayer } from "../../../utils/client"; +import { hostOption, portOption } from "../../../utils/options"; const parentOption = Flag.string("parent").pipe( Flag.withDescription("Parent tenant in format: tenants/{tenant}"), @@ -44,25 +44,14 @@ export const createObjectStoreAzureCommand = Command.make( host: hostOption, port: portOption, }, - ({ - parent, - objectStoreId, - containerName, - prefix, - storageAccountName, - storageAccountKey, - host, - port, - }) => + ({ parent, objectStoreId, containerName, prefix, storageAccountName, storageAccountKey }) => Effect.gen(function* () { p.intro("🗄️ Create Azure Blob Storage Object Store"); - const layer = makeClusterMetadataLayer(host, port); - const s = p.spinner(); s.start("Creating Azure object store..."); - const result = yield* WingsClusterMetadata.createObjectStore({ + const result = yield* ClusterClient.createObjectStore({ parent, objectStoreId, objectStoreConfig: { @@ -75,7 +64,6 @@ export const createObjectStoreAzureCommand = Command.make( }, }, }).pipe( - Effect.provide(layer), Effect.tapError(() => Effect.sync(() => s.stop("Failed to create Azure object store"))), ); @@ -93,4 +81,7 @@ export const createObjectStoreAzureCommand = Command.make( p.outro("✓ Done"); }); }), -).pipe(Command.withDescription("Create a new 
Azure Blob Storage object store")); +).pipe( + Command.withDescription("Create a new Azure Blob Storage object store"), + Command.provide(({ host, port }) => makeClusterClientLayer(host, port)), +); diff --git a/packages/cli/src/commands/cluster/object-store/create-google.ts b/packages/cli/src/commands/cluster/object-store/create-google.ts index d7e1d9e..ac4e8e3 100644 --- a/packages/cli/src/commands/cluster/object-store/create-google.ts +++ b/packages/cli/src/commands/cluster/object-store/create-google.ts @@ -1,11 +1,11 @@ import * as p from "@clack/prompts"; -import { WingsClusterMetadata } from "@useairfoil/wings"; +import { ClusterClient } from "@useairfoil/wings"; import { printTable } from "console-table-printer"; import { Effect, Option } from "effect"; import { Command, Flag } from "effect/unstable/cli"; -import { makeClusterMetadataLayer } from "../../../utils/client.js"; -import { hostOption, portOption } from "../../../utils/options.js"; +import { makeClusterClientLayer } from "../../../utils/client"; +import { hostOption, portOption } from "../../../utils/options"; const parentOption = Flag.string("parent").pipe( Flag.withDescription("Parent tenant in format: tenants/{tenant}"), @@ -44,16 +44,14 @@ export const createObjectStoreGoogleCommand = Command.make( host: hostOption, port: portOption, }, - ({ parent, objectStoreId, bucketName, prefix, serviceAccount, serviceAccountKey, host, port }) => + ({ parent, objectStoreId, bucketName, prefix, serviceAccount, serviceAccountKey }) => Effect.gen(function* () { p.intro("🗄️ Create Google Cloud Storage Object Store"); - const layer = makeClusterMetadataLayer(host, port); - const s = p.spinner(); s.start("Creating Google Cloud Storage object store..."); - const result = yield* WingsClusterMetadata.createObjectStore({ + const result = yield* ClusterClient.createObjectStore({ parent, objectStoreId, objectStoreConfig: { @@ -66,7 +64,6 @@ export const createObjectStoreGoogleCommand = Command.make( }, }, }).pipe( - 
Effect.provide(layer), Effect.tapError(() => Effect.sync(() => s.stop("Failed to create Google object store"))), ); @@ -84,4 +81,7 @@ export const createObjectStoreGoogleCommand = Command.make( p.outro("✓ Done"); }); }), -).pipe(Command.withDescription("Create a new Google Cloud Storage object store")); +).pipe( + Command.withDescription("Create a new Google Cloud Storage object store"), + Command.provide(({ host, port }) => makeClusterClientLayer(host, port)), +); diff --git a/packages/cli/src/commands/cluster/object-store/create-s3.ts b/packages/cli/src/commands/cluster/object-store/create-s3.ts index 2c3b41a..ad2ce9b 100644 --- a/packages/cli/src/commands/cluster/object-store/create-s3.ts +++ b/packages/cli/src/commands/cluster/object-store/create-s3.ts @@ -1,11 +1,11 @@ import * as p from "@clack/prompts"; -import { WingsClusterMetadata } from "@useairfoil/wings"; +import { ClusterClient } from "@useairfoil/wings"; import { printTable } from "console-table-printer"; import { Effect, Option } from "effect"; import { Command, Flag } from "effect/unstable/cli"; -import { makeClusterMetadataLayer } from "../../../utils/client.js"; -import { hostOption, portOption } from "../../../utils/options.js"; +import { makeClusterClientLayer } from "../../../utils/client"; +import { hostOption, portOption } from "../../../utils/options"; const parentOption = Flag.string("parent").pipe( Flag.withDescription("Parent tenant in format: tenants/{tenant}"), @@ -53,27 +53,14 @@ export const createObjectStoreS3Command = Command.make( host: hostOption, port: portOption, }, - ({ - parent, - objectStoreId, - bucketName, - prefix, - accessKeyId, - secretAccessKey, - region, - endpoint, - host, - port, - }) => + ({ parent, objectStoreId, bucketName, prefix, accessKeyId, secretAccessKey, region, endpoint }) => Effect.gen(function* () { p.intro("🗄️ Create S3-Compatible Object Store"); - const layer = makeClusterMetadataLayer(host, port); - const s = p.spinner(); s.start("Creating 
S3-compatible object store..."); - const result = yield* WingsClusterMetadata.createObjectStore({ + const result = yield* ClusterClient.createObjectStore({ parent, objectStoreId, objectStoreConfig: { @@ -89,7 +76,6 @@ export const createObjectStoreS3Command = Command.make( }, }, }).pipe( - Effect.provide(layer), Effect.tapError(() => Effect.sync(() => s.stop("Failed to create S3-compatible object store")), ), @@ -111,4 +97,5 @@ export const createObjectStoreS3Command = Command.make( }), ).pipe( Command.withDescription("Create a new S3-compatible object store (MinIO, DigitalOcean, etc.)"), + Command.provide(({ host, port }) => makeClusterClientLayer(host, port)), ); diff --git a/packages/cli/src/commands/cluster/object-store/create.ts b/packages/cli/src/commands/cluster/object-store/create.ts index 2c2742a..410f692 100644 --- a/packages/cli/src/commands/cluster/object-store/create.ts +++ b/packages/cli/src/commands/cluster/object-store/create.ts @@ -1,10 +1,10 @@ import { Effect } from "effect"; import { Command } from "effect/unstable/cli"; -import { createObjectStoreAwsCommand } from "./create-aws.js"; -import { createObjectStoreAzureCommand } from "./create-azure.js"; -import { createObjectStoreGoogleCommand } from "./create-google.js"; -import { createObjectStoreS3Command } from "./create-s3.js"; +import { createObjectStoreAwsCommand } from "./create-aws"; +import { createObjectStoreAzureCommand } from "./create-azure"; +import { createObjectStoreGoogleCommand } from "./create-google"; +import { createObjectStoreS3Command } from "./create-s3"; export const createObjectStoreCommand = Command.make( "create-object-store", diff --git a/packages/cli/src/commands/cluster/object-store/delete.ts b/packages/cli/src/commands/cluster/object-store/delete.ts index f0987b8..af723d2 100644 --- a/packages/cli/src/commands/cluster/object-store/delete.ts +++ b/packages/cli/src/commands/cluster/object-store/delete.ts @@ -1,10 +1,10 @@ import * as p from "@clack/prompts"; -import { 
WingsClusterMetadata } from "@useairfoil/wings"; +import { ClusterClient } from "@useairfoil/wings"; import { Effect } from "effect"; import { Command, Flag } from "effect/unstable/cli"; -import { makeClusterMetadataLayer } from "../../../utils/client.js"; -import { forceOption, hostOption, portOption } from "../../../utils/options.js"; +import { makeClusterClientLayer } from "../../../utils/client"; +import { forceOption, hostOption, portOption } from "../../../utils/options"; const nameOption = Flag.string("name").pipe( Flag.withDescription( @@ -20,7 +20,7 @@ export const deleteObjectStoreCommand = Command.make( host: hostOption, port: portOption, }, - ({ name, force, host, port }) => + ({ name, force }) => Effect.gen(function* () { p.intro("🗑️ Delete Object Store"); @@ -36,21 +36,21 @@ export const deleteObjectStoreCommand = Command.make( if (p.isCancel(confirm) || !confirm) { p.cancel("Deletion cancelled"); - process.exit(0); + return; } } - const layer = makeClusterMetadataLayer(host, port); - const s = p.spinner(); s.start("Deleting object store..."); - yield* WingsClusterMetadata.deleteObjectStore({ name }).pipe( - Effect.provide(layer), + yield* ClusterClient.deleteObjectStore({ name }).pipe( Effect.tapError(() => Effect.sync(() => s.stop("Failed to delete object store"))), ); s.stop("Object store deleted successfully"); p.outro("✓ Done"); }), -).pipe(Command.withDescription("Delete an object store from the cluster")); +).pipe( + Command.withDescription("Delete an object store from the cluster"), + Command.provide(({ host, port }) => makeClusterClientLayer(host, port)), +); diff --git a/packages/cli/src/commands/cluster/object-store/get.ts b/packages/cli/src/commands/cluster/object-store/get.ts index 6504f6a..6403474 100644 --- a/packages/cli/src/commands/cluster/object-store/get.ts +++ b/packages/cli/src/commands/cluster/object-store/get.ts @@ -1,11 +1,11 @@ import * as p from "@clack/prompts"; -import { WingsClusterMetadata } from "@useairfoil/wings"; 
+import { ClusterClient } from "@useairfoil/wings"; import { printTable } from "console-table-printer"; import { Effect } from "effect"; import { Command, Flag } from "effect/unstable/cli"; -import { makeClusterMetadataLayer } from "../../../utils/client.js"; -import { hostOption, portOption } from "../../../utils/options.js"; +import { makeClusterClientLayer } from "../../../utils/client"; +import { hostOption, portOption } from "../../../utils/options"; const nameOption = Flag.string("name").pipe( Flag.withDescription( @@ -20,17 +20,12 @@ export const getObjectStoreCommand = Command.make( host: hostOption, port: portOption, }, - ({ name, host, port }) => + ({ name }) => Effect.gen(function* () { - const layer = makeClusterMetadataLayer(host, port); - const s = p.spinner(); s.start("Fetching object store..."); - const objectStore = yield* WingsClusterMetadata.getObjectStore({ - name, - }).pipe( - Effect.provide(layer), + const objectStore = yield* ClusterClient.getObjectStore({ name }).pipe( Effect.tapError(() => Effect.sync(() => s.stop("Failed to get object store"))), ); @@ -46,4 +41,7 @@ export const getObjectStoreCommand = Command.make( p.outro("✓ Done"); }); }), -).pipe(Command.withDescription("Get details of a specific object store")); +).pipe( + Command.withDescription("Get details of a specific object store"), + Command.provide(({ host, port }) => makeClusterClientLayer(host, port)), +); diff --git a/packages/cli/src/commands/cluster/object-store/list.ts b/packages/cli/src/commands/cluster/object-store/list.ts index 580fd4b..d658966 100644 --- a/packages/cli/src/commands/cluster/object-store/list.ts +++ b/packages/cli/src/commands/cluster/object-store/list.ts @@ -1,11 +1,11 @@ import * as p from "@clack/prompts"; -import { WingsClusterMetadata } from "@useairfoil/wings"; +import { ClusterClient } from "@useairfoil/wings"; import { printTable } from "console-table-printer"; import { Effect, Option } from "effect"; import { Command, Flag } from 
"effect/unstable/cli"; -import { makeClusterMetadataLayer } from "../../../utils/client.js"; -import { hostOption, pageSizeOption, pageTokenOption, portOption } from "../../../utils/options.js"; +import { makeClusterClientLayer } from "../../../utils/client"; +import { hostOption, pageSizeOption, pageTokenOption, portOption } from "../../../utils/options"; const parentOption = Flag.string("parent").pipe( Flag.withDescription("Parent tenant in format: tenants/{tenant}"), @@ -20,21 +20,16 @@ export const listObjectStoresCommand = Command.make( host: hostOption, port: portOption, }, - ({ parent, pageSize, pageToken, host, port }) => + ({ parent, pageSize, pageToken }) => Effect.gen(function* () { - const layer = makeClusterMetadataLayer(host, port); - const s = p.spinner(); s.start("Fetching object stores..."); - const response = yield* WingsClusterMetadata.listObjectStores({ + const response = yield* ClusterClient.listObjectStores({ parent, pageSize, pageToken: Option.getOrUndefined(pageToken), - }).pipe( - Effect.provide(layer), - Effect.tapError(() => Effect.sync(() => s.stop("Failed to list object stores"))), - ); + }).pipe(Effect.tapError(() => Effect.sync(() => s.stop("Failed to list object stores")))); s.stop(`Found ${response.objectStores.length} object store(s)`); @@ -43,12 +38,10 @@ export const listObjectStoresCommand = Command.make( p.log.warn("No object stores found"); } else { printTable( - response.objectStores.map( - (objectStore: { name: string; objectStoreConfig: { _tag?: string | null } }) => ({ - name: objectStore.name, - type: objectStore.objectStoreConfig._tag || "-", - }), - ), + response.objectStores.map((objectStore) => ({ + name: objectStore.name, + type: objectStore.objectStoreConfig._tag || "-", + })), ); } @@ -59,4 +52,7 @@ export const listObjectStoresCommand = Command.make( p.outro("✓ Done"); }); }), -).pipe(Command.withDescription("List all object stores belonging to a tenant")); +).pipe( + Command.withDescription("List all object 
stores belonging to a tenant"), + Command.provide(({ host, port }) => makeClusterClientLayer(host, port)), +); diff --git a/packages/cli/src/commands/cluster/tenant/create.ts b/packages/cli/src/commands/cluster/tenant/create.ts index df435e3..b81da5a 100644 --- a/packages/cli/src/commands/cluster/tenant/create.ts +++ b/packages/cli/src/commands/cluster/tenant/create.ts @@ -1,11 +1,11 @@ import * as p from "@clack/prompts"; -import { WingsClusterMetadata } from "@useairfoil/wings"; +import { ClusterClient } from "@useairfoil/wings"; import { printTable } from "console-table-printer"; import { Effect } from "effect"; import { Command, Flag } from "effect/unstable/cli"; -import { makeClusterMetadataLayer } from "../../../utils/client.js"; -import { hostOption, portOption } from "../../../utils/options.js"; +import { makeClusterClientLayer } from "../../../utils/client"; +import { hostOption, portOption } from "../../../utils/options"; const tenantIdOption = Flag.string("tenant-id").pipe( Flag.withDescription("Unique identifier for the tenant (e.g., 'acme-corp')"), @@ -18,21 +18,16 @@ export const createTenantCommand = Command.make( host: hostOption, port: portOption, }, - ({ tenantId, host, port }) => + ({ tenantId }) => Effect.gen(function* () { p.intro("🏢 Create Tenant"); - const layer = makeClusterMetadataLayer(host, port); - const s = p.spinner(); s.start("Creating tenant..."); - const tenant = yield* WingsClusterMetadata.createTenant({ + const tenant = yield* ClusterClient.createTenant({ tenantId, - }).pipe( - Effect.provide(layer), - Effect.tapError(() => Effect.sync(() => s.stop("Failed to create tenant"))), - ); + }).pipe(Effect.tapError(() => Effect.sync(() => s.stop("Failed to create tenant")))); s.stop("Tenant created successfully"); @@ -41,4 +36,7 @@ export const createTenantCommand = Command.make( p.outro("✓ Done"); }); }), -).pipe(Command.withDescription("Create a new tenant in the cluster")); +).pipe( + Command.withDescription("Create a new tenant in 
the cluster"), + Command.provide(({ host, port }) => makeClusterClientLayer(host, port)), +); diff --git a/packages/cli/src/commands/cluster/tenant/delete.ts b/packages/cli/src/commands/cluster/tenant/delete.ts index ed932ca..2a2a2ef 100644 --- a/packages/cli/src/commands/cluster/tenant/delete.ts +++ b/packages/cli/src/commands/cluster/tenant/delete.ts @@ -1,9 +1,9 @@ import * as p from "@clack/prompts"; -import { WingsClusterMetadata } from "@useairfoil/wings"; +import { ClusterClient } from "@useairfoil/wings"; import { Effect } from "effect"; import { Command, Flag } from "effect/unstable/cli"; -import { makeClusterMetadataLayer } from "../../../utils/client"; +import { makeClusterClientLayer } from "../../../utils/client"; import { forceOption, hostOption, portOption } from "../../../utils/options"; const nameOption = Flag.string("name").pipe( @@ -18,7 +18,7 @@ export const deleteTenantCommand = Command.make( host: hostOption, port: portOption, }, - ({ name, force, host, port }) => + ({ name, force }) => Effect.gen(function* () { p.intro("🗑️ Delete Tenant"); @@ -34,17 +34,14 @@ export const deleteTenantCommand = Command.make( if (p.isCancel(confirm) || !confirm) { p.cancel("Deletion cancelled"); - process.exit(0); + return; } } - const layer = makeClusterMetadataLayer(host, port); - const s = p.spinner(); s.start("Deleting tenant..."); - yield* WingsClusterMetadata.deleteTenant({ name }).pipe( - Effect.provide(layer), + yield* ClusterClient.deleteTenant({ name }).pipe( Effect.tapError(() => Effect.sync(() => s.stop("Failed to delete tenant"))), ); @@ -53,4 +50,5 @@ export const deleteTenantCommand = Command.make( }), ).pipe( Command.withDescription("Delete a tenant from the cluster (fails if tenant has any namespaces)"), + Command.provide(({ host, port }) => makeClusterClientLayer(host, port)), ); diff --git a/packages/cli/src/commands/cluster/tenant/get.ts b/packages/cli/src/commands/cluster/tenant/get.ts index d2806b2..5acae8f 100644 --- 
a/packages/cli/src/commands/cluster/tenant/get.ts +++ b/packages/cli/src/commands/cluster/tenant/get.ts @@ -1,11 +1,11 @@ import * as p from "@clack/prompts"; -import { WingsClusterMetadata } from "@useairfoil/wings"; +import { ClusterClient } from "@useairfoil/wings"; import { printTable } from "console-table-printer"; import { Effect } from "effect"; import { Command, Flag } from "effect/unstable/cli"; -import { makeClusterMetadataLayer } from "../../../utils/client.js"; -import { hostOption, portOption } from "../../../utils/options.js"; +import { makeClusterClientLayer } from "../../../utils/client"; +import { hostOption, portOption } from "../../../utils/options"; const nameOption = Flag.string("name").pipe( Flag.withDescription("Tenant name in format: tenants/{tenant} (e.g., 'tenants/acme-corp')"), @@ -18,15 +18,12 @@ export const getTenantCommand = Command.make( host: hostOption, port: portOption, }, - ({ name, host, port }) => + ({ name }) => Effect.gen(function* () { - const layer = makeClusterMetadataLayer(host, port); - const s = p.spinner(); s.start("Fetching tenant..."); - const tenant = yield* WingsClusterMetadata.getTenant({ name }).pipe( - Effect.provide(layer), + const tenant = yield* ClusterClient.getTenant({ name }).pipe( Effect.tapError(() => Effect.sync(() => s.stop("Failed to get tenant"))), ); @@ -37,4 +34,7 @@ export const getTenantCommand = Command.make( p.outro("✓ Done"); }); }), -).pipe(Command.withDescription("Get details of a specific tenant")); +).pipe( + Command.withDescription("Get details of a specific tenant"), + Command.provide(({ host, port }) => makeClusterClientLayer(host, port)), +); diff --git a/packages/cli/src/commands/cluster/tenant/list.ts b/packages/cli/src/commands/cluster/tenant/list.ts index b593535..62d8a09 100644 --- a/packages/cli/src/commands/cluster/tenant/list.ts +++ b/packages/cli/src/commands/cluster/tenant/list.ts @@ -1,11 +1,11 @@ import * as p from "@clack/prompts"; -import { WingsClusterMetadata } from 
"@useairfoil/wings"; +import { ClusterClient } from "@useairfoil/wings"; import { printTable } from "console-table-printer"; import { Effect, Option } from "effect"; import { Command } from "effect/unstable/cli"; -import { makeClusterMetadataLayer } from "../../../utils/client.js"; -import { hostOption, pageSizeOption, pageTokenOption, portOption } from "../../../utils/options.js"; +import { makeClusterClientLayer } from "../../../utils/client"; +import { hostOption, pageSizeOption, pageTokenOption, portOption } from "../../../utils/options"; export const listTenantsCommand = Command.make( "list-tenants", @@ -15,20 +15,15 @@ export const listTenantsCommand = Command.make( host: hostOption, port: portOption, }, - ({ pageSize, pageToken, host, port }) => + ({ pageSize, pageToken }) => Effect.gen(function* () { - const layer = makeClusterMetadataLayer(host, port); - const s = p.spinner(); s.start("Fetching tenants..."); - const response = yield* WingsClusterMetadata.listTenants({ + const response = yield* ClusterClient.listTenants({ pageSize, pageToken: Option.getOrUndefined(pageToken), - }).pipe( - Effect.provide(layer), - Effect.tapError(() => Effect.sync(() => s.stop("Failed to list tenants"))), - ); + }).pipe(Effect.tapError(() => Effect.sync(() => s.stop("Failed to list tenants")))); s.stop(`Found ${response.tenants.length} tenant(s)`); @@ -36,11 +31,7 @@ export const listTenantsCommand = Command.make( if (response.tenants.length === 0) { p.log.warn("No tenants found"); } else { - printTable( - response.tenants.map((tenant: { name: string }) => ({ - name: tenant.name, - })), - ); + printTable(response.tenants.map((tenant) => ({ name: tenant.name }))); } if (response.nextPageToken) { @@ -50,4 +41,7 @@ export const listTenantsCommand = Command.make( p.outro("✓ Done"); }); }), -).pipe(Command.withDescription("List all tenants in the cluster")); +).pipe( + Command.withDescription("List all tenants in the cluster"), + Command.provide(({ host, port }) => 
makeClusterClientLayer(host, port)), +); diff --git a/packages/cli/src/commands/cluster/topic/create.ts b/packages/cli/src/commands/cluster/topic/create.ts index 110ebcd..2d1fc3c 100644 --- a/packages/cli/src/commands/cluster/topic/create.ts +++ b/packages/cli/src/commands/cluster/topic/create.ts @@ -1,16 +1,13 @@ -import type { FieldConfig } from "@useairfoil/wings"; - import * as p from "@clack/prompts"; -import { WingsClusterMetadata } from "@useairfoil/wings"; +import { Arrow, ClusterClient } from "@useairfoil/wings"; import { printTable } from "console-table-printer"; import { Effect, Option } from "effect"; import { Command, Flag } from "effect/unstable/cli"; import * as fs from "node:fs"; -import { makeClusterMetadataLayer } from "../../../utils/client.js"; -import { hostOption, portOption } from "../../../utils/options.js"; +import { makeClusterClientLayer } from "../../../utils/client"; +import { hostOption, portOption } from "../../../utils/options"; -// Supported simple types (no config required) const SUPPORTED_INLINE_TYPES = [ "Int8", "Int16", @@ -40,6 +37,7 @@ const SUPPORTED_INLINE_TYPES = [ ] as const; type SupportedInlineType = (typeof SUPPORTED_INLINE_TYPES)[number]; +type FieldConfig = Arrow.FieldConfig; const parentOption = Flag.string("parent").pipe( Flag.withDescription("Parent namespace in format: tenants/{tenant}/namespaces/{namespace}"), @@ -84,9 +82,6 @@ const targetFileSizeBytesOption = Flag.integer("target-file-size-bytes").pipe( Flag.withDefault(1024 * 1024), ); -/** - * Parse a field string in format "name:Type" or "name:Type?" 
(nullable) - */ function parseFieldString(fieldStr: string, index: number): FieldConfig { const match = fieldStr.match(/^([^:]+):([^?]+)(\?)?$/); if (!match) { @@ -121,16 +116,10 @@ function parseFieldString(fieldStr: string, index: number): FieldConfig { }; } -/** - * Parse multiple field strings into FieldConfig array - */ function parseFieldsFromArgs(fields: ReadonlyArray): FieldConfig[] { return fields.map((field, index) => parseFieldString(field, index)); } -/** - * Load fields from a JSON schema file with basic validation - */ function loadFieldsFromFile(filePath: string): FieldConfig[] { if (!fs.existsSync(filePath)) { throw new Error(`Schema file not found: "${filePath}"`); @@ -187,8 +176,6 @@ export const createTopicCommand = Command.make( freshnessSeconds, ttlSeconds, targetFileSizeBytes, - host, - port, }) => Effect.gen(function* () { p.intro("📋 Create Topic"); @@ -238,12 +225,10 @@ export const createTopicCommand = Command.make( partitionKeyId = partitionField.id; } - const layer = makeClusterMetadataLayer(host, port); - const s = p.spinner(); s.start("Creating topic..."); - const topic = yield* WingsClusterMetadata.createTopic({ + const topic = yield* ClusterClient.createTopic({ parent, topicId, description: Option.getOrUndefined(description), @@ -254,10 +239,7 @@ export const createTopicCommand = Command.make( ttlSeconds: Option.getOrUndefined(Option.map(ttlSeconds, (ttl) => BigInt(ttl))), targetFileSizeBytes: BigInt(targetFileSizeBytes), }, - }).pipe( - Effect.provide(layer), - Effect.tapError(() => Effect.sync(() => s.stop("Failed to create topic"))), - ); + }).pipe(Effect.tapError(() => Effect.sync(() => s.stop("Failed to create topic")))); s.stop("Topic created successfully"); @@ -293,4 +275,7 @@ export const createTopicCommand = Command.make( p.outro("✓ Done"); }); }), -).pipe(Command.withDescription("Create a new topic belonging to a namespace")); +).pipe( + Command.withDescription("Create a new topic belonging to a namespace"), + 
Command.provide(({ host, port }) => makeClusterClientLayer(host, port)), +); diff --git a/packages/cli/src/commands/cluster/topic/delete.ts b/packages/cli/src/commands/cluster/topic/delete.ts index c4df75b..de96e37 100644 --- a/packages/cli/src/commands/cluster/topic/delete.ts +++ b/packages/cli/src/commands/cluster/topic/delete.ts @@ -1,10 +1,10 @@ import * as p from "@clack/prompts"; -import { WingsClusterMetadata } from "@useairfoil/wings"; +import { ClusterClient } from "@useairfoil/wings"; import { Effect } from "effect"; import { Command, Flag } from "effect/unstable/cli"; -import { makeClusterMetadataLayer } from "../../../utils/client.js"; -import { forceOption, hostOption, portOption } from "../../../utils/options.js"; +import { makeClusterClientLayer } from "../../../utils/client"; +import { forceOption, hostOption, portOption } from "../../../utils/options"; const nameOption = Flag.string("name").pipe( Flag.withDescription( @@ -26,7 +26,7 @@ export const deleteTopicCommand = Command.make( host: hostOption, port: portOption, }, - ({ name, forceDelete, force, host, port }) => + ({ name, forceDelete, force }) => Effect.gen(function* () { p.intro("🗑️ Delete Topic"); @@ -42,24 +42,22 @@ export const deleteTopicCommand = Command.make( if (p.isCancel(confirm) || !confirm) { p.cancel("Deletion cancelled"); - process.exit(0); + return; } } - const layer = makeClusterMetadataLayer(host, port); - const s = p.spinner(); s.start("Deleting topic..."); - yield* WingsClusterMetadata.deleteTopic({ + yield* ClusterClient.deleteTopic({ name, force: forceDelete, - }).pipe( - Effect.provide(layer), - Effect.tapError(() => Effect.sync(() => s.stop("Failed to delete topic"))), - ); + }).pipe(Effect.tapError(() => Effect.sync(() => s.stop("Failed to delete topic")))); s.stop("Topic deleted successfully"); p.outro("✓ Done"); }), -).pipe(Command.withDescription("Delete a topic from the cluster")); +).pipe( + Command.withDescription("Delete a topic from the cluster"), + 
Command.provide(({ host, port }) => makeClusterClientLayer(host, port)), +); diff --git a/packages/cli/src/commands/cluster/topic/get.ts b/packages/cli/src/commands/cluster/topic/get.ts index 7c90bcc..4b37e70 100644 --- a/packages/cli/src/commands/cluster/topic/get.ts +++ b/packages/cli/src/commands/cluster/topic/get.ts @@ -1,11 +1,11 @@ import * as p from "@clack/prompts"; -import { WingsClusterMetadata } from "@useairfoil/wings"; +import { ClusterClient } from "@useairfoil/wings"; import { printTable } from "console-table-printer"; import { Effect } from "effect"; import { Command, Flag } from "effect/unstable/cli"; -import { makeClusterMetadataLayer } from "../../../utils/client.js"; -import { hostOption, portOption } from "../../../utils/options.js"; +import { makeClusterClientLayer } from "../../../utils/client"; +import { hostOption, portOption } from "../../../utils/options"; const nameOption = Flag.string("name").pipe( Flag.withDescription( @@ -20,15 +20,12 @@ export const getTopicCommand = Command.make( host: hostOption, port: portOption, }, - ({ name, host, port }) => + ({ name }) => Effect.gen(function* () { - const layer = makeClusterMetadataLayer(host, port); - const s = p.spinner(); s.start("Fetching topic..."); - const topic = yield* WingsClusterMetadata.getTopic({ name }).pipe( - Effect.provide(layer), + const topic = yield* ClusterClient.getTopic({ name }).pipe( Effect.tapError(() => Effect.sync(() => s.stop("Failed to get topic"))), ); @@ -39,7 +36,11 @@ export const getTopicCommand = Command.make( { name: topic.name, description: topic.description || "-", - partition_key: topic.partitionKey?.toString() || "-", + partition_key: + topic.partitionKey !== undefined + ? (topic.schema.fields.find((f) => f.id === topic.partitionKey)?.name ?? 
+ topic.partitionKey.toString()) + : "-", freshness_seconds: topic.compaction.freshnessSeconds.toString(), ttl_seconds: topic.compaction.ttlSeconds?.toString() || "-", }, @@ -48,4 +49,7 @@ export const getTopicCommand = Command.make( p.outro("✓ Done"); }); }), -).pipe(Command.withDescription("Get details of a specific topic")); +).pipe( + Command.withDescription("Get details of a specific topic"), + Command.provide(({ host, port }) => makeClusterClientLayer(host, port)), +); diff --git a/packages/cli/src/commands/cluster/topic/list.ts b/packages/cli/src/commands/cluster/topic/list.ts index f2e76a9..dd92b6c 100644 --- a/packages/cli/src/commands/cluster/topic/list.ts +++ b/packages/cli/src/commands/cluster/topic/list.ts @@ -1,11 +1,11 @@ import * as p from "@clack/prompts"; -import { WingsClusterMetadata } from "@useairfoil/wings"; +import { ClusterClient } from "@useairfoil/wings"; import { printTable } from "console-table-printer"; import { Effect, Option } from "effect"; import { Command, Flag } from "effect/unstable/cli"; -import { makeClusterMetadataLayer } from "../../../utils/client.js"; -import { hostOption, pageSizeOption, pageTokenOption, portOption } from "../../../utils/options.js"; +import { makeClusterClientLayer } from "../../../utils/client"; +import { hostOption, pageSizeOption, pageTokenOption, portOption } from "../../../utils/options"; const parentOption = Flag.string("parent").pipe( Flag.withDescription("Parent namespace in format: tenants/{tenant}/namespaces/{namespace}"), @@ -20,21 +20,16 @@ export const listTopicsCommand = Command.make( host: hostOption, port: portOption, }, - ({ parent, pageSize, pageToken, host, port }) => + ({ parent, pageSize, pageToken }) => Effect.gen(function* () { - const layer = makeClusterMetadataLayer(host, port); - const s = p.spinner(); s.start("Fetching topics..."); - const response = yield* WingsClusterMetadata.listTopics({ + const response = yield* ClusterClient.listTopics({ parent, pageSize, pageToken: 
Option.getOrUndefined(pageToken), - }).pipe( - Effect.provide(layer), - Effect.tapError(() => Effect.sync(() => s.stop("Failed to list topics"))), - ); + }).pipe(Effect.tapError(() => Effect.sync(() => s.stop("Failed to list topics")))); s.stop(`Found ${response.topics.length} topic(s)`); @@ -46,7 +41,11 @@ export const listTopicsCommand = Command.make( response.topics.map((topic) => ({ name: topic.name, description: topic.description || "-", - partition_key: topic.partitionKey?.toString() || "-", + partition_key: + topic.partitionKey !== undefined + ? (topic.schema.fields.find((f) => f.id === topic.partitionKey)?.name ?? + topic.partitionKey.toString()) + : "-", })), ); } @@ -58,4 +57,7 @@ export const listTopicsCommand = Command.make( p.outro("✓ Done"); }); }), -).pipe(Command.withDescription("List all topics belonging to a namespace")); +).pipe( + Command.withDescription("List all topics belonging to a namespace"), + Command.provide(({ host, port }) => makeClusterClientLayer(host, port)), +); diff --git a/packages/cli/src/commands/dev.ts b/packages/cli/src/commands/dev.ts index be72e03..c26a18b 100644 --- a/packages/cli/src/commands/dev.ts +++ b/packages/cli/src/commands/dev.ts @@ -1,8 +1,13 @@ import { Effect, FileSystem } from "effect"; import { Command, Flag, Prompt } from "effect/unstable/cli"; -import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process"; - -import { checkDockerVersion, createDockerVolume, runDockerContainer } from "../utils/docker"; +import { ChildProcess } from "effect/unstable/process"; + +import { + checkDockerVersion, + createDockerVolume, + pullDockerImage, + runDockerContainer, +} from "../utils/docker"; import { downloadWings, getWingsPath } from "../utils/wings"; const dockerOption = Flag.boolean("docker").pipe( @@ -55,7 +60,6 @@ export const devCommand = Command.make( const runWithBinary = (options: { version: string; yes: boolean; stress: boolean }) => Effect.gen(function* () { - const spawner = yield* 
ChildProcessSpawner.ChildProcessSpawner; const fs = yield* FileSystem.FileSystem; yield* Effect.logInfo("🪽 Airfoil Dev"); @@ -90,17 +94,15 @@ const runWithBinary = (options: { version: string; yes: boolean; stress: boolean yield* Effect.logInfo(`Starting Wings dev server...`); - const proc = ChildProcess.make(wingsPath, ["dev"], { + const handle = yield* ChildProcess.make(wingsPath, ["dev"], { stdin: "inherit", stdout: "inherit", }); - const handle = yield* spawner.spawn(proc); - const exitCode = yield* handle.exitCode; if (exitCode !== 0) { - yield* Effect.fail(new Error(`Process exited with code ${exitCode}`)); + return yield* Effect.fail(new Error(`Process exited with code ${exitCode}`)); } }); @@ -110,11 +112,8 @@ const runWithDocker = (options: { tag: string; forcePull: boolean }) => let tag = options.tag; - // If tag is not "latest" and doesn't include architecture, append it - // Format: 0.1.0-alpha.10-aarch64 or 0.1.0-alpha.10-x86_64 if (tag !== "latest" && !tag.includes("aarch64") && !tag.includes("x86_64")) { const arch = process.arch === "arm64" ? "aarch64" : "x86_64"; - // Remove 'v' prefix if present for docker tags const cleanTag = tag.startsWith("v") ? 
tag.substring(1) : tag; tag = `${cleanTag}-${arch}`; } @@ -126,6 +125,11 @@ const runWithDocker = (options: { tag: string; forcePull: boolean }) => yield* checkDockerVersion(); + if (options.forcePull) { + yield* Effect.logInfo(`Pulling Docker image: ${image}`); + yield* pullDockerImage(image); + } + yield* createDockerVolume("wings-data"); yield* runDockerContainer(image, "wings-data"); diff --git a/packages/cli/src/commands/sql.ts b/packages/cli/src/commands/sql.ts index d0da43a..dc201c1 100644 --- a/packages/cli/src/commands/sql.ts +++ b/packages/cli/src/commands/sql.ts @@ -1,6 +1,6 @@ import { ArrowFlightSqlClient } from "@useairfoil/flight"; import { printTable } from "console-table-printer"; -import { Effect, FileSystem, Option } from "effect"; +import { Effect, FileSystem, Option, Stream } from "effect"; import { Argument, Command, Flag } from "effect/unstable/cli"; import { Metadata } from "nice-grpc-common"; @@ -58,32 +58,22 @@ export const sqlCommand = Command.make( yield* Effect.logInfo(`Namespace: ${namespace}`); - const client = new ArrowFlightSqlClient( - { - host: hostPort, - }, - { - defaultCallOptions: { - "*": { - metadata: Metadata({ - "x-wings-namespace": namespace, - }), - }, + const client = yield* ArrowFlightSqlClient.make({ + host: hostPort, + defaultCallOptions: { + "*": { + metadata: Metadata({ + "x-wings-namespace": namespace, + }), }, }, - ); - - const flightInfo = yield* Effect.tryPromise({ - try: () => client.executeQuery({ query: sqlQuery }), - catch: (error) => (error instanceof Error ? error : new Error("Query failed")), }); - const batches = yield* Effect.tryPromise({ - try: () => Array.fromAsync(client.executeFlightInfo(flightInfo)), - catch: (error) => (error instanceof Error ? 
error : new Error("Query failed")), - }); + const flightInfo = yield* client.executeQuery({ query: sqlQuery }); + + const batches = yield* client.executeFlightInfo(flightInfo).pipe(Stream.runCollect); - const data = batches.flatMap((batch) => batch.toArray()); + const data = Array.from(batches).flatMap(({ batch }) => batch.toArray()); if (json) { yield* Effect.log(JSON.stringify(data, null, 2)); diff --git a/packages/cli/src/index.ts b/packages/cli/src/index.ts index c6413c2..55c3067 100644 --- a/packages/cli/src/index.ts +++ b/packages/cli/src/index.ts @@ -1,6 +1,6 @@ #!/usr/bin/env node import { NodeRuntime, NodeServices } from "@effect/platform-node"; -import { Effect } from "effect"; +import { Effect, Layer } from "effect"; import { Command } from "effect/unstable/cli"; import { FetchHttpClient } from "effect/unstable/http"; @@ -20,8 +20,7 @@ const cli = Command.run(program, { }); cli.pipe( - Effect.provide(FetchHttpClient.layer), - Effect.provide(NodeServices.layer), + Effect.provide(Layer.mergeAll(FetchHttpClient.layer, NodeServices.layer)), Effect.scoped, NodeRuntime.runMain, ); diff --git a/packages/cli/src/utils/client.ts b/packages/cli/src/utils/client.ts index 5a52cd9..0063217 100644 --- a/packages/cli/src/utils/client.ts +++ b/packages/cli/src/utils/client.ts @@ -1,6 +1,6 @@ -import { WingsClusterMetadata } from "@useairfoil/wings"; +import { ClusterClient } from "@useairfoil/wings"; -export const makeClusterMetadataLayer = (host: string, port: number) => - WingsClusterMetadata.layer({ +export const makeClusterClientLayer = (host: string, port: number) => + ClusterClient.layer({ host: `${host}:${port}`, }); diff --git a/packages/cli/src/utils/docker.ts b/packages/cli/src/utils/docker.ts index 07365af..6d3a0d0 100644 --- a/packages/cli/src/utils/docker.ts +++ b/packages/cli/src/utils/docker.ts @@ -12,7 +12,22 @@ export const checkDockerVersion = () => export const createDockerVolume = (volumeName: string) => Effect.gen(function* () { const docker = 
yield* ChildProcess.make("docker", ["volume", "create", volumeName]); - yield* docker.exitCode; + const exitCode = yield* docker.exitCode; + if (exitCode !== 0) { + return yield* Effect.fail(new Error(`Docker volume create failed with code ${exitCode}`)); + } + }); + +export const pullDockerImage = (image: string) => + Effect.gen(function* () { + const docker = yield* ChildProcess.make("docker", ["pull", image], { + stdout: "inherit", + stderr: "inherit", + }); + const exitCode = yield* docker.exitCode; + if (exitCode !== 0) { + return yield* Effect.fail(new Error(`Docker pull failed with code ${exitCode}`)); + } }); export const runDockerContainer = (image: string, volumeName: string) => @@ -38,6 +53,6 @@ export const runDockerContainer = (image: string, volumeName: string) => const exitCode = yield* docker.exitCode; if (exitCode !== 0) { - yield* Effect.fail(new Error(`Docker process exited with code ${exitCode}`)); + return yield* Effect.fail(new Error(`Docker process exited with code ${exitCode}`)); } }); diff --git a/packages/cli/src/utils/wings.ts b/packages/cli/src/utils/wings.ts index 4669ea7..b36e363 100644 --- a/packages/cli/src/utils/wings.ts +++ b/packages/cli/src/utils/wings.ts @@ -56,25 +56,29 @@ export const downloadWings = (version: string, targetPath: string, isStress = fa const client = yield* HttpClient.HttpClient; const filename = getBinaryFilename(isStress); + const tempPath = `${targetPath}.download`; const downloadPath = version === "latest" ? 
"latest/download" : `download/${version}`; const url = `${GITHUB_RELEASES_URL}/${downloadPath}/${filename}`; yield* fs.makeDirectory(WINGS_DIR, { recursive: true }); - - const response = yield* client.get(url); - const buffer = yield* HttpClientResponse.matchStatus({ - 200: (response) => response.arrayBuffer, - orElse: (response) => { - throw new Error(`Failed to download Wings from ${url}: ${response.status}`); - }, - })(response); - - yield* fs.writeFile(targetPath, new Uint8Array(buffer)); - - yield* verifyChecksum(version, targetPath, isStress); - - yield* fs.chmod(targetPath, 0o755); + yield* fs.remove(tempPath).pipe(Effect.ignore); + + const download = Effect.gen(function* () { + const response = yield* client.get(url); + const buffer = yield* HttpClientResponse.matchStatus({ + 200: (response) => response.arrayBuffer, + orElse: (response) => + Effect.fail(new Error(`Failed to download Wings from ${url}: ${response.status}`)), + })(response); + + yield* fs.writeFile(tempPath, new Uint8Array(buffer)); + yield* verifyChecksum(version, tempPath, isStress); + yield* fs.chmod(tempPath, 0o755); + yield* fs.rename(tempPath, targetPath); + }); + + yield* download.pipe(Effect.onError(() => fs.remove(tempPath).pipe(Effect.ignore))); }); /** diff --git a/packages/cli/test-commands.sh b/packages/cli/test-commands.sh index bcb5a60..f33879b 100755 --- a/packages/cli/test-commands.sh +++ b/packages/cli/test-commands.sh @@ -1,16 +1,18 @@ #!/bin/bash # Test script for Airfoil CLI cluster commands -# Make sure Wings dev server is running first: bun run airfoil:dev --docker +# Run from packages/cli: bash test-commands.sh +# Make sure Wings dev server is running first: pnpm exec tsx src/index.ts dev --docker -set -e # Exit on error +set -e -echo "=== Testing Airfoil CLI Cluster Commands ===" -echo "" +CLI="pnpm exec tsx src/index.ts" -# Colors for output GREEN='\033[0;32m' BLUE='\033[0;34m' -NC='\033[0m' # No Color +NC='\033[0m' + +echo -e "${BLUE}=== Testing Airfoil CLI Cluster 
Commands ===${NC}" +echo "" # ============================================ # TENANT COMMANDS @@ -19,15 +21,15 @@ echo -e "${BLUE}=== 1. TENANT COMMANDS ===${NC}" echo "" echo -e "${GREEN}Creating tenant 'test-tenant'...${NC}" -bun run airfoil:dev cluster create-tenant --tenant-id test-tenant +$CLI cluster create-tenant --tenant-id test-tenant echo "" echo -e "${GREEN}Listing all tenants...${NC}" -bun run airfoil:dev cluster list-tenants +$CLI cluster list-tenants echo "" echo -e "${GREEN}Getting tenant 'test-tenant'...${NC}" -bun run airfoil:dev cluster get-tenant --name tenants/test-tenant +$CLI cluster get-tenant --name tenants/test-tenant echo "" # ============================================ @@ -36,9 +38,8 @@ echo "" echo -e "${BLUE}=== 2. OBJECT STORE COMMANDS ===${NC}" echo "" -# AWS Object Store echo -e "${GREEN}Creating AWS object store...${NC}" -bun run airfoil:dev cluster create-object-store aws \ +$CLI cluster create-object-store aws \ --parent tenants/test-tenant \ --object-store-id test-aws-store \ --bucket-name my-test-bucket \ @@ -47,9 +48,8 @@ bun run airfoil:dev cluster create-object-store aws \ --region us-west-2 echo "" -# Azure Object Store echo -e "${GREEN}Creating Azure object store...${NC}" -bun run airfoil:dev cluster create-object-store azure \ +$CLI cluster create-object-store azure \ --parent tenants/test-tenant \ --object-store-id test-azure-store \ --container-name mycontainer \ @@ -57,9 +57,8 @@ bun run airfoil:dev cluster create-object-store azure \ --storage-account-key "myaccountkey123==" echo "" -# Google Object Store echo -e "${GREEN}Creating Google object store...${NC}" -bun run airfoil:dev cluster create-object-store google \ +$CLI cluster create-object-store google \ --parent tenants/test-tenant \ --object-store-id test-google-store \ --bucket-name my-gcs-bucket \ @@ -67,9 +66,8 @@ bun run airfoil:dev cluster create-object-store google \ --service-account-key "base64-encoded-key" echo "" -# S3-compatible Object Store echo -e 
"${GREEN}Creating S3-compatible object store...${NC}" -bun run airfoil:dev cluster create-object-store s3 \ +$CLI cluster create-object-store s3 \ --parent tenants/test-tenant \ --object-store-id test-s3-store \ --bucket-name my-s3-bucket \ @@ -79,11 +77,11 @@ bun run airfoil:dev cluster create-object-store s3 \ echo "" echo -e "${GREEN}Listing all object stores...${NC}" -bun run airfoil:dev cluster list-object-stores --parent tenants/test-tenant +$CLI cluster list-object-stores --parent tenants/test-tenant echo "" echo -e "${GREEN}Getting AWS object store...${NC}" -bun run airfoil:dev cluster get-object-store --name tenants/test-tenant/object-stores/test-aws-store +$CLI cluster get-object-store --name tenants/test-tenant/object-stores/test-aws-store echo "" # ============================================ @@ -92,26 +90,24 @@ echo "" echo -e "${BLUE}=== 3. DATA LAKE COMMANDS ===${NC}" echo "" -# Iceberg Data Lake echo -e "${GREEN}Creating Iceberg data lake...${NC}" -bun run airfoil:dev cluster create-data-lake iceberg \ +$CLI cluster create-data-lake iceberg \ --parent tenants/test-tenant \ --data-lake-id test-iceberg-lake echo "" -# Parquet Data Lake echo -e "${GREEN}Creating Parquet data lake...${NC}" -bun run airfoil:dev cluster create-data-lake parquet \ +$CLI cluster create-data-lake parquet \ --parent tenants/test-tenant \ --data-lake-id test-parquet-lake echo "" echo -e "${GREEN}Listing all data lakes...${NC}" -bun run airfoil:dev cluster list-data-lakes --parent tenants/test-tenant +$CLI cluster list-data-lakes --parent tenants/test-tenant echo "" echo -e "${GREEN}Getting Iceberg data lake...${NC}" -bun run airfoil:dev cluster get-data-lake --name tenants/test-tenant/data-lakes/test-iceberg-lake +$CLI cluster get-data-lake --name tenants/test-tenant/data-lakes/test-iceberg-lake echo "" # ============================================ @@ -120,20 +116,20 @@ echo "" echo -e "${BLUE}=== 4. 
NAMESPACE COMMANDS ===${NC}" echo "" -echo -e "${GREEN}Creating namespace 'test-namespace' in tenant 'test-tenant' with object store and data lake...${NC}" -bun run airfoil:dev cluster create-namespace \ +echo -e "${GREEN}Creating namespace 'test-namespace'...${NC}" +$CLI cluster create-namespace \ --parent tenants/test-tenant \ --namespace-id test-namespace \ --object-store tenants/test-tenant/object-stores/test-aws-store \ --data-lake tenants/test-tenant/data-lakes/test-iceberg-lake echo "" -echo -e "${GREEN}Listing all namespaces in tenant 'test-tenant'...${NC}" -bun run airfoil:dev cluster list-namespaces --parent tenants/test-tenant +echo -e "${GREEN}Listing all namespaces...${NC}" +$CLI cluster list-namespaces --parent tenants/test-tenant echo "" echo -e "${GREEN}Getting namespace 'test-namespace'...${NC}" -bun run airfoil:dev cluster get-namespace --name tenants/test-tenant/namespaces/test-namespace +$CLI cluster get-namespace --name tenants/test-tenant/namespaces/test-namespace echo "" # ============================================ @@ -142,9 +138,8 @@ echo "" echo -e "${BLUE}=== 5. 
TOPIC COMMANDS ===${NC}" echo "" -# Create topic with simple fields -echo -e "${GREEN}Creating topic 'test-topic' with simple fields...${NC}" -bun run airfoil:dev cluster create-topic \ +echo -e "${GREEN}Creating topic with simple fields...${NC}" +$CLI cluster create-topic \ --parent tenants/test-tenant/namespaces/test-namespace \ --topic-id test-topic \ --fields "id:Utf8" \ @@ -156,9 +151,8 @@ bun run airfoil:dev cluster create-topic \ --freshness-seconds 300 echo "" -# Create topic with nullable fields -echo -e "${GREEN}Creating topic 'test-topic-nullable' with nullable fields...${NC}" -bun run airfoil:dev cluster create-topic \ +echo -e "${GREEN}Creating topic with nullable fields...${NC}" +$CLI cluster create-topic \ --parent tenants/test-tenant/namespaces/test-namespace \ --topic-id test-topic-nullable \ --fields "user_id:Utf8" \ @@ -167,43 +161,61 @@ bun run airfoil:dev cluster create-topic \ --partition-key user_id echo "" -echo -e "${GREEN}Listing all topics in namespace...${NC}" -bun run airfoil:dev cluster list-topics --parent tenants/test-tenant/namespaces/test-namespace +echo -e "${GREEN}Listing all topics...${NC}" +$CLI cluster list-topics --parent tenants/test-tenant/namespaces/test-namespace echo "" echo -e "${GREEN}Getting topic 'test-topic'...${NC}" -bun run airfoil:dev cluster get-topic --name tenants/test-tenant/namespaces/test-namespace/topics/test-topic +$CLI cluster get-topic --name tenants/test-tenant/namespaces/test-namespace/topics/test-topic echo "" # ============================================ -# CLEANUP (DELETE) COMMANDS +# CLEANUP # ============================================ -echo -e "${BLUE}=== 6. CLEANUP (Testing Delete Commands) ===${NC}" +echo -e "${BLUE}=== 6. 
CLEANUP ===${NC}" echo "" echo -e "${GREEN}Deleting topics...${NC}" -bun run airfoil:dev cluster delete-topic --name tenants/test-tenant/namespaces/test-namespace/topics/test-topic --force -bun run airfoil:dev cluster delete-topic --name tenants/test-tenant/namespaces/test-namespace/topics/test-topic-nullable --force +$CLI cluster delete-topic \ + --name tenants/test-tenant/namespaces/test-namespace/topics/test-topic \ + --force +$CLI cluster delete-topic \ + --name tenants/test-tenant/namespaces/test-namespace/topics/test-topic-nullable \ + --force echo "" -echo -e "${GREEN}Deleting namespace (must be deleted before object stores and data lakes)...${NC}" -bun run airfoil:dev cluster delete-namespace --name tenants/test-tenant/namespaces/test-namespace --force +echo -e "${GREEN}Deleting namespace...${NC}" +$CLI cluster delete-namespace \ + --name tenants/test-tenant/namespaces/test-namespace \ + --force echo "" echo -e "${GREEN}Deleting data lakes...${NC}" -bun run airfoil:dev cluster delete-data-lake --name tenants/test-tenant/data-lakes/test-iceberg-lake --force -bun run airfoil:dev cluster delete-data-lake --name tenants/test-tenant/data-lakes/test-parquet-lake --force +$CLI cluster delete-data-lake \ + --name tenants/test-tenant/data-lakes/test-iceberg-lake \ + --force +$CLI cluster delete-data-lake \ + --name tenants/test-tenant/data-lakes/test-parquet-lake \ + --force echo "" echo -e "${GREEN}Deleting object stores...${NC}" -bun run airfoil:dev cluster delete-object-store --name tenants/test-tenant/object-stores/test-aws-store --force -bun run airfoil:dev cluster delete-object-store --name tenants/test-tenant/object-stores/test-azure-store --force -bun run airfoil:dev cluster delete-object-store --name tenants/test-tenant/object-stores/test-google-store --force -bun run airfoil:dev cluster delete-object-store --name tenants/test-tenant/object-stores/test-s3-store --force +$CLI cluster delete-object-store \ + --name 
tenants/test-tenant/object-stores/test-aws-store \ + --force +$CLI cluster delete-object-store \ + --name tenants/test-tenant/object-stores/test-azure-store \ + --force +$CLI cluster delete-object-store \ + --name tenants/test-tenant/object-stores/test-google-store \ + --force +$CLI cluster delete-object-store \ + --name tenants/test-tenant/object-stores/test-s3-store \ + --force echo "" echo -e "${GREEN}Deleting tenant...${NC}" -bun run airfoil:dev cluster delete-tenant --name tenants/test-tenant --force +$CLI cluster delete-tenant --name tenants/test-tenant --force echo "" echo -e "${BLUE}=== ALL TESTS COMPLETED SUCCESSFULLY ===${NC}" From 80b1d612ec9f0d79258d7c83d95bbc2a210f1c0e Mon Sep 17 00:00:00 2001 From: jaipaljadeja Date: Thu, 30 Apr 2026 14:49:13 +0530 Subject: [PATCH 04/12] ack: refactor --- packages/connector-kit/README.md | 370 +++++++++++++++--- packages/connector-kit/package.json | 29 +- packages/connector-kit/src/core/index.ts | 19 + packages/connector-kit/src/core/types.ts | 2 +- .../errors.ts => errors/connector-error.ts} | 0 packages/connector-kit/src/errors/index.ts | 1 + packages/connector-kit/src/index.ts | 36 +- .../connector-kit/src/ingestion/engine.ts | 350 ++++++++--------- packages/connector-kit/src/ingestion/index.ts | 2 + .../src/ingestion/state-store.ts | 34 +- packages/connector-kit/src/publisher/index.ts | 5 + .../connector-kit/src/publisher/service.ts | 27 +- packages/connector-kit/src/publisher/wings.ts | 91 ++--- packages/connector-kit/src/runtime/context.ts | 4 +- packages/connector-kit/src/streams/index.ts | 4 + .../connector-kit/src/streams/pull-stream.ts | 7 +- .../src/streams/webhook-queue.ts | 17 +- packages/connector-kit/src/webhook/index.ts | 9 + packages/connector-kit/src/webhook/server.ts | 40 +- packages/connector-kit/src/webhook/types.ts | 8 +- packages/connector-kit/test/engine.test.ts | 65 ++- packages/connector-kit/tsdown.config.ts | 9 +- 22 files changed, 747 insertions(+), 382 deletions(-) create mode 100644 
packages/connector-kit/src/core/index.ts rename packages/connector-kit/src/{core/errors.ts => errors/connector-error.ts} (100%) create mode 100644 packages/connector-kit/src/errors/index.ts create mode 100644 packages/connector-kit/src/ingestion/index.ts create mode 100644 packages/connector-kit/src/publisher/index.ts create mode 100644 packages/connector-kit/src/streams/index.ts create mode 100644 packages/connector-kit/src/webhook/index.ts diff --git a/packages/connector-kit/README.md b/packages/connector-kit/README.md index 353d61f..b7119cb 100644 --- a/packages/connector-kit/README.md +++ b/packages/connector-kit/README.md @@ -2,36 +2,237 @@ Toolkit for building Airfoil connectors with Effect. ---- +`@useairfoil/connector-kit` gives connector authors a small set of runtime primitives: -## User guide (build a connector) +- connector definitions +- pull and webhook live sources +- webhook routing and payload decoding +- state persistence boundaries +- publisher boundaries +- an ingestion engine that runs entities and events -This section is for connector authors who want to build and run a connector. +It is designed for connectors that: -### Install +- model data with Effect `Schema` +- ingest from polling, webhooks, or both +- publish batches to Airfoil or a custom destination +- run inside an Effect application with explicit Layers + +## Install ```bash -pnpm add @useairfoil/connector-kit +pnpm add @useairfoil/connector-kit effect +``` + +## Package Shape + +The package root exposes the common connector authoring API. 
+ +```ts +import { + ConnectorError, + defineConnector, + defineEntity, + defineEvent, + Ingestion, + Publisher, + Streams, + Webhook, +} from "@useairfoil/connector-kit"; +``` + +Subpath exports are available for the runtime domains: + +- `@useairfoil/connector-kit/ingestion` +- `@useairfoil/connector-kit/publisher` +- `@useairfoil/connector-kit/streams` +- `@useairfoil/connector-kit/webhook` +- `@useairfoil/connector-kit/errors` + +Core definition helpers are intentionally root-only. + +## Core API + +### `defineConnector` + +Creates the top-level connector definition. + +### `defineEntity` + +Defines an entity with: + +- a schema +- a primary key +- a live source +- a backfill stream +- an optional row transform + +Entities are treated as upsert-style streams. The ingestion engine de-duplicates entity backfill rows that were already observed in live ingestion. + +### `defineEvent` + +Defines an event stream with: + +- a schema +- a live source +- an optional backfill stream +- an optional row transform + +Events preserve backfill-before-live ordering when backfill is provided. + +### Exported core types + +The root also exports the common shared types, including: + +- `Batch` +- `Cursor` +- `EntityDefinition` +- `EventDefinition` +- `Transform` +- `ConnectorDefinition` + +## Ingestion + +`Ingestion` contains the runtime engine and state-store boundary. 
+ +Common entry points: + +- `Ingestion.runConnector` +- `Ingestion.StateStore` +- `Ingestion.layerMemory` + +`Ingestion.runConnector(...)`: + +- runs all entity and event ingestion flows +- merges entity live and backfill streams +- de-duplicates overlapping entity backfill rows already seen live +- runs event backfill before event live ingestion +- publishes through `Publisher.Publisher` +- persists state through `Ingestion.StateStore` +- optionally mounts webhook routes and a health endpoint + +Use `Ingestion.layerMemory` for development and tests, or provide your own `StateStore` implementation for durable state. + +## Publisher + +`Publisher` contains the publishing boundary and packaged publisher implementations. + +Common entry points: + +- `Publisher.Publisher` +- `Publisher.layerWings` + +`Publisher.Publisher` is the Effect service boundary for publishing batches. + +The ingestion engine only advances state after publish acknowledgement succeeds. + +Use `Publisher.layerWings(...)` when you want to publish directly to Wings topics. + +## Streams + +`Streams` contains stream helpers for pull-based and webhook-based ingestion. + +Common entry points: + +- `Streams.makePullStream` +- `Streams.makeWebhookQueue` + +### `Streams.makePullStream` + +Builds a polling or backfill stream from a cursor-based page fetcher. + +```ts +import { Streams } from "@useairfoil/connector-kit"; + +const backfill = Streams.makePullStream({ + initialCursor: undefined, + fetchPage: (cursor) => fetchPage(cursor), +}); +``` + +The fetcher returns: + +- `cursor` +- `rows` +- `hasMore` + +Empty pages are skipped until either rows arrive or `hasMore` becomes `false`. + +### `Streams.makeWebhookQueue` + +Creates a queue-backed live source for webhook ingestion. 
+ +```ts +const webhook = yield * Streams.makeWebhookQueue({ capacity: 1024 }); ``` -### Minimal example +This returns: + +- `queue`: where the webhook handler offers batches +- `stream`: the live stream consumed by the ingestion engine + +## Webhooks + +`Webhook` contains the route type, the route helper, and the router builder. + +Common entry points: + +- `Webhook.route` +- `Webhook.WebhookRoute` +- `Webhook.buildWebhookRouter` + +### `Webhook.route` + +Use `Webhook.route(...)` to define routes with schema-driven payload inference. + +```ts +const route = Webhook.route({ + path: "/webhooks/example", + schema: PayloadSchema, + handle: (payload, request, rawBody) => { + payload.id; + return Effect.void; + }, +}); +``` + +The `payload` type is inferred from the route `schema`, so most callers do not need to write `Webhook.WebhookRoute<...>` annotations manually. + +### Route behavior + +Webhook request handling does the following: -This snippet uses Node. Bun is also supported by swapping in Bun's HttpServer -layer. 
+- reads the raw body as bytes +- decodes JSON +- validates the payload with Effect `Schema` +- passes the decoded payload, request, and raw body to the route handler +- returns `400` for invalid payloads +- returns `500` for handler failures + +`Ingestion.runConnector(..., { webhook: { routes } })` automatically mounts: + +- all provided POST routes +- a health endpoint at `/health` by default + +You can override: + +- `healthPath` +- `disableHttpLogger` + +## Minimal Example ```ts import { NodeHttpServer } from "@effect/platform-node"; -import { Schema, Effect, Layer, Queue, Stream } from "effect"; -import { createServer } from "node:http"; import { - type WebhookRoute, defineConnector, defineEntity, + Ingestion, Publisher, - runConnector, - StateStoreInMemory, - makeWebhookQueue, + Streams, + Webhook, } from "@useairfoil/connector-kit"; +import { Effect, Layer, Queue, Schema, Stream } from "effect"; +import { createServer } from "node:http"; const Customer = Schema.Struct({ id: Schema.String, @@ -40,10 +241,10 @@ const Customer = Schema.Struct({ }); const program = Effect.gen(function* () { - const webhook = yield* makeWebhookQueue>(); + const webhook = yield* Streams.makeWebhookQueue>(); - const routes: ReadonlyArray>> = [ - { + const routes = [ + Webhook.route({ path: "/webhook/customers", schema: Customer, handle: (payload) => @@ -51,7 +252,7 @@ const program = Effect.gen(function* () { cursor: new Date(), rows: [payload], }).pipe(Effect.asVoid), - }, + }), ]; const connector = defineConnector({ @@ -68,55 +269,115 @@ const program = Effect.gen(function* () { events: [], }); - yield* runConnector(connector, { + yield* Ingestion.runConnector(connector, { initialCutoff: new Date(), webhook: { routes }, }); -}).pipe( - Effect.provide(NodeHttpServer.layer(createServer, { port: 8080 })), - Effect.provide(StateStoreInMemory), - Effect.provide( - Layer.succeed(Publisher, { - publish: () => Effect.succeed({ success: true }), - }), - ), +}); + +const runtimeLayer = 
Layer.mergeAll( + NodeHttpServer.layer(createServer, { port: 8080 }), + Ingestion.layerMemory, + Layer.succeed(Publisher.Publisher)({ + publish: () => Effect.succeed({ success: true }), + }), +); + +Effect.runPromise(program.pipe(Effect.provide(runtimeLayer))); +``` + +## Typical Runtime Wiring + +Your application usually provides: + +- a `Publisher.Publisher` Layer +- an `Ingestion.StateStore` Layer +- an HTTP server Layer if webhook routes are enabled +- any API client and configuration Layers your connector needs + +Compose the runtime layers first, then provide once: + +```ts +const runtimeLayer = Layer.mergeAll( + serverLayer, + Ingestion.layerMemory, + publisherLayer, + connectorLayer, ); -Effect.runPromise(program); +program.pipe(Effect.provide(runtimeLayer)); +``` + +## Using `Publisher.layerWings` + +```ts +import { Ingestion, Publisher } from "@useairfoil/connector-kit"; +import { Layer } from "effect"; +import { WingsClient } from "@useairfoil/wings"; + +const publisherLayer = Publisher.layerWings({ + connector, + topics: { + customers: customerTopic, + }, +}); + +const runtimeLayer = Layer.mergeAll( + WingsClient.layer({ + host: "localhost:7777", + dataPlaneHost: "localhost:8815", + }), + Ingestion.layerMemory, + publisherLayer, +); ``` ---- +`Publisher.layerWings(...)` expects: -## Development (concepts and architecture) +- the connector definition +- a topic mapping keyed by entity or event name +- optional partition values keyed by entity or event name -### Core concepts +## State and Delivery Semantics -- `defineConnector` describes your connector and its entities. -- `defineEntity` wires live and backfill streams for each entity. -- `Publisher` is the output boundary (where batches go). -- `StateStore` tracks cursors and backfill state. -- `runConnector(..., { webhook: { routes } })` wires webhook routes and health endpoint into the HTTP runtime you provide. 
+Current runtime behavior: -### Layers and Effect services +- entities merge live and backfill concurrently +- entities de-duplicate overlapping backfill rows using primary keys +- events process backfill before live +- publish acknowledgement must succeed before cursor state is persisted -Connector-kit is designed around Effect services and Layers. Your application should provide: +This means a publisher rejection does not silently advance ingestion state. -- a `Publisher` Layer -- a `StateStore` Layer -- an HTTP server Layer (if you pass webhook routes to `runConnector`) -- any custom services your connector needs (API clients, Effect Config) +## Errors -`runConnector` automatically provides connector runtime context for internal -tracing/metrics annotations. +`ConnectorError` is the package-level runtime error type for connector flows. -### Testing with VCR +Use it for: -VCR is provided via `@useairfoil/effect-vcr` as an Effect `HttpClient` layer. This keeps HTTP recording out of connector logic. +- publisher failures +- connector-side transform failures +- state-store failures +- connector runtime failures that should stay in the Effect error channel + +## Testing + +For tests, the most common setup is: + +- `Ingestion.layerMemory` for state +- a small in-memory `Publisher.Publisher` test layer +- `Ingestion.runConnector(...)` inside `Effect.scoped` + +The package’s own tests use this style for ingestion behavior validation. + +## HTTP Recording + +If your connector uses `HttpClient`, keep recording concerns outside connector logic by providing a VCR-backed `HttpClient` Layer. 
```ts -import { FetchHttpClient } from "effect/unstable/http"; import { FileSystemCassetteStore, VcrHttpClient } from "@useairfoil/effect-vcr"; import { Layer } from "effect"; +import { FetchHttpClient } from "effect/unstable/http"; const vcrLayer = VcrHttpClient.layer({ vcrName: "producer-polar", @@ -126,3 +387,18 @@ const vcrLayer = VcrHttpClient.layer({ Layer.provideMerge(FetchHttpClient.layer), ); ``` + +## Recommended Imports + +Use root imports for: + +- connector definition primitives +- the common runtime modules +- `ConnectorError` + +Use subpath imports when you want to be explicit about a specific runtime domain, for example: + +```ts +import { layerMemory } from "@useairfoil/connector-kit/ingestion"; +import { layerWings } from "@useairfoil/connector-kit/publisher"; +``` diff --git a/packages/connector-kit/package.json b/packages/connector-kit/package.json index a78679f..bdc9997 100644 --- a/packages/connector-kit/package.json +++ b/packages/connector-kit/package.json @@ -15,6 +15,31 @@ "types": "./dist/index.d.ts", "import": "./dist/index.js", "default": "./dist/index.js" + }, + "./ingestion": { + "types": "./dist/ingestion/index.d.ts", + "import": "./dist/ingestion/index.js", + "default": "./dist/ingestion/index.js" + }, + "./publisher": { + "types": "./dist/publisher/index.d.ts", + "import": "./dist/publisher/index.js", + "default": "./dist/publisher/index.js" + }, + "./streams": { + "types": "./dist/streams/index.d.ts", + "import": "./dist/streams/index.js", + "default": "./dist/streams/index.js" + }, + "./webhook": { + "types": "./dist/webhook/index.d.ts", + "import": "./dist/webhook/index.js", + "default": "./dist/webhook/index.js" + }, + "./errors": { + "types": "./dist/errors/index.d.ts", + "import": "./dist/errors/index.js", + "default": "./dist/errors/index.js" } }, "scripts": { @@ -25,9 +50,7 @@ }, "dependencies": { "@useairfoil/flight": "workspace:*", - "@useairfoil/wings": "workspace:*", - "consola": "^3.4.2", - "json-stable-stringify": 
"^1.3.0" + "@useairfoil/wings": "workspace:*" }, "devDependencies": { "@effect/vitest": "catalog:", diff --git a/packages/connector-kit/src/core/index.ts b/packages/connector-kit/src/core/index.ts new file mode 100644 index 0000000..a778e97 --- /dev/null +++ b/packages/connector-kit/src/core/index.ts @@ -0,0 +1,19 @@ +export { defineConnector, defineEntity, defineEvent } from "./builder"; +export type { + BackfillStream, + Batch, + ConnectorDefinition, + Cursor, + EntityDefinition, + EntityKey, + EntityRow, + EntitySchema, + EntityType, + EventDefinition, + IngestionState, + LiveSource, + LiveStream, + StreamState, + Transform, + WebhookStream, +} from "./types"; diff --git a/packages/connector-kit/src/core/types.ts b/packages/connector-kit/src/core/types.ts index fb1b9a5..92470fe 100644 --- a/packages/connector-kit/src/core/types.ts +++ b/packages/connector-kit/src/core/types.ts @@ -1,6 +1,6 @@ import type { Effect, Queue, Schema, Stream } from "effect"; -import type { ConnectorError } from "./errors"; +import type { ConnectorError } from "../errors"; export type Cursor = string | number | bigint | Date; diff --git a/packages/connector-kit/src/core/errors.ts b/packages/connector-kit/src/errors/connector-error.ts similarity index 100% rename from packages/connector-kit/src/core/errors.ts rename to packages/connector-kit/src/errors/connector-error.ts diff --git a/packages/connector-kit/src/errors/index.ts b/packages/connector-kit/src/errors/index.ts new file mode 100644 index 0000000..0fbaccc --- /dev/null +++ b/packages/connector-kit/src/errors/index.ts @@ -0,0 +1 @@ +export { ConnectorError } from "./connector-error"; diff --git a/packages/connector-kit/src/index.ts b/packages/connector-kit/src/index.ts index 5dade62..c95b88e 100644 --- a/packages/connector-kit/src/index.ts +++ b/packages/connector-kit/src/index.ts @@ -1,30 +1,6 @@ -export { defineConnector, defineEntity, defineEvent } from "./core/builder"; -export { ConnectorError } from "./core/errors"; -export 
type { - BackfillStream, - Batch, - ConnectorDefinition, - Cursor, - EntityDefinition, - EntityKey, - EntityRow, - EntitySchema, - EntityType, - EventDefinition, - IngestionState, - LiveSource, - LiveStream, - StreamState, - Transform, - WebhookStream, -} from "./core/types"; -export type { RunConnectorOptions } from "./ingestion/engine"; -export { runConnector } from "./ingestion/engine"; -export { StateStore, StateStoreInMemory } from "./ingestion/state-store"; -export { Publisher } from "./publisher/service"; -export { WingsPublisherLayer } from "./publisher/wings"; -export { ConnectorRuntimeContext, ConnectorRuntimeContextLayer } from "./runtime/context"; -export { makePullStream } from "./streams/pull-stream"; -export { makeWebhookQueue } from "./streams/webhook-queue"; -export { buildWebhookRouter } from "./webhook/server"; -export type { WebhookRoute } from "./webhook/types"; +export * from "./core"; +export * as Ingestion from "./ingestion"; +export * as Publisher from "./publisher"; +export * as Streams from "./streams"; +export * as Webhook from "./webhook"; +export * from "./errors"; diff --git a/packages/connector-kit/src/ingestion/engine.ts b/packages/connector-kit/src/ingestion/engine.ts index 985781b..54bd267 100644 --- a/packages/connector-kit/src/ingestion/engine.ts +++ b/packages/connector-kit/src/ingestion/engine.ts @@ -1,7 +1,6 @@ import { Effect, Layer, Metric, Queue, Ref, Stream } from "effect"; import { HttpRouter, type HttpServer, HttpServerResponse } from "effect/unstable/http"; -import type { ConnectorError } from "../core/errors"; import type { Batch, ConnectorDefinition, @@ -17,8 +16,9 @@ import type { } from "../core/types"; import type { WebhookRoute } from "../webhook/types"; +import { ConnectorError } from "../errors"; import { Publisher } from "../publisher/service"; -import { ConnectorRuntimeContext, ConnectorRuntimeContextLayer } from "../runtime/context"; +import { ConnectorRuntimeContext, layer as connectorRuntimeContextLayer } 
from "../runtime/context"; import { buildWebhookRouter } from "../webhook/server"; import { StateStore } from "./state-store"; @@ -44,9 +44,9 @@ type RunConnectorBaseOptions = { readonly initialCutoff?: Cursor; }; -export type RunConnectorOptions = RunConnectorBaseOptions & { +export type RunConnectorOptions = RunConnectorBaseOptions & { readonly webhook?: { - readonly routes: ReadonlyArray>; + readonly routes: ReadonlyArray; readonly healthPath?: HttpRouter.PathInput; readonly disableHttpLogger?: boolean; }; @@ -56,33 +56,31 @@ type RunConnectorNoWebhookOptions = RunConnectorBaseOptions & { readonly webhook?: undefined; }; -type RunConnectorWebhookOptions = RunConnectorOptions & { - readonly webhook: NonNullable["webhook"]>; +type RunConnectorWebhookOptions = RunConnectorOptions & { + readonly webhook: NonNullable; }; export function runConnector( connector: ConnectorDefinition, options?: RunConnectorNoWebhookOptions, ): Effect.Effect; -export function runConnector( +export function runConnector( connector: ConnectorDefinition, - options: RunConnectorWebhookOptions, + options: RunConnectorWebhookOptions, ): Effect.Effect; -export function runConnector( - connector: ConnectorDefinition, - options?: RunConnectorOptions, -) { +export function runConnector(connector: ConnectorDefinition, options?: RunConnectorOptions) { + const runtimeLayer = options?.webhook + ? Layer.mergeAll( + connectorRuntimeContextLayer(connector), + makeWebhookServerLayer(options.webhook), + ) + : connectorRuntimeContextLayer(connector); + return Effect.withSpan( Effect.gen(function* () { const initialCutoff = options?.initialCutoff ?? 
new Date(); - const ingestion = runIngestion(connector, initialCutoff); - - if (!options?.webhook) { - return yield* ingestion; - } - - return yield* ingestion.pipe(Effect.provide(makeWebhookServerLayer(options.webhook))); - }).pipe(Effect.provide(ConnectorRuntimeContextLayer(connector))), + return yield* runIngestion(connector, initialCutoff); + }).pipe(Effect.provide(runtimeLayer)), "connector.run", { attributes: { @@ -106,8 +104,8 @@ const runIngestion = ( }).pipe(Effect.asVoid); }; -const makeWebhookServerLayer = (options: { - readonly routes: ReadonlyArray>; +const makeWebhookServerLayer = (options: { + readonly routes: ReadonlyArray; readonly healthPath?: HttpRouter.PathInput; readonly disableHttpLogger?: boolean; }): Layer.Layer => { @@ -127,119 +125,113 @@ const createInitialState = (cutoff: Cursor): IngestionState => ({ live: { cutoff }, }); -const makeStateRef = ( - key: string, - initialCutoff: Cursor, -): Effect.Effect>, ConnectorError, StateStore> => - Effect.gen(function* () { - // Load persisted state or initialize a new one for this stream. - const store = yield* StateStore; - const existing = yield* store.getState(key); - const initial = existing ?? createInitialState(initialCutoff); - return yield* Ref.make(initial); - }); +const makeStateRef = Effect.fnUntraced(function* (key: string, initialCutoff: Cursor) { + // Load persisted state or initialize a new one for this stream. + const store = yield* StateStore; + const existing = yield* store.getState(key); + const initial = existing ?? createInitialState(initialCutoff); + return yield* Ref.make(initial); +}); -const runEntity = ( +const runEntity = Effect.fnUntraced(function* ( entity: EntityDefinition, initialCutoff: Cursor, -): Effect.Effect => - Effect.gen(function* () { - type Row = EntityRow; - const stateRef = yield* makeStateRef(entity.name, initialCutoff); - // Tracks which primary keys have already been emitted. 
- const seenRef = yield* Ref.make(new Set()); - - const liveStream = resolveLiveStream(entity.live); - const tagLive = (batch: Batch) => ({ - source: "live" as const, - batch, +) { + type Row = EntityRow; + const stateRef = yield* makeStateRef(entity.name, initialCutoff); + // Tracks which primary keys have already been emitted. + const seenRef = yield* Ref.make(new Set()); + + const liveStream = resolveLiveStream(entity.live); + const tagLive = (batch: Batch) => ({ + source: "live" as const, + batch, + }); + const updateSeen = (rows: ReadonlyArray) => + Ref.update(seenRef, (seen) => { + const next = new Set(seen); + for (const row of rows) { + const key = String(row[entity.primaryKey]); + next.add(key); + } + return next; }); - const updateSeen = (rows: ReadonlyArray) => - Ref.update(seenRef, (seen) => { - const next = new Set(seen); - for (const row of rows) { + + // Entities are upserts, so live and backfill can overlap. We keep an in-memory + // seen set (primary keys) so backfill does not re-emit rows already observed live. + const backfillTagged = Stream.mapEffect(entity.backfill, (batch) => + Ref.get(seenRef).pipe( + Effect.map((seen) => { + const filtered = batch.rows.filter((row) => { const key = String(row[entity.primaryKey]); - next.add(key); - } - return next; - }); - - // Entities are upserts, so live and backfill can overlap. We keep an in-memory - // seen set (primary keys) so backfill does not re-emit rows already observed live. - const backfillTagged = Stream.mapEffect(entity.backfill, (batch) => - Ref.get(seenRef).pipe( - Effect.map((seen) => { - const filtered = batch.rows.filter((row) => { - const key = String(row[entity.primaryKey]); - return !seen.has(key); - }); - return { - source: "backfill" as const, - batch: { cursor: batch.cursor, rows: filtered }, - }; - }), - ), - ).pipe(Stream.tap(({ batch }) => updateSeen(batch.rows))); - - // For webhook live sources, we wait for the first live batch before starting - // backfill. 
That first batch establishes the cutoff timestamp and seeds the - // seen set so backfill can de-dupe correctly. - // Queue-backed streams are single-consumer; splitting with take/drop would - // consume and discard elements. Take the first element directly from the - // queue, then let Stream.fromQueue continue from element #2. - if (isWebhookStream(entity.live)) { - const firstBatch = yield* Queue.take(entity.live.queue); - yield* updateSeen(firstBatch.rows); - yield* processTaggedStream( - Stream.make({ source: "live" as const, batch: firstBatch }), - entity.name, - entity.transform, - stateRef, - ); - - // liveStream is Stream.fromQueue on the same queue, continues from element #2. - const liveTailTagged = Stream.map(liveStream, tagLive).pipe( - Stream.tap(({ batch }) => updateSeen(batch.rows)), - ); - const merged = Stream.merge(liveTailTagged, backfillTagged); - yield* processTaggedStream(merged, entity.name, entity.transform, stateRef); - return; - } - - // For pull-based live sources, we can merge immediately because there is no - // webhook cutoff gating and the live stream is not queue-backed. - const liveTagged = Stream.map(liveStream, tagLive).pipe( - Stream.tap(({ batch }) => updateSeen(batch.rows)), + return !seen.has(key); + }); + return { + source: "backfill" as const, + batch: { cursor: batch.cursor, rows: filtered }, + }; + }), + ), + ).pipe(Stream.tap(({ batch }) => updateSeen(batch.rows))); + + // For webhook live sources, we wait for the first live batch before starting + // backfill. That first batch establishes the cutoff timestamp and seeds the + // seen set so backfill can de-dupe correctly. + // Queue-backed streams are single-consumer; splitting with take/drop would + // consume and discard elements. Take the first element directly from the + // queue, then let Stream.fromQueue continue from element #2. 
+ if (isWebhookStream(entity.live)) { + const firstBatch = yield* Queue.take(entity.live.queue); + yield* updateSeen(firstBatch.rows); + yield* processTaggedStream( + Stream.make({ source: "live" as const, batch: firstBatch }), + entity.name, + entity.transform, + stateRef, ); - const merged = Stream.merge(liveTagged, backfillTagged); + // liveStream is Stream.fromQueue on the same queue, continues from element #2. + const liveTailTagged = Stream.map(liveStream, tagLive).pipe( + Stream.tap(({ batch }) => updateSeen(batch.rows)), + ); + const merged = Stream.merge(liveTailTagged, backfillTagged); yield* processTaggedStream(merged, entity.name, entity.transform, stateRef); - }); + return; + } -const runEvent = ( + // For pull-based live sources, we can merge immediately because there is no + // webhook cutoff gating and the live stream is not queue-backed. + const liveTagged = Stream.map(liveStream, tagLive).pipe( + Stream.tap(({ batch }) => updateSeen(batch.rows)), + ); + + const merged = Stream.merge(liveTagged, backfillTagged); + yield* processTaggedStream(merged, entity.name, entity.transform, stateRef); +}); + +const runEvent = Effect.fnUntraced(function* ( event: EventDefinition, initialCutoff: Cursor, -): Effect.Effect => - Effect.gen(function* () { - type Row = EntityRow; - const stateRef = yield* makeStateRef(event.name, initialCutoff); - const liveStream = resolveLiveStream(event.live); - - // Events must backfill first to preserve ordering. - if (event.backfill) { - const backfillTagged = Stream.map(event.backfill, (batch) => ({ - source: "backfill" as const, - batch, - })); - yield* processTaggedStream(backfillTagged, event.name, event.transform, stateRef); - } - - const liveTagged = Stream.map(liveStream, (batch) => ({ - source: "live" as const, +) { + type Row = EntityRow; + const stateRef = yield* makeStateRef(event.name, initialCutoff); + const liveStream = resolveLiveStream(event.live); + + // Events must backfill first to preserve ordering. 
+ if (event.backfill) { + const backfillTagged = Stream.map(event.backfill, (batch) => ({ + source: "backfill" as const, batch, })); - yield* processTaggedStream(liveTagged, event.name, event.transform, stateRef); - }); + yield* processTaggedStream(backfillTagged, event.name, event.transform, stateRef); + } + + const liveTagged = Stream.map(liveStream, (batch) => ({ + source: "live" as const, + batch, + })); + yield* processTaggedStream(liveTagged, event.name, event.transform, stateRef); +}); const updateState = ( state: IngestionState, @@ -256,66 +248,68 @@ const resolveLiveStream = (source: LiveSource): Stream.Stream, Co const isWebhookStream = (source: LiveSource): source is WebhookStream => typeof source === "object" && source !== null && "queue" in source && "stream" in source; -const processTaggedStream = >( +const processTaggedStream = Effect.fnUntraced(function* >( stream: Stream.Stream, ConnectorError>, name: string, transform: Transform | undefined, stateRef: Ref.Ref>, -): Effect.Effect => - Effect.gen(function* () { - const runtime = yield* ConnectorRuntimeContext; - const connectorName = runtime.connector.name; - - yield* Stream.runForEach(stream, ({ source, batch }) => - Effect.withSpan( - Effect.gen(function* () { - const metric = { - connector: connectorName, - stream: name, - source, - }; - - yield* Metric.update(Metric.withAttributes(connectorBatchesTotal, metric), 1); - yield* Metric.update( - Metric.withAttributes(connectorRowsTotal, metric), - batch.rows.length, - ); - yield* Metric.update( - Metric.withAttributes(connectorBatchSize, metric), - batch.rows.length, - ); - - // Optional per-row transformation. - const rows = transform ? yield* Effect.forEach(batch.rows, transform) : batch.rows; - - // Publish before updating cursor state. - const publisher = yield* Publisher; - yield* publisher.publish({ - name, - source, - batch: { - cursor: batch.cursor, - rows, - }, - }); - - // Persist state only after publish succeeds. 
- const nextState = yield* Ref.updateAndGet(stateRef, (state) => - updateState(state, source, batch.cursor), +) { + const runtime = yield* ConnectorRuntimeContext; + const connectorName = runtime.connector.name; + + yield* Stream.runForEach(stream, ({ source, batch }) => + Effect.withSpan( + Effect.gen(function* () { + const metric = { + connector: connectorName, + stream: name, + source, + }; + + yield* Metric.update(Metric.withAttributes(connectorBatchesTotal, metric), 1); + yield* Metric.update(Metric.withAttributes(connectorRowsTotal, metric), batch.rows.length); + yield* Metric.update(Metric.withAttributes(connectorBatchSize, metric), batch.rows.length); + + // Optional per-row transformation. + const rows = transform ? yield* Effect.forEach(batch.rows, transform) : batch.rows; + + // Publish before updating cursor state. + const publisher = yield* Publisher; + const ack = yield* publisher.publish({ + name, + source, + batch: { + cursor: batch.cursor, + rows, + }, + }); + + // TODO: check if this is correct + if (!ack.success) { + return yield* Effect.fail( + new ConnectorError({ + message: `Publisher rejected batch for ${name}`, + }), ); + } - const store = yield* StateStore; - yield* store.setState(name, nextState); - }), - "connector.batch.process", - { - attributes: { - "connector.name": connectorName, - "connector.stream.name": name, - "connector.stream.source": source, - "connector.batch.rows": batch.rows.length, - }, + // Persist state only after publish succeeds. 
+ const nextState = yield* Ref.updateAndGet(stateRef, (state) => + updateState(state, source, batch.cursor), + ); + + const store = yield* StateStore; + yield* store.setState(name, nextState); + }), + "connector.batch.process", + { + attributes: { + "connector.name": connectorName, + "connector.stream.name": name, + "connector.stream.source": source, + "connector.batch.rows": batch.rows.length, }, - ), - ); - }); + }, + ), + ); +}); diff --git a/packages/connector-kit/src/ingestion/index.ts b/packages/connector-kit/src/ingestion/index.ts new file mode 100644 index 0000000..e1dd2f5 --- /dev/null +++ b/packages/connector-kit/src/ingestion/index.ts @@ -0,0 +1,2 @@ +export { runConnector, type RunConnectorOptions } from "./engine"; +export { layerMemory, StateStore, type StateStoreService } from "./state-store"; diff --git a/packages/connector-kit/src/ingestion/state-store.ts b/packages/connector-kit/src/ingestion/state-store.ts index f787a47..23b4372 100644 --- a/packages/connector-kit/src/ingestion/state-store.ts +++ b/packages/connector-kit/src/ingestion/state-store.ts @@ -1,26 +1,28 @@ import { Context, Effect, Layer, Ref } from "effect"; -import type { ConnectorError } from "../core/errors"; import type { Cursor, IngestionState } from "../core/types"; +import type { ConnectorError } from "../errors"; -export class StateStore extends Context.Service< - StateStore, - { - readonly getState: ( - key: string, - ) => Effect.Effect | undefined, ConnectorError>; - readonly setState: ( - key: string, - state: IngestionState, - ) => Effect.Effect; - } ->()("StateStore") {} +export interface StateStoreService { + readonly getState: ( + key: string, + ) => Effect.Effect | undefined, ConnectorError>; + readonly setState: ( + key: string, + state: IngestionState, + ) => Effect.Effect; +} -export const StateStoreInMemory = Layer.effect(StateStore)( +export class StateStore extends Context.Service()( + "@useairfoil/connector-kit/StateStore", +) {} + +/** In-memory state store 
backed by a `Ref`. Suitable for development and testing. */ +export const layerMemory: Layer.Layer = Layer.effect(StateStore)( Effect.gen(function* () { const ref = yield* Ref.make(new Map>()); - return { + return StateStore.of({ getState: (key) => Effect.map(Ref.get(ref), (map) => map.get(key)), setState: (key, state) => Effect.map( @@ -31,6 +33,6 @@ export const StateStoreInMemory = Layer.effect(StateStore)( }), () => undefined, ), - }; + }); }), ); diff --git a/packages/connector-kit/src/publisher/index.ts b/packages/connector-kit/src/publisher/index.ts new file mode 100644 index 0000000..76dfda9 --- /dev/null +++ b/packages/connector-kit/src/publisher/index.ts @@ -0,0 +1,5 @@ +export type { PublishAck, PublishOptions, PublisherService, PublishSource } from "./service"; +export type { WingsPublisherConfig } from "./wings"; + +export { Publisher } from "./service"; +export { layerWings } from "./wings"; diff --git a/packages/connector-kit/src/publisher/service.ts b/packages/connector-kit/src/publisher/service.ts index 9f8c023..3b75647 100644 --- a/packages/connector-kit/src/publisher/service.ts +++ b/packages/connector-kit/src/publisher/service.ts @@ -2,20 +2,25 @@ import type * as Effect from "effect/Effect"; import { Context } from "effect"; -import type { ConnectorError } from "../core/errors"; import type { Batch } from "../core/types"; +import type { ConnectorError } from "../errors"; + +export type PublishSource = "live" | "backfill"; + +export type PublishOptions = { + readonly name: string; + readonly source: PublishSource; + readonly batch: Batch>; +}; export type PublishAck = { readonly success: boolean; }; -export class Publisher extends Context.Service< - Publisher, - { - readonly publish: (options: { - readonly name: string; - readonly source: "live" | "backfill"; - readonly batch: Batch>; - }) => Effect.Effect; - } ->()("Publisher") {} +export interface PublisherService { + readonly publish: (options: PublishOptions) => Effect.Effect; +} + +export 
class Publisher extends Context.Service()( + "@useairfoil/connector-kit/Publisher", +) {} diff --git a/packages/connector-kit/src/publisher/wings.ts b/packages/connector-kit/src/publisher/wings.ts index 49c1edb..2bb0d76 100644 --- a/packages/connector-kit/src/publisher/wings.ts +++ b/packages/connector-kit/src/publisher/wings.ts @@ -1,12 +1,10 @@ -import type { PartitionValue } from "@useairfoil/wings"; - import * as Wings from "@useairfoil/wings"; import { Effect, Layer } from "effect"; import type { ConnectorDefinition } from "../core/types"; -import { ConnectorError } from "../core/errors"; -import { Publisher } from "./service"; +import { ConnectorError } from "../errors"; +import { Publisher, type PublisherService } from "./service"; type Rows = Record; @@ -15,7 +13,7 @@ export type WingsPublisherConfig = { /** Map of entity/event name to Wings topic. */ readonly topics: Record; /** per-stream partition value (key is entity/event name). */ - readonly partitionValues?: Record; + readonly partitionValues?: Record; }; /** Publisher entry for a single entity/event. */ @@ -25,29 +23,25 @@ type PublisherEntry = { /** Partition field name (if any). */ readonly partitionField?: string; /** Partition value (if any). */ - readonly partitionValue?: PartitionValue; + readonly partitionValue?: Wings.Partition.PartitionValue; }; +/** Convert JSON rows into an Arrow RecordBatch for Wings. Returns a typed failure if rows are empty. */ const buildRecordBatch = (rows: ReadonlyArray) => { - // Convert JSON rows into an Arrow RecordBatch for Wings. - const table = Wings.tableFromJSON(Array.from(rows)); + const table = Wings.Arrow.tableFromJSON(Array.from(rows)); const [batch] = table.batches; - - if (!batch) { - throw new ConnectorError({ message: "No rows to publish" }); - } - - return batch; + return batch + ? 
Effect.succeed(batch) + : Effect.fail(new ConnectorError({ message: "No rows to publish" })); }; -export const WingsPublisherLayer = ( +export const layerWings = ( config: WingsPublisherConfig, ): Layer.Layer => Layer.effect(Publisher)( Effect.gen(function* () { const entries = new Map(); - // create and store a wings publisher for each entity/event. for (const def of [...config.connector.entities, ...config.connector.events]) { const topic = config.topics[def.name]; if (!topic) { @@ -80,38 +74,39 @@ export const WingsPublisherLayer = ( }); } - return { - publish: ({ name, source: _source, batch }) => - Effect.gen(function* () { - const entry = entries.get(name); - if (!entry) { - return yield* Effect.fail(new ConnectorError({ message: `Unknown stream ${name}` })); - } - - if (batch.rows.length === 0) { - return { success: true }; - } - - const recordBatch = buildRecordBatch(batch.rows); - const result = yield* entry.publisher - .push({ - batch: recordBatch, - partitionValue: entry.partitionValue, - }) - .pipe( - Effect.mapError( - (error) => - new ConnectorError({ - message: error.message, - cause: error, - }), - ), - ); - - return { - success: !!(result.result && result.result.$case === "accepted"), - }; - }), + const service: PublisherService = { + publish: Effect.fn("publisher/publish")(function* ({ name, source: _source, batch }) { + const entry = entries.get(name); + if (!entry) { + return yield* Effect.fail(new ConnectorError({ message: `Unknown stream ${name}` })); + } + + if (batch.rows.length === 0) { + return { success: true }; + } + + const recordBatch = yield* buildRecordBatch(batch.rows); + const result = yield* entry.publisher + .push({ + batch: recordBatch, + partitionValue: entry.partitionValue, + }) + .pipe( + Effect.mapError( + (error) => + new ConnectorError({ + message: error.message, + cause: error, + }), + ), + ); + + return { + success: !!(result.result && result.result.$case === "accepted"), + }; + }), }; + + return Publisher.of(service); 
}), ); diff --git a/packages/connector-kit/src/runtime/context.ts b/packages/connector-kit/src/runtime/context.ts index a0a0742..eab1a0b 100644 --- a/packages/connector-kit/src/runtime/context.ts +++ b/packages/connector-kit/src/runtime/context.ts @@ -11,5 +11,5 @@ export class ConnectorRuntimeContext extends Context.Service< ConnectorRuntimeContextValue >()("@useairfoil/connector-kit/ConnectorRuntimeContext") {} -export const ConnectorRuntimeContextLayer = (connector: ConnectorDefinition) => - Layer.succeed(ConnectorRuntimeContext)({ connector }); +export const layer = (connector: ConnectorDefinition) => + Layer.succeed(ConnectorRuntimeContext)(ConnectorRuntimeContext.of({ connector })); diff --git a/packages/connector-kit/src/streams/index.ts b/packages/connector-kit/src/streams/index.ts new file mode 100644 index 0000000..6a17b78 --- /dev/null +++ b/packages/connector-kit/src/streams/index.ts @@ -0,0 +1,4 @@ +export type { PullFetcher, PullPage, PullStreamOptions } from "./pull-stream"; +export type { WebhookStream } from "../core"; +export { makePullStream } from "./pull-stream"; +export { makeWebhookQueue } from "./webhook-queue"; diff --git a/packages/connector-kit/src/streams/pull-stream.ts b/packages/connector-kit/src/streams/pull-stream.ts index 634ef05..43b56a5 100644 --- a/packages/connector-kit/src/streams/pull-stream.ts +++ b/packages/connector-kit/src/streams/pull-stream.ts @@ -1,7 +1,7 @@ import { Effect, Stream } from "effect"; -import type { ConnectorError } from "../core/errors"; import type { Batch, Cursor } from "../core/types"; +import type { ConnectorError } from "../errors"; export type PullPage = { readonly cursor: Cursor; @@ -21,8 +21,9 @@ export type PullStreamOptions = { export const makePullStream = ( options: PullStreamOptions, ): Stream.Stream, ConnectorError, R> => - Stream.unfold({ cursor: options.initialCursor, done: false }, (state) => - Effect.gen(function* () { + Stream.unfold( + { cursor: options.initialCursor, done: false }, + 
Effect.fnUntraced(function* (state) { if (state.done) { return undefined; } diff --git a/packages/connector-kit/src/streams/webhook-queue.ts b/packages/connector-kit/src/streams/webhook-queue.ts index d0b058f..5c23322 100644 --- a/packages/connector-kit/src/streams/webhook-queue.ts +++ b/packages/connector-kit/src/streams/webhook-queue.ts @@ -1,14 +1,13 @@ import { Effect, Queue, Stream } from "effect"; -import type { ConnectorError } from "../core/errors"; import type { Batch, WebhookStream } from "../core/types"; +import type { ConnectorError } from "../errors"; -export const makeWebhookQueue = (options?: { +export const makeWebhookQueue = Effect.fnUntraced(function* (options?: { readonly capacity?: number; -}): Effect.Effect, never> => - Effect.gen(function* () { - const capacity = options?.capacity ?? 1024; - const queue = yield* Queue.bounded>(capacity); - const stream: Stream.Stream, ConnectorError> = Stream.fromQueue(queue); - return { queue, stream }; - }); +}) { + const capacity = options?.capacity ?? 
1024; + const queue = yield* Queue.bounded>(capacity); + const stream: Stream.Stream, ConnectorError> = Stream.fromQueue(queue); + return { queue, stream } satisfies WebhookStream; +}); diff --git a/packages/connector-kit/src/webhook/index.ts b/packages/connector-kit/src/webhook/index.ts new file mode 100644 index 0000000..3a327fa --- /dev/null +++ b/packages/connector-kit/src/webhook/index.ts @@ -0,0 +1,9 @@ +import type * as Schema from "effect/Schema"; + +import type { WebhookRoute } from "./types"; + +export { buildWebhookRouter } from "./server"; +export type { WebhookRoute } from "./types"; + +export const route = >(definition: WebhookRoute): WebhookRoute => + definition; diff --git a/packages/connector-kit/src/webhook/server.ts b/packages/connector-kit/src/webhook/server.ts index 4c74896..2a96206 100644 --- a/packages/connector-kit/src/webhook/server.ts +++ b/packages/connector-kit/src/webhook/server.ts @@ -7,8 +7,8 @@ class InvalidWebhookPayloadError extends Data.TaggedError("InvalidWebhookPayload readonly message: string; }> {} -const makeHandler = (route: WebhookRoute) => - Effect.gen(function* () { +const makeHandler = Effect.fn("webhook/handler")( + function* >(route: WebhookRoute) { const request = yield* HttpServerRequest.HttpServerRequest; const rawBuffer = yield* request.arrayBuffer.pipe( Effect.mapError( @@ -33,32 +33,32 @@ const makeHandler = (route: WebhookRoute) => ), ); yield* route.handle(payload, request, rawBody); - // unsafeJson serializes synchronously — no Effect, no HttpBodyError + // jsonUnsafe serializes synchronously — no Effect, no HttpBodyError return HttpServerResponse.jsonUnsafe({ ok: true }); - }).pipe( - Effect.catchTag("InvalidWebhookPayloadError", () => - Effect.succeed( - HttpServerResponse.jsonUnsafe( - { ok: false, error: "Invalid webhook payload" }, - { status: 400 }, - ), + }, + Effect.catchTag("InvalidWebhookPayloadError", () => + Effect.succeed( + HttpServerResponse.jsonUnsafe( + { ok: false, error: "Invalid webhook 
payload" }, + { status: 400 }, ), ), - Effect.catchCause((cause) => - Effect.logWarning(`Webhook handler error: ${Cause.pretty(cause)}`).pipe( - Effect.andThen( - Effect.succeed( - HttpServerResponse.jsonUnsafe( - { ok: false, error: "Webhook handler failed" }, - { status: 500 }, - ), + ), + Effect.catchCause((cause) => + Effect.logWarning(`Webhook handler error: ${Cause.pretty(cause)}`).pipe( + Effect.andThen( + Effect.succeed( + HttpServerResponse.jsonUnsafe( + { ok: false, error: "Webhook handler failed" }, + { status: 500 }, ), ), ), ), - ); + ), +); -export const buildWebhookRouter = (routes: ReadonlyArray>) => +export const buildWebhookRouter = (routes: ReadonlyArray) => HttpRouter.addAll( routes.map((route) => HttpRouter.route("POST", route.path, makeHandler(route))), ); diff --git a/packages/connector-kit/src/webhook/types.ts b/packages/connector-kit/src/webhook/types.ts index 930f3e7..99c6b76 100644 --- a/packages/connector-kit/src/webhook/types.ts +++ b/packages/connector-kit/src/webhook/types.ts @@ -4,13 +4,13 @@ import type { Effect } from "effect"; import type * as Schema from "effect/Schema"; import type { HttpRouter, HttpServerRequest } from "effect/unstable/http"; -import type { ConnectorError } from "../core/errors"; +import type { ConnectorError } from "../errors"; -export type WebhookRoute = { +export type WebhookRoute = Schema.Schema> = { readonly path: HttpRouter.PathInput; - readonly schema: Schema.Schema; + readonly schema: S; readonly handle: ( - payload: TPayload, + payload: Schema.Schema.Type, request: HttpServerRequest.HttpServerRequest, rawBody?: Uint8Array, ) => Effect.Effect; diff --git a/packages/connector-kit/test/engine.test.ts b/packages/connector-kit/test/engine.test.ts index 230fc76..3543d42 100644 --- a/packages/connector-kit/test/engine.test.ts +++ b/packages/connector-kit/test/engine.test.ts @@ -1,11 +1,12 @@ import { describe, expect, it } from "@effect/vitest"; import { Deferred, Effect, Layer, Queue, Ref, Schema, Stream } 
from "effect"; -import type { ConnectorError } from "../src/core/errors"; +import type { Cursor, IngestionState } from "../src/core/types"; +import type { ConnectorError } from "../src/errors"; -import { defineConnector, defineEntity } from "../src/core/builder"; +import { defineConnector, defineEntity, defineEvent } from "../src/core/builder"; import { runConnector } from "../src/ingestion/engine"; -import { StateStoreInMemory } from "../src/ingestion/state-store"; +import { layerMemory, StateStore } from "../src/ingestion/state-store"; import { Publisher } from "../src/publisher/service"; import { makeWebhookQueue } from "../src/streams/webhook-queue"; @@ -80,8 +81,7 @@ describe("engine merging logic", () => { yield* Effect.forkScoped( runConnector(connector, { initialCutoff: new Date() }).pipe( - Effect.provide(StateStoreInMemory), - Effect.provide(publisherLayer), + Effect.provide(Layer.mergeAll(layerMemory, publisherLayer)), ), ); @@ -144,8 +144,7 @@ describe("engine merging logic", () => { yield* Effect.forkScoped( runConnector(connector, { initialCutoff: new Date() }).pipe( - Effect.provide(StateStoreInMemory), - Effect.provide(publisherLayer), + Effect.provide(Layer.mergeAll(layerMemory, publisherLayer)), ), ); @@ -205,8 +204,7 @@ describe("engine merging logic", () => { yield* Effect.forkScoped( runConnector(connector, { initialCutoff: new Date() }).pipe( - Effect.provide(StateStoreInMemory), - Effect.provide(publisherLayer), + Effect.provide(Layer.mergeAll(layerMemory, publisherLayer)), ), ); @@ -223,4 +221,53 @@ describe("engine merging logic", () => { expect(count).toBe(1); }).pipe(Effect.scoped), ); + + it.effect("does not persist state when the publisher rejects a batch", () => + Effect.gen(function* () { + const row: TestRow = { + id: "event-1", + created_at: "2024-01-01T00:00:00Z", + }; + + const event = defineEvent({ + name: "events", + schema: TestRowSchema, + live: Stream.empty, + backfill: Stream.make({ cursor: "2024-01-01T00:00:00Z", rows: 
[row] }), + }); + + const connector = defineConnector({ + name: "test", + entities: [], + events: [event], + }); + + const stateRef = yield* Ref.make(new Map>()); + + const stateStoreLayer = Layer.succeed(StateStore)({ + getState: (key) => Effect.map(Ref.get(stateRef), (state) => state.get(key)), + setState: (key, state) => + Ref.update(stateRef, (current) => { + const next = new Map(current); + next.set(key, state); + return next; + }), + }); + + const rejectingPublisherLayer = Layer.succeed(Publisher)({ + publish: () => Effect.succeed({ success: false }), + }); + + const result = yield* Effect.result( + runConnector(connector, { initialCutoff: new Date("2024-01-02T00:00:00Z") }).pipe( + Effect.provide(Layer.mergeAll(stateStoreLayer, rejectingPublisherLayer)), + ), + ); + + expect(result._tag).toBe("Failure"); + + const persisted = yield* Ref.get(stateRef); + expect(persisted.get("events")).toBeUndefined(); + }).pipe(Effect.scoped), + ); }); diff --git a/packages/connector-kit/tsdown.config.ts b/packages/connector-kit/tsdown.config.ts index 7b434c7..f54f480 100644 --- a/packages/connector-kit/tsdown.config.ts +++ b/packages/connector-kit/tsdown.config.ts @@ -1,7 +1,14 @@ import { defineConfig } from "tsdown"; export default defineConfig({ - entry: ["src/index.ts"], + entry: [ + "src/index.ts", + "src/ingestion/index.ts", + "src/publisher/index.ts", + "src/streams/index.ts", + "src/webhook/index.ts", + "src/errors/index.ts", + ], format: ["esm"], dts: true, sourcemap: true, From 57c2d15ef76d6ea79f3add02d347dde0cca2967d Mon Sep 17 00:00:00 2001 From: jaipaljadeja Date: Thu, 30 Apr 2026 14:49:26 +0530 Subject: [PATCH 05/12] vcr: refactor --- packages/effect-vcr/README.md | 246 ++++++++++++++---- packages/effect-vcr/package.json | 20 ++ packages/effect-vcr/src/cassette-store.ts | 37 ++- .../src/file-system-cassette-store.ts | 23 +- packages/effect-vcr/src/index.ts | 11 +- packages/effect-vcr/src/sanitize.ts | 24 +- packages/effect-vcr/src/types.ts | 8 +- 
packages/effect-vcr/src/vcr-http-client.ts | 100 +++---- .../test/file-system-cassette-store.test.ts | 33 ++- packages/effect-vcr/test/helpers.ts | 11 +- .../effect-vcr/test/vcr-http-client.test.ts | 115 ++++---- packages/effect-vcr/tsdown.config.ts | 8 +- 12 files changed, 436 insertions(+), 200 deletions(-) diff --git a/packages/effect-vcr/README.md b/packages/effect-vcr/README.md index 088667d..4e25d5a 100644 --- a/packages/effect-vcr/README.md +++ b/packages/effect-vcr/README.md @@ -1,79 +1,237 @@ # @useairfoil/effect-vcr -An HttpClient for Effect that records all HTTP interactions and stores them on tape, ready to be replayed for future test runs. +An `HttpClient` for Effect that records HTTP interactions to cassette files and replays them for future test runs. -The first time the following test is run, it sends the HTTP request to httpbin.org and stores the request/response pair to a cassette. All subsequent tests will replay the response from the cassette, making no HTTP requests. +The first time the following test is run, it sends the HTTP request to `httpbin.org` and stores the request/response pair to a cassette. All subsequent test runs replay the stored response and make no live HTTP requests. 
```ts // my-program.ts +import { Effect } from "effect"; +import { HttpClient } from "effect/unstable/http"; + export const program = Effect.gen(function* () { const client = yield* HttpClient.HttpClient; - const response = yield* client.get("https://httpbin.org"); + const response = yield* client.get("https://httpbin.org/robots.txt"); return yield* response.text; }); // my-program.test.ts -import { VcrHttpClient, FileSystemCassetteStore } from "@useairfoil/effect-vcr"; +import { NodeServices } from "@effect/platform-node"; +import { FileSystemCassetteStore, VcrHttpClient } from "@useairfoil/effect-vcr"; +import { describe, it } from "@effect/vitest"; +import { Effect, Layer } from "effect"; +import { FetchHttpClient } from "effect/unstable/http"; + +const cassetteStoreLayer = FileSystemCassetteStore.layer().pipe(Layer.provide(NodeServices.layer)); + +const vcrRuntimeLayer = Layer.mergeAll( + FetchHttpClient.layer, + NodeServices.layer, + cassetteStoreLayer, +); + +const vcrLayer = VcrHttpClient.layer({ + vcrName: "httpbin", + mode: "auto", +}).pipe(Layer.provide(vcrRuntimeLayer)); describe("my awesome program", () => { - it.effect("works", () => - Effect.gen(function* () { - yield* program; - }), - ).pipe( - // Wrap the HttpClient with the VCR - Effect.provide(VcrHttpClient.layer()), - // Store cassettes on the file system - Effect.provide(FileSystemCassetteStore.layer()), - // HttpClient based on fetch - Effect.provide(FetchHttpClient.layer), - // Node is used to access the file system - Effect.provide(NodeServices.layer), - ); + it.effect("works", () => program.pipe(Effect.provide(vcrLayer))); }); ``` -The main use case for the VCR is to allow open source projects to run tests against a real API endpoint without exposing secrets to forks. +The main use case for the VCR is to let open source projects run tests against a real upstream API without exposing secrets to forks. 
+ +- project owners generate cassettes locally by talking to the real API +- external contributors run tests against committed cassettes +- CI runs deterministically from cassettes +- scheduled or manual runs can still hit the live API to catch upstream drift -- Project owners generate cassettes locally by interacting with the upstream API -- External contributors run their tests using the cassettes -- CI runs using cassettes -- Optionally, CI runs against the upstream API periodically, to catch any schema drift +## Package shape + +Root exports: + +- `CassetteStore` +- `FileSystemCassetteStore` +- `VcrHttpClient` +- `VcrConfig` +- `VcrMode` +- `VcrRequest` +- `VcrResponse` +- `VcrEntry` +- `Cassette` +- `CassetteFile` + +Focused subpath exports are also available: + +- `@useairfoil/effect-vcr/cassette-store` +- `@useairfoil/effect-vcr/file-system-cassette-store` +- `@useairfoil/effect-vcr/types` +- `@useairfoil/effect-vcr/vcr-http-client` ## Usage -Use the VCR by providing the VcrHttpClient and FileSystemCassetteStore layers to the Effect being tested. +Use the VCR by providing `VcrHttpClient.layer(...)` to the Effect being tested. + +- `VcrHttpClient` wraps an existing `HttpClient` and adds record/replay behavior +- `FileSystemCassetteStore` persists cassette files to disk + +`VcrHttpClient.layer(...)` depends on: -- VcrHttpClient: this HttpClient wraps another HttpClient, adding support for recording and replaying HTTP interactions. -- FileSystemCassetteStore: stores cassettes on the file system. +- a live `HttpClient` implementation such as `FetchHttpClient.layer` +- `Path.Path` for cassette name resolution +- `CassetteStore.CassetteStore` for persistence + +`FileSystemCassetteStore.layer(...)` depends on: + +- `FileSystem.FileSystem` +- `Path.Path` + +In Node tests, `NodeServices.layer` satisfies those platform services. ## Configuration and defaults ### VcrHttpClient -- `vcrName?: string`: the VCR name. 
Used to selectively disable VCRs during test runs (more on this in the runtime configuration section). -- `cassetteName?: string`: the cassette name. If not set, defaults to the current vitest file name. -- `mode: "record" | "replay" | "auto"`: controls the VCR behaviour. Defaults to `auto`. - - `record`: always call the live client and write a cassette. - - `replay`: only serve from cassette; missing entries fail. - - `auto`: replay if cassette exists, otherwise record. If `CI=true`, missing cassette fails. -- `redact`: control which sensitive headers and JSON body keys to remove from requests and responses. - - `.requestHeaders?: string[]`: redact the specified request headers. - - `.responseHeaders?: string[]`: redact the specified response headers. - - `.requestBodyKeys?: string[]`: redact the specified request JSON keys. - - `.responseBodyKeys?: string[]`: redact the specified response JSON keys. -- `matchIgnore`: control which fields are ignored when matching requests. - - `.requestHeaders?: string[]`: ignore the specified request headers. - - `.requestBodyKeys?: string[]`: ignore the specified request JSON keys. -- `match: (request: VcrRequest, entry: VcrEntry) => boolean`: custom request to entry matcher. 
+```ts +type VcrConfig = { + readonly vcrName?: string; + readonly cassetteName?: string; + readonly mode?: "record" | "replay" | "auto"; + readonly redact?: { + readonly requestHeaders?: ReadonlyArray; + readonly responseHeaders?: ReadonlyArray; + readonly requestBodyKeys?: ReadonlyArray; + readonly responseBodyKeys?: ReadonlyArray; + }; + readonly matchIgnore?: { + readonly requestHeaders?: ReadonlyArray; + readonly requestBodyKeys?: ReadonlyArray; + }; + readonly match?: (request: VcrRequest, entry: VcrEntry) => boolean; +}; +``` + +- `vcrName?: string` + - logical VCR name used by `ACK_DISABLE_VCR` +- `cassetteName?: string` + - cassette file basename or full file name + - `users` resolves to `users.cassette` + - `users.cassette` is preserved as-is + - when omitted in Vitest, the cassette file defaults to `.cassette` and the current test name becomes the export key inside that file +- `mode?: "record" | "replay" | "auto"` + - `record`: always call the live client and write a cassette + - `replay`: only serve from cassette; missing entries fail + - `auto`: replay if the cassette exists, otherwise record; when `CI=true`, missing cassettes fail instead of recording +- `redact` + - remove sensitive headers or JSON body keys before writing to disk +- `matchIgnore` + - ignore request headers or JSON body keys when computing the request lookup key +- `match` + - custom request matcher for advanced lookup behavior + +Defaults: + +- `mode` defaults to `"auto"` +- `authorization` is ignored for matching by default +- `authorization` is redacted from recorded request headers by default ### FileSystemCassetteStore -- `cassetteDir?: string`: change the location of the cassette. By default, the VCR stores cassettes in the `__cassettes__` folder next to the test. 
+- `cassetteDir?: string` + - changes the cassette root directory + - when omitted in Vitest, cassettes are written to the `__cassettes__` directory beside the test file + - outside Vitest, provide `cassetteDir` explicitly + +## Matching and redaction + +`matchIgnore` and `redact` solve different problems. + +- `matchIgnore` changes how requests are matched to cassette entries +- `redact` changes what is persisted to disk + +Typical usage: + +```ts +const vcrLayer = VcrHttpClient.layer({ + vcrName: "shopify-products", + mode: "auto", + matchIgnore: { + requestHeaders: ["x-shopify-access-token", "authorization"], + }, + redact: { + requestHeaders: ["x-shopify-access-token", "authorization"], + }, +}); +``` + +Use `matchIgnore` when a request field should not affect cassette identity. + +Use `redact` when a field should never be written to disk. ## Runtime configuration The VCR is controlled by the following environment variables: -- `CI=true`: if set and the VCR mode is `auto`, tests with a missing cassette fail. -- `ACK_DISABLE_VCR=<...>`: disable the VCR. If `'*'`, then all VCRs are disabled. To selectively disable VCRs, pass a comma separated list of VCRs to disable (based on their `vcrName` config). Use this to test against the upstream API. In this case, the cassettes are not updated. +- `CI=true` + - if the VCR mode is `auto`, tests with a missing cassette fail instead of recording +- `ACK_DISABLE_VCR=<...>` + - disables the VCR and falls back to the wrapped live client + - `*` disables all configured VCRs + - a comma-separated list disables only matching `vcrName` values + +When the VCR is disabled, cassette files are not updated. 
+ +## Common patterns + +### Build the VCR runtime once + +```ts +const cassetteStoreLayer = FileSystemCassetteStore.layer().pipe(Layer.provide(NodeServices.layer)); + +const vcrRuntimeLayer = Layer.mergeAll( + FetchHttpClient.layer, + NodeServices.layer, + cassetteStoreLayer, +); + +const vcrLayer = VcrHttpClient.layer({ + vcrName: "example", + mode: "auto", +}).pipe(Layer.provide(vcrRuntimeLayer)); +``` + +This keeps the dependency graph explicit: + +- `FileSystemCassetteStore.layer(...)` gets its platform services from `NodeServices.layer` +- `VcrHttpClient.layer(...)` gets its live client, `Path.Path`, and cassette store from `vcrRuntimeLayer` + +### Override config in tests + +```ts +import { ConfigProvider, Layer } from "effect"; + +const vcrTestRuntimeLayer = Layer.mergeAll( + vcrRuntimeLayer, + ConfigProvider.layer( + ConfigProvider.fromUnknown({ + CI: false, + ACK_DISABLE_VCR: "", + }), + ), +); + +const vcrLayer = VcrHttpClient.layer({ + vcrName: "example", + mode: "auto", +}).pipe(Layer.provide(vcrTestRuntimeLayer)); +``` + +### Use focused subpath imports + +```ts +import * as VcrHttpClient from "@useairfoil/effect-vcr/vcr-http-client"; +import * as FileSystemCassetteStore from "@useairfoil/effect-vcr/file-system-cassette-store"; +``` + +Use root imports for normal package consumption and subpath imports when you want a narrower surface. 
diff --git a/packages/effect-vcr/package.json b/packages/effect-vcr/package.json index d340d1d..744abc2 100644 --- a/packages/effect-vcr/package.json +++ b/packages/effect-vcr/package.json @@ -18,6 +18,26 @@ "types": "./dist/index.d.ts", "import": "./dist/index.js", "default": "./dist/index.js" + }, + "./cassette-store": { + "types": "./dist/cassette-store.d.ts", + "import": "./dist/cassette-store.js", + "default": "./dist/cassette-store.js" + }, + "./file-system-cassette-store": { + "types": "./dist/file-system-cassette-store.d.ts", + "import": "./dist/file-system-cassette-store.js", + "default": "./dist/file-system-cassette-store.js" + }, + "./types": { + "types": "./dist/types.d.ts", + "import": "./dist/types.js", + "default": "./dist/types.js" + }, + "./vcr-http-client": { + "types": "./dist/vcr-http-client.d.ts", + "import": "./dist/vcr-http-client.js", + "default": "./dist/vcr-http-client.js" } }, "scripts": { diff --git a/packages/effect-vcr/src/cassette-store.ts b/packages/effect-vcr/src/cassette-store.ts index c90cf4f..cdb2944 100644 --- a/packages/effect-vcr/src/cassette-store.ts +++ b/packages/effect-vcr/src/cassette-store.ts @@ -1,12 +1,14 @@ -import { Context, DateTime, Effect, Schema } from "effect"; +import { Context, Data, DateTime, Effect } from "effect"; import type { Cassette, CassetteFile } from "./types"; +export type CassetteStoreOperation = "exists" | "load" | "save" | "loadOrInit" | "resolveRoot"; + /** * Storage contract for VCR cassettes. * */ -export interface CassetteStore { +export interface CassetteStoreService { /** Returns true if a cassette with the given name exists. */ readonly exists: (name: string) => Effect.Effect; @@ -23,15 +25,15 @@ export interface CassetteStore { /** * Effect service tag for the cassette store. 
*/ -export const CassetteStore: Context.Service = Context.Service( +export class CassetteStore extends Context.Service()( "@useairfoil/effect-vcr/CassetteStore", -); +) {} /** * Creates a new empty cassette with a timestamp and format version. */ export const createEmptyCassette = (): Effect.Effect => - Effect.gen(function* () { + Effect.fnUntraced(function* () { const now = yield* DateTime.now; return { meta: { @@ -40,7 +42,7 @@ export const createEmptyCassette = (): Effect.Effect => }, entries: {}, }; - }); + })(); export const createEmptyCassetteFile = (): Effect.Effect => Effect.map(createEmptyCassette(), (cassette) => ({ @@ -53,18 +55,15 @@ export const createEmptyCassetteFile = (): Effect.Effect => * `operation` indicates which store function failed. * `path` is the file or directory path that was targeted. */ -export class CassetteStoreError extends Schema.TaggedErrorClass()( - "CassetteStoreError", - { - operation: Schema.String, - path: Schema.String, - message: Schema.optional(Schema.String), - cause: Schema.optional(Schema.Unknown), - }, -) {} +export class CassetteStoreError extends Data.TaggedError("CassetteStoreError")<{ + readonly operation: CassetteStoreOperation; + readonly path: string; + readonly message?: string; + readonly cause?: unknown; +}> {} export const toStoreError = ( - operation: "exists" | "load" | "save" | "loadOrInit", + operation: CassetteStoreOperation, path: string, cause?: unknown, message?: string, @@ -93,7 +92,5 @@ export const parseCassette = (content: string, path: string) => ); /** Creates a `CassetteStore` implementation that uses the given `impl` object. 
*/ -export const make = (impl: CassetteStore): CassetteStore => - CassetteStore.of({ - ...impl, - }); +export const make = (impl: CassetteStoreService): CassetteStoreService => + CassetteStore.of({ ...impl }); diff --git a/packages/effect-vcr/src/file-system-cassette-store.ts b/packages/effect-vcr/src/file-system-cassette-store.ts index 574aca3..b9c2555 100644 --- a/packages/effect-vcr/src/file-system-cassette-store.ts +++ b/packages/effect-vcr/src/file-system-cassette-store.ts @@ -5,24 +5,35 @@ import type { CassetteFile } from "./types"; import * as CassetteStore from "./cassette-store"; import { getVitestState } from "./vitest-state"; +export type FileSystemCassetteStoreConfig = { + readonly cassetteDir?: string; +}; + /** * FileSystem-backed cassette store. * * Requires a platform FileSystem layer (Node/Bun) to be provided by the user. */ -export const layer = ({ cassetteDir }: { cassetteDir?: string } = {}) => +export const layer = (config: FileSystemCassetteStoreConfig = {}) => Layer.effect(CassetteStore.CassetteStore)( - Effect.gen(function* () { + Effect.fnUntraced(function* () { const fs = yield* FileSystem.FileSystem; const path = yield* Path.Path; let rootDir: string; - if (cassetteDir) { - rootDir = cassetteDir; + if (config.cassetteDir) { + rootDir = config.cassetteDir; } else { const { testPath } = getVitestState(); if (!testPath) { - return yield* Effect.fail(new Error("vcr must be used within a vitest test")); + return yield* Effect.fail( + CassetteStore.toStoreError( + "resolveRoot", + "__cassettes__", + undefined, + "VCR cassette directory could not be inferred. 
Provide cassetteDir when not running in Vitest.", + ), + ); } const testFolder = path.dirname(testPath); rootDir = path.join(testFolder, "__cassettes__"); @@ -75,5 +86,5 @@ export const layer = ({ cassetteDir }: { cassetteDir?: string } = {}) => save, loadOrInit, }); - }), + })(), ); diff --git a/packages/effect-vcr/src/index.ts b/packages/effect-vcr/src/index.ts index 317d318..83c8d6d 100644 --- a/packages/effect-vcr/src/index.ts +++ b/packages/effect-vcr/src/index.ts @@ -1,13 +1,4 @@ export * as CassetteStore from "./cassette-store"; export * as FileSystemCassetteStore from "./file-system-cassette-store"; - export * as VcrHttpClient from "./vcr-http-client"; -export type { - Cassette, - CassetteFile, - Configuration, - VcrEntry, - VcrMode, - VcrRequest, - VcrResponse, -} from "./types"; +export * from "./types"; diff --git a/packages/effect-vcr/src/sanitize.ts b/packages/effect-vcr/src/sanitize.ts index d4abd7a..7c16dd6 100644 --- a/packages/effect-vcr/src/sanitize.ts +++ b/packages/effect-vcr/src/sanitize.ts @@ -125,16 +125,20 @@ export const buildRequestKey = ( ignoreHeaders: options.ignoreHeaders, ignoreBodyKeys: options.ignoreBodyKeys, }); - const key = stableStringify({ - method: sanitized.method.toUpperCase(), - url: sanitized.url, - headers: sanitized.headers ?? {}, - body: sanitized.body ?? "", - }); - if (!key) { - throw new Error("Failed to build VCR request key"); - } - return key; + return ( + stableStringify({ + method: sanitized.method.toUpperCase(), + url: sanitized.url, + headers: sanitized.headers ?? {}, + body: sanitized.body ?? "", + }) ?? + JSON.stringify({ + method: sanitized.method.toUpperCase(), + url: sanitized.url, + headers: sanitized.headers ?? {}, + body: sanitized.body ?? 
"", + }) + ); }); /** diff --git a/packages/effect-vcr/src/types.ts b/packages/effect-vcr/src/types.ts index 6a7d674..21d0d44 100644 --- a/packages/effect-vcr/src/types.ts +++ b/packages/effect-vcr/src/types.ts @@ -54,8 +54,14 @@ export type CassetteFile = { /** * VCR configuration. */ -export type Configuration = { +export type VcrConfig = { readonly vcrName?: string; + /** + * Cassette file name or basename. + * + * `users` resolves to `users.cassette`. + * `users.cassette` is preserved as-is. + */ readonly cassetteName?: string; readonly mode?: VcrMode; /** diff --git a/packages/effect-vcr/src/vcr-http-client.ts b/packages/effect-vcr/src/vcr-http-client.ts index ea9fc9f..621c2d1 100644 --- a/packages/effect-vcr/src/vcr-http-client.ts +++ b/packages/effect-vcr/src/vcr-http-client.ts @@ -1,4 +1,4 @@ -import { Config, Effect, Option, Path } from "effect"; +import { Config, Data, Effect, Option, Path } from "effect"; import { HttpClient, HttpClientError, @@ -6,16 +6,9 @@ import { HttpClientResponse, } from "effect/unstable/http"; -import type { - Cassette, - CassetteFile, - Configuration, - VcrEntry, - VcrRequest, - VcrResponse, -} from "./types"; +import type { Cassette, CassetteFile, VcrConfig, VcrEntry, VcrRequest, VcrResponse } from "./types"; -import { CassetteStore, createEmptyCassette } from "./cassette-store"; +import { CassetteStore, type CassetteStoreService, createEmptyCassette } from "./cassette-store"; import { buildRequestKey, redactRequest, redactResponse } from "./sanitize"; import { getVitestState } from "./vitest-state"; @@ -24,6 +17,14 @@ import { getVitestState } from "./vitest-state"; */ const decoder = new TextDecoder(); +export class VcrHttpClientError extends Data.TaggedError("VcrHttpClientError")<{ + readonly message: string; + readonly cause?: unknown; +}> {} + +const toCassetteFileName = (name: string): string => + name.endsWith(".cassette") ? name : `${name}.cassette`; + /** * Resolve the cassette file name and export key. 
* @@ -32,19 +33,24 @@ const decoder = new TextDecoder(); * - .cassette file name * - export key = current test name (describe > test) */ -const resolveCassetteLocation = (config: Configuration) => - Effect.gen(function* () { +const resolveCassetteLocation = (config: VcrConfig) => + Effect.fnUntraced(function* () { const path = yield* Path.Path; if (config.cassetteName) { return { - name: `${config.cassetteName}.cassette`, + name: toCassetteFileName(config.cassetteName), exportKey: "default", }; } const { testPath, currentTestName } = getVitestState(); if (!testPath || !currentTestName) { - throw new Error("VCR cassette path could not be inferred. Provide cassetteName."); + return yield* Effect.fail( + new VcrHttpClientError({ + message: + "VCR cassette path could not be inferred. Provide cassetteName when not running in Vitest.", + }), + ); } const fileName = path.basename(testPath, ".ts"); @@ -54,12 +60,12 @@ const resolveCassetteLocation = (config: Configuration) => name: cassetteName, exportKey: currentTestName, }; - }); + })(); /** * Apply defaults for common VCR behavior while preserving explicit overrides. */ -const normalizeConfig = (config: Configuration) => ({ +const normalizeConfig = (config: VcrConfig): VcrConfig => ({ vcrName: config.vcrName, cassetteName: config.cassetteName, mode: config.mode ?? "auto", @@ -181,7 +187,7 @@ const toVcrResponse = ( * Load a cassette and map store errors into HttpClient errors. */ const readCassetteFile = ( - store: CassetteStore, + store: CassetteStoreService, name: string, request: HttpClientRequest.HttpClientRequest, ) => store.load(name).pipe(Effect.mapError((error) => toRequestError(request, error))); @@ -190,7 +196,7 @@ const readCassetteFile = ( * Load a cassette or initialize a new one if missing. 
*/ const loadOrInitCassetteFile = ( - store: CassetteStore, + store: CassetteStoreService, name: string, request: HttpClientRequest.HttpClientRequest, ) => store.loadOrInit(name).pipe(Effect.mapError((error) => toRequestError(request, error))); @@ -199,7 +205,7 @@ const loadOrInitCassetteFile = ( * Persist the updated cassette. */ const saveCassetteFile = ( - store: CassetteStore, + store: CassetteStoreService, name: string, cassette: CassetteFile, request: HttpClientRequest.HttpClientRequest, @@ -209,7 +215,7 @@ const saveCassetteFile = ( * Load a named cassette export from the file, or return a fresh empty cassette. */ const readCassetteExport = ( - store: CassetteStore, + store: CassetteStoreService, name: string, exportKey: string, request: HttpClientRequest.HttpClientRequest, @@ -228,7 +234,7 @@ const readCassetteExport = ( const findEntry = ( request: VcrRequest, cassette: Cassette, - config: Configuration, + config: VcrConfig, ): Effect.Effect => { if (config.match) { return Effect.succeed( @@ -241,14 +247,25 @@ const findEntry = ( }).pipe(Effect.map((key) => cassette.entries[key])); }; +const replayResponse = ( + request: HttpClientRequest.HttpClientRequest, + entry: VcrEntry, +): HttpClientResponse.HttpClientResponse => { + const web = new Response(entry.response.body, { + status: entry.response.status, + headers: entry.response.headers, + }); + return HttpClientResponse.fromWeb(request, web); +}; + /** * Replay a response from cassette data using HttpClientResponse.fromWeb. 
*/ const replay = ( - store: CassetteStore, + store: CassetteStoreService, request: HttpClientRequest.HttpClientRequest, vcrRequest: VcrRequest, - config: Configuration, + config: VcrConfig, name: string, exportKey: string, ) => @@ -267,11 +284,7 @@ const replay = ( ); } - const web = new Response(entry.response.body, { - status: entry.response.status, - headers: entry.response.headers, - }); - return Effect.succeed(HttpClientResponse.fromWeb(request, web)); + return Effect.succeed(replayResponse(request, entry)); }), ), ), @@ -281,16 +294,15 @@ const replay = ( * Record a live response into the cassette and return the original response. */ const record = ( - store: CassetteStore, + store: CassetteStoreService, request: HttpClientRequest.HttpClientRequest, vcrRequest: VcrRequest, effect: Effect.Effect, - config: Configuration, + config: VcrConfig, name: string, exportKey: string, ) => - Effect.gen(function* () { - // Execute the live request and capture the response body (Effect caches reads). + Effect.fnUntraced(function* () { const response = yield* effect; const body = yield* response.text; const vcrResponse = toVcrResponse(response, body); @@ -308,7 +320,6 @@ const record = ( }) : vcrResponse; - // Load or create the cassette before inserting the new entry. const file = yield* loadOrInitCassetteFile(store, name, request); const cassette = file.exports[exportKey] ?? (yield* createEmptyCassette()); const key = yield* buildRequestKey(vcrRequest, { @@ -332,16 +343,15 @@ const record = ( [exportKey]: next, }, }; - // Persist updated cassette to disk. yield* saveCassetteFile(store, name, nextFile, request); return response; - }); + })(); /** * Build a VCR-aware HttpClient that replays or records per config. 
*/ -const makeVcrHttpClient = (config: Configuration) => - Effect.gen(function* () { +const makeVcrHttpClient = (config: VcrConfig = {}) => + Effect.fnUntraced(function* () { const live = yield* HttpClient.HttpClient; const normalized = normalizeConfig(config); @@ -352,14 +362,13 @@ const makeVcrHttpClient = (config: Configuration) => return live; } - // CassetteStore is provided via Layer for platform-specific persistence. const store = yield* CassetteStore; const { name, exportKey } = yield* resolveCassetteLocation(normalized); return live.pipe( HttpClient.transform((effect, request) => - Effect.gen(function* () { + Effect.fnUntraced(function* () { const vcrRequest = toVcrRequest(request); if (normalized.mode === "replay") { return yield* replay(store, request, vcrRequest, normalized, name, exportKey); @@ -369,7 +378,6 @@ const makeVcrHttpClient = (config: Configuration) => return yield* record(store, request, vcrRequest, effect, normalized, name, exportKey); } - // Auto mode: replay if cassette exists, otherwise record (or fail in CI). const available = yield* store .exists(name) .pipe(Effect.mapError((error) => toRequestError(request, error))); @@ -393,21 +401,17 @@ const makeVcrHttpClient = (config: Configuration) => const entry = yield* findEntry(vcrRequest, cassette, normalized); if (entry) { - const web = new Response(entry.response.body, { - status: entry.response.status, - headers: entry.response.headers, - }); - return HttpClientResponse.fromWeb(request, web); + return replayResponse(request, entry); } return yield* record(store, request, vcrRequest, effect, normalized, name, exportKey); - }), + })(), ), ); - }); + })(); /** * Layer that provides a VCR-wrapped HttpClient. 
*/ -export const layer = (config: Configuration = {}) => +export const layer = (config: VcrConfig = {}) => HttpClient.layerMergedContext(makeVcrHttpClient(config)); diff --git a/packages/effect-vcr/test/file-system-cassette-store.test.ts b/packages/effect-vcr/test/file-system-cassette-store.test.ts index b8ed70a..0798363 100644 --- a/packages/effect-vcr/test/file-system-cassette-store.test.ts +++ b/packages/effect-vcr/test/file-system-cassette-store.test.ts @@ -1,6 +1,6 @@ import { NodeServices } from "@effect/platform-node"; import { describe, expect, it } from "@effect/vitest"; -import { Effect, FileSystem } from "effect"; +import { Effect, FileSystem, Layer } from "effect"; import { FetchHttpClient, HttpClient } from "effect/unstable/http"; import { VcrHttpClient, FileSystemCassetteStore } from "../src/"; @@ -13,10 +13,17 @@ describe("FileSystemCassetteStore", () => { const text = yield* response.text; expect(text.length).toBeGreaterThan(0); }).pipe( - Effect.provide(VcrHttpClient.layer()), - Effect.provide(FileSystemCassetteStore.layer()), - Effect.provide(FetchHttpClient.layer), - Effect.provide(NodeServices.layer), + Effect.provide( + VcrHttpClient.layer().pipe( + Layer.provide( + Layer.mergeAll( + FetchHttpClient.layer, + NodeServices.layer, + FileSystemCassetteStore.layer().pipe(Layer.provide(NodeServices.layer)), + ), + ), + ), + ), ), ); @@ -34,9 +41,19 @@ describe("FileSystemCassetteStore", () => { const text = yield* response.text; expect(text.length).toBeGreaterThan(0); }).pipe( - Effect.provide(VcrHttpClient.layer()), - Effect.provide(FileSystemCassetteStore.layer({ cassetteDir: dir })), - Effect.provide(FetchHttpClient.layer), + Effect.provide( + VcrHttpClient.layer().pipe( + Layer.provide( + Layer.mergeAll( + FetchHttpClient.layer, + NodeServices.layer, + FileSystemCassetteStore.layer({ cassetteDir: dir }).pipe( + Layer.provide(NodeServices.layer), + ), + ), + ), + ), + ), ); const filesAfter = yield* fs.readDirectory(dir); diff --git 
a/packages/effect-vcr/test/helpers.ts b/packages/effect-vcr/test/helpers.ts index f699819..12a5735 100644 --- a/packages/effect-vcr/test/helpers.ts +++ b/packages/effect-vcr/test/helpers.ts @@ -3,7 +3,12 @@ import { HttpClient, HttpClientError, HttpClientResponse } from "effect/unstable import type { CassetteFile } from "../src/types"; -import { CassetteStore, CassetteStoreError, createEmptyCassette } from "../src/cassette-store"; +import { + CassetteStore, + CassetteStoreError, + type CassetteStoreService, + createEmptyCassette, +} from "../src/cassette-store"; export const makeLiveClient = (body: string, status = 200) => HttpClient.make((request) => @@ -32,7 +37,7 @@ export const makeFailingClient = () => export const mockCassetteStoreLayer = () => { const cassettes = new Map(); - const store: CassetteStore = { + const store: CassetteStoreService = { exists: (path: string) => Effect.succeed(cassettes.has(path)), load: (path: string) => cassettes.has(path) @@ -64,6 +69,6 @@ export const mockCassetteStoreLayer = () => { return { cassettes, - layer: Layer.succeed(CassetteStore)(store), + layer: Layer.succeed(CassetteStore)(CassetteStore.of(store)), }; }; diff --git a/packages/effect-vcr/test/vcr-http-client.test.ts b/packages/effect-vcr/test/vcr-http-client.test.ts index ffdb764..9ce819c 100644 --- a/packages/effect-vcr/test/vcr-http-client.test.ts +++ b/packages/effect-vcr/test/vcr-http-client.test.ts @@ -3,7 +3,7 @@ import { describe, expect, it } from "@effect/vitest"; import { ConfigProvider, Effect, Exit, Layer } from "effect"; import { HttpClient, HttpClientRequest } from "effect/unstable/http"; -import type { Configuration, VcrEntry } from "../src/types"; +import type { VcrConfig, VcrEntry } from "../src/types"; import { buildRequestKey } from "../src/sanitize"; import { layer } from "../src/vcr-http-client"; @@ -11,7 +11,7 @@ import { makeFailingClient, makeLiveClient, mockCassetteStoreLayer } from "./hel describe("record mode", () => { it.effect("stores a 
cassette entry", () => { - const config: Configuration = { + const config: VcrConfig = { vcrName: "test-vcr", cassetteName: "record-basic", mode: "record", @@ -21,7 +21,9 @@ describe("record mode", () => { const live = makeLiveClient("ok"); const liveLayer = Layer.succeed(HttpClient.HttpClient)(live); - const vcrLayer = layer(config).pipe(Layer.provide(Layer.mergeAll(storeLayer, liveLayer))); + const vcrLayer = layer(config).pipe( + Layer.provide(Layer.mergeAll(storeLayer, liveLayer, NodeServices.layer)), + ); return Effect.gen(function* () { const client = yield* HttpClient.HttpClient; @@ -35,13 +37,13 @@ describe("record mode", () => { expect(Object.keys(cassette!.entries)).toHaveLength(1); const entry = Object.values(cassette!.entries)[0]; expect(entry.response.body).toBe("ok"); - }).pipe(Effect.provide(vcrLayer), Effect.provide(NodeServices.layer)); + }).pipe(Effect.provide(vcrLayer)); }); }); describe("replay mode", () => { it.effect("returns stored response without live client", () => { - const config: Configuration = { + const config: VcrConfig = { cassetteName: "replay-basic", mode: "replay", }; @@ -63,7 +65,9 @@ describe("replay mode", () => { }; const liveLayer = Layer.succeed(HttpClient.HttpClient)(client); - const vcrLayer = layer(config).pipe(Layer.provide(Layer.mergeAll(storeLayer, liveLayer))); + const vcrLayer = layer(config).pipe( + Layer.provide(Layer.mergeAll(storeLayer, liveLayer, NodeServices.layer)), + ); return Effect.gen(function* () { const requestKey = yield* buildRequestKey( @@ -84,20 +88,22 @@ describe("replay mode", () => { const text = yield* response.text; expect(text).toBe("replayed"); - }).pipe(Effect.provide(vcrLayer), Effect.provide(NodeServices.layer)); + }).pipe(Effect.provide(vcrLayer)); }); }); describe("auto mode", () => { it.effect("replays when cassette exists", () => { - const config: Configuration = { + const config: VcrConfig = { cassetteName: "auto-replay", mode: "auto", }; const { layer: storeLayer, cassettes } = 
mockCassetteStoreLayer(); const client = makeFailingClient(); const liveLayer = Layer.succeed(HttpClient.HttpClient)(client); - const vcrLayer = layer(config).pipe(Layer.provide(Layer.mergeAll(storeLayer, liveLayer))); + const vcrLayer = layer(config).pipe( + Layer.provide(Layer.mergeAll(storeLayer, liveLayer, NodeServices.layer)), + ); return Effect.gen(function* () { const requestKey = yield* buildRequestKey( @@ -129,13 +135,13 @@ describe("auto mode", () => { const text = yield* response.text; expect(text).toBe("auto-replay"); - }).pipe(Effect.provide(vcrLayer), Effect.provide(NodeServices.layer)); + }).pipe(Effect.provide(vcrLayer)); }); }); describe("record with redaction", () => { it.effect("removes sensitive data from stored cassette", () => { - const config: Configuration = { + const config: VcrConfig = { cassetteName: "redact-ignore", mode: "record", redact: { @@ -158,7 +164,9 @@ describe("record with redaction", () => { ); const liveLayer = Layer.succeed(HttpClient.HttpClient)(client); - const vcrLayer = layer(config).pipe(Layer.provide(Layer.mergeAll(storeLayer, liveLayer))); + const vcrLayer = layer(config).pipe( + Layer.provide(Layer.mergeAll(storeLayer, liveLayer, NodeServices.layer)), + ); return Effect.gen(function* () { const client = yield* HttpClient.HttpClient; @@ -171,21 +179,23 @@ describe("record with redaction", () => { expect(entry.request.headers?.authorization).toBeUndefined(); expect(entry.request.body).toContain("keep"); expect(entry.request.body).not.toContain("token"); - }).pipe(Effect.provide(vcrLayer), Effect.provide(NodeServices.layer)); + }).pipe(Effect.provide(vcrLayer)); }); }); describe("auto mode in CI", () => { it.effect("fails when cassette is missing", () => { - const config: Configuration = { - cassetteName: "auto-ci-miss.cassette", + const config: VcrConfig = { + cassetteName: "auto-ci-miss", mode: "auto", }; const { layer: storeLayer } = mockCassetteStoreLayer(); const client = makeLiveClient("ok"); const liveLayer = 
Layer.succeed(HttpClient.HttpClient)(client); - const vcrLayer = layer(config).pipe(Layer.provide(Layer.mergeAll(storeLayer, liveLayer))); + const vcrLayer = layer(config).pipe( + Layer.provide(Layer.mergeAll(storeLayer, liveLayer, NodeServices.layer)), + ); return Effect.gen(function* () { const client = yield* HttpClient.HttpClient; @@ -193,11 +203,10 @@ describe("auto mode in CI", () => { expect(Exit.isFailure(result)).toBe(true); }).pipe( - Effect.provide(vcrLayer), - Effect.provide(NodeServices.layer), - Effect.provideService( - ConfigProvider.ConfigProvider, - ConfigProvider.fromUnknown({ CI: true }), + Effect.provide( + vcrLayer.pipe( + Layer.provide(ConfigProvider.layer(ConfigProvider.fromUnknown({ CI: true }))), + ), ), ); }); @@ -205,7 +214,7 @@ describe("auto mode in CI", () => { describe("ACK_DISABLE_VCR with vcr name", () => { it.effect("bypasses VCR in replay mode when vcrName is disabled", () => { - const config: Configuration = { + const config: VcrConfig = { vcrName: "my-test", cassetteName: "context-disable", mode: "replay", @@ -214,7 +223,9 @@ describe("ACK_DISABLE_VCR with vcr name", () => { const client = makeLiveClient("live-response"); const liveLayer = Layer.succeed(HttpClient.HttpClient)(client); - const vcrLayer = layer(config).pipe(Layer.provide(Layer.mergeAll(storeLayer, liveLayer))); + const vcrLayer = layer(config).pipe( + Layer.provide(Layer.mergeAll(storeLayer, liveLayer, NodeServices.layer)), + ); return Effect.gen(function* () { const client = yield* HttpClient.HttpClient; @@ -223,17 +234,18 @@ describe("ACK_DISABLE_VCR with vcr name", () => { expect(text).toBe("live-response"); }).pipe( - Effect.provide(vcrLayer), - Effect.provide(NodeServices.layer), - Effect.provideService( - ConfigProvider.ConfigProvider, - ConfigProvider.fromUnknown({ ACK_DISABLE_VCR: "my-test" }), + Effect.provide( + vcrLayer.pipe( + Layer.provide( + ConfigProvider.layer(ConfigProvider.fromUnknown({ ACK_DISABLE_VCR: "my-test" })), + ), + ), ), ); }); 
it.effect("keeps VCR replay behavior when vcrName is not disabled", () => { - const config: Configuration = { + const config: VcrConfig = { vcrName: "not-my-test", cassetteName: "context-missing", mode: "replay", @@ -242,7 +254,9 @@ describe("ACK_DISABLE_VCR with vcr name", () => { const live = makeLiveClient("live-response"); const liveLayer = Layer.succeed(HttpClient.HttpClient)(live); - const vcrLayer = layer(config).pipe(Layer.provide(Layer.mergeAll(storeLayer, liveLayer))); + const vcrLayer = layer(config).pipe( + Layer.provide(Layer.mergeAll(storeLayer, liveLayer, NodeServices.layer)), + ); return Effect.gen(function* () { const client = yield* HttpClient.HttpClient; @@ -250,11 +264,12 @@ describe("ACK_DISABLE_VCR with vcr name", () => { expect(Exit.isFailure(result)).toBe(true); }).pipe( - Effect.provide(vcrLayer), - Effect.provide(NodeServices.layer), - Effect.provideService( - ConfigProvider.ConfigProvider, - ConfigProvider.fromUnknown({ ACK_DISABLE_VCR: "my-test" }), + Effect.provide( + vcrLayer.pipe( + Layer.provide( + ConfigProvider.layer(ConfigProvider.fromUnknown({ ACK_DISABLE_VCR: "my-test" })), + ), + ), ), ); }); @@ -262,7 +277,7 @@ describe("ACK_DISABLE_VCR with vcr name", () => { describe("ACK_DISABLE_VCR with '*'", () => { it.effect("bypasses VCR in replay mode when vcrName specified", () => { - const config: Configuration = { + const config: VcrConfig = { vcrName: "my-test", cassetteName: "context-disable", mode: "replay", @@ -271,7 +286,9 @@ describe("ACK_DISABLE_VCR with '*'", () => { const client = makeLiveClient("live-response"); const liveLayer = Layer.succeed(HttpClient.HttpClient)(client); - const vcrLayer = layer(config).pipe(Layer.provide(Layer.mergeAll(storeLayer, liveLayer))); + const vcrLayer = layer(config).pipe( + Layer.provide(Layer.mergeAll(storeLayer, liveLayer, NodeServices.layer)), + ); return Effect.gen(function* () { const client = yield* HttpClient.HttpClient; @@ -280,17 +297,16 @@ describe("ACK_DISABLE_VCR with '*'", () 
=> { expect(text).toBe("live-response"); }).pipe( - Effect.provide(vcrLayer), - Effect.provide(NodeServices.layer), - Effect.provideService( - ConfigProvider.ConfigProvider, - ConfigProvider.fromUnknown({ ACK_DISABLE_VCR: "*" }), + Effect.provide( + vcrLayer.pipe( + Layer.provide(ConfigProvider.layer(ConfigProvider.fromUnknown({ ACK_DISABLE_VCR: "*" }))), + ), ), ); }); it.effect("bypasses VCR in replay mode when vcrName is undefined", () => { - const config: Configuration = { + const config: VcrConfig = { cassetteName: "context-disable", mode: "replay", }; @@ -298,7 +314,9 @@ describe("ACK_DISABLE_VCR with '*'", () => { const client = makeLiveClient("live-response"); const liveLayer = Layer.succeed(HttpClient.HttpClient)(client); - const vcrLayer = layer(config).pipe(Layer.provide(Layer.mergeAll(storeLayer, liveLayer))); + const vcrLayer = layer(config).pipe( + Layer.provide(Layer.mergeAll(storeLayer, liveLayer, NodeServices.layer)), + ); return Effect.gen(function* () { const client = yield* HttpClient.HttpClient; @@ -307,11 +325,10 @@ describe("ACK_DISABLE_VCR with '*'", () => { expect(text).toBe("live-response"); }).pipe( - Effect.provide(vcrLayer), - Effect.provide(NodeServices.layer), - Effect.provideService( - ConfigProvider.ConfigProvider, - ConfigProvider.fromUnknown({ ACK_DISABLE_VCR: "*" }), + Effect.provide( + vcrLayer.pipe( + Layer.provide(ConfigProvider.layer(ConfigProvider.fromUnknown({ ACK_DISABLE_VCR: "*" }))), + ), ), ); }); diff --git a/packages/effect-vcr/tsdown.config.ts b/packages/effect-vcr/tsdown.config.ts index 7b434c7..7cfd6a2 100644 --- a/packages/effect-vcr/tsdown.config.ts +++ b/packages/effect-vcr/tsdown.config.ts @@ -1,7 +1,13 @@ import { defineConfig } from "tsdown"; export default defineConfig({ - entry: ["src/index.ts"], + entry: [ + "src/index.ts", + "src/cassette-store.ts", + "src/file-system-cassette-store.ts", + "src/types.ts", + "src/vcr-http-client.ts", + ], format: ["esm"], dts: true, sourcemap: true, From 
65af0023233f2f29e78e68f066e64df05af7d5b3 Mon Sep 17 00:00:00 2001 From: jaipaljadeja Date: Thu, 30 Apr 2026 14:49:58 +0530 Subject: [PATCH 06/12] wings-testing: improve effect standards --- packages/wings-testing/test/container.test.ts | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/packages/wings-testing/test/container.test.ts b/packages/wings-testing/test/container.test.ts index b53d5dd..d6d582d 100644 --- a/packages/wings-testing/test/container.test.ts +++ b/packages/wings-testing/test/container.test.ts @@ -1,5 +1,5 @@ import { describe, expect, it } from "@effect/vitest"; -import { Effect, flow } from "effect"; +import { Effect, flow, Layer } from "effect"; import { FetchHttpClient, HttpClient, HttpClientRequest } from "effect/unstable/http"; import { TestWings } from "../src"; @@ -24,8 +24,7 @@ describe("WingsContainer", () => { expect(httpHost).toBeTruthy(); expect(response.status).toBe(404); }).pipe( - Effect.provide(TestWings.container), - Effect.provide(FetchHttpClient.layer), + Effect.provide(Layer.mergeAll(TestWings.container, FetchHttpClient.layer)), Effect.scoped, ), { timeout: 60_000 }, From a516341b1783196446a173d883f6a34240888b53 Mon Sep 17 00:00:00 2001 From: jaipaljadeja Date: Thu, 30 Apr 2026 14:50:57 +0530 Subject: [PATCH 07/12] connectors: update connectors and template --- connectors/producer-polar/README.md | 224 +++++++----------- connectors/producer-polar/src/api.ts | 6 +- connectors/producer-polar/src/connector.ts | 34 ++- connectors/producer-polar/src/index.ts | 4 +- connectors/producer-polar/src/sandbox.ts | 24 +- connectors/producer-polar/src/streams.ts | 31 +-- .../producer-polar/test/api.vcr.test.ts | 40 ++-- connectors/producer-polar/test/helpers.ts | 2 +- .../producer-polar/test/webhook.test.ts | 21 +- connectors/producer-shopify/README.md | 160 +++++++++---- connectors/producer-shopify/src/api.ts | 7 +- connectors/producer-shopify/src/connector.ts | 34 ++- connectors/producer-shopify/src/index.ts | 4 +-
connectors/producer-shopify/src/sandbox.ts | 29 ++- connectors/producer-shopify/src/streams.ts | 27 +-- .../producer-shopify/test/api.vcr.test.ts | 56 +++-- connectors/producer-shopify/test/helpers.ts | 2 +- .../producer-shopify/test/webhook.test.ts | 38 +-- docker-compose.yaml | 2 +- pnpm-lock.yaml | 51 +++- templates/producer-template/README.md | 185 ++++++++++----- templates/producer-template/src/api.ts | 6 +- templates/producer-template/src/connector.ts | 34 ++- templates/producer-template/src/index.ts | 4 +- templates/producer-template/src/sandbox.ts | 26 +- templates/producer-template/src/streams.ts | 27 +-- .../producer-template/test/api.vcr.test.ts | 41 ++-- templates/producer-template/test/helpers.ts | 2 +- .../producer-template/test/webhook.test.ts | 21 +- 29 files changed, 619 insertions(+), 523 deletions(-) diff --git a/connectors/producer-polar/README.md b/connectors/producer-polar/README.md index c879566..fd04ccd 100644 --- a/connectors/producer-polar/README.md +++ b/connectors/producer-polar/README.md @@ -1,195 +1,149 @@ -# producer-polar +# @useairfoil/producer-polar -A demo connector that streams Polar data (customers, checkouts, orders, subscriptions) into Airfoil via webhooks and backfill. +Polar producer connector for Airfoil Connector Kit. ---- +Current scope: -## User guide (use the library) +- entities: `customers`, `checkouts`, `orders`, `subscriptions` +- backfill source: Polar REST API +- live source: Polar webhooks on `/webhooks/polar` -This section shows how to wire the connector in your own application. The built-in sandbox is intended for internal testing only. 
+## Public Exports -### Install +- `PolarApiClient` +- `layerApiClient(config)` +- `PolarConnector` +- `layerConfig` +- `PolarConfig` +- `PolarConfigConfig` +- `PolarConnectorRuntime` -```bash -pnpm add @useairfoil/producer-polar +## Runtime Shape + +`PolarConnector` is a `Context.Service` that resolves to: + +```ts +type PolarConnectorRuntime = { + readonly connector: ConnectorDefinition; + readonly routes: ReadonlyArray>; +}; ``` -### Provide config via environment +Use `layerConfig` to build that service from Effect Config. -The connector reads config from Effect Config. The simplest way is to provide a `ConfigProvider.fromEnv()`. +## Configuration -Required env vars: +Required: ```env -POLAR_ACCESS_TOKEN=polar_oat_XX -POLAR_API_BASE_URL=https://sandbox-api.polar.sh/v1/ +POLAR_ACCESS_TOKEN=polar_oat_xxx ``` Optional: ```env -POLAR_ORGANIZATION_ID=512929b6-XX -POLAR_WEBHOOK_SECRET=polar_whs_XXX +POLAR_API_BASE_URL=https://sandbox-api.polar.sh/v1/ +POLAR_ORGANIZATION_ID=org_xxx +POLAR_WEBHOOK_SECRET=polar_whs_xxx POLAR_WEBHOOK_PORT=8080 +ACK_TELEMETRY_ENABLED=false +ACK_OTLP_BASE_URL=http://localhost:4318 +ACK_SERVICE_NAME=producer-polar ``` -### Minimal wiring (Node + Fetch) - -This example uses Node. Bun works too if you provide Bun's HttpServer layer. 
- -You must provide these runtime layers: - -- `PolarConnectorConfig()` -- `ConfigProvider` (usually `fromEnv`) -- `HttpServer` platform layer (Node or Bun) -- `HttpClient` layer (Fetch or VCR) -- `Publisher` and `StateStore` layers +## Minimal Runtime Wiring ```ts -import { FetchHttpClient } from "effect/unstable/http"; import { NodeHttpServer } from "@effect/platform-node"; -import { Publisher, runConnector, StateStoreInMemory } from "@useairfoil/connector-kit"; +import { Ingestion, Publisher } from "@useairfoil/connector-kit"; import { ConfigProvider, Effect, Layer } from "effect"; +import { FetchHttpClient } from "effect/unstable/http"; import { createServer } from "node:http"; -import { PolarConnector, PolarConnectorConfig } from "@useairfoil/producer-polar"; -const ConsolePublisher = Layer.succeed(Publisher, { - publish: () => Effect.succeed({ success: true }), +import { layerConfig, PolarConnector } from "@useairfoil/producer-polar"; + +const ConsolePublisher = Layer.succeed(Publisher.Publisher)({ + publish: ({ name, source, batch }) => Effect.succeed({ success: true }), }); +const envLayer = Layer.mergeAll( + FetchHttpClient.layer, + Layer.succeed(ConfigProvider.ConfigProvider, ConfigProvider.fromEnv()), +); + +const connectorLayer = layerConfig.pipe(Layer.provide(envLayer)); + const program = Effect.gen(function* () { const { connector, routes } = yield* PolarConnector; const serverLayer = NodeHttpServer.layer(createServer, { port: 8080 }); - return yield* runConnector(connector, { + return yield* Ingestion.runConnector(connector, { initialCutoff: new Date(), - webhook: { routes }, + webhook: { + routes, + healthPath: "/health", + disableHttpLogger: true, + }, }).pipe(Effect.provide(serverLayer)); -}).pipe( - Effect.provide(StateStoreInMemory), - Effect.provide(ConsolePublisher), - Effect.provide(FetchHttpClient.layer), - Effect.provide(PolarConnectorConfig()), - Effect.withConfigProvider(ConfigProvider.fromEnv()), -); - -Effect.runPromise(program); -``` - 
---- +}); -## Development (architecture and internals) +const runtimeLayer = Layer.mergeAll(Ingestion.layerMemory, ConsolePublisher, connectorLayer); -### How the connector works +const runnable = Effect.scoped(program).pipe(Effect.provide(runtimeLayer)); -``` -Polar -> webhook -> /webhooks/polar -> resolveWebhookDispatch -> entity queues - | - +---------+--------+ - | backfill stream | <- historical pages - | live stream | <- webhook events - +---------+--------+ - | - Publisher +Effect.runPromise(runnable); ``` -The first live webhook event for each entity sets the cutoff. Backfill then fetches historical records up to that cutoff so live and historical data do not overlap. +## Webhook Behavior -### Effect layers in this connector +- webhook path: `POST /webhooks/polar` +- route payloads are schema-validated through `Webhook.route(...)` +- when `POLAR_WEBHOOK_SECRET` is set, webhook signatures are verified against the raw request body +- the first live webhook event establishes the cutoff used to start backfill for each entity stream -At runtime you typically provide: +## API Client Layer -- `PolarConnectorConfig()` (builds the connector from Effect Config) -- a `ConfigProvider` (usually `ConfigProvider.fromEnv()`) -- a platform `HttpServer` layer (Node or Bun) -- an `HttpClient` layer (Fetch or VCR) -- a `Publisher` and `StateStore` layer +`layerApiClient(config)` builds `PolarApiClient` from a raw `PolarConfig` value. -Minimal wiring (Node + FetchHttpClient): +This is useful for focused API tests or custom runtimes that do not need the full connector service. 
```ts +import { Effect, Layer, Option, Schema } from "effect"; import { FetchHttpClient } from "effect/unstable/http"; -import { NodeHttpServer } from "@effect/platform-node"; -import { Publisher, runConnector, StateStoreInMemory } from "@useairfoil/connector-kit"; -import { ConfigProvider, Effect, Layer } from "effect"; -import { createServer } from "node:http"; -import { PolarConnector, PolarConnectorConfig } from "./src/index"; -const ConsolePublisher = Layer.succeed(Publisher, { - publish: () => Effect.succeed({ success: true }), -}); +import { layerApiClient, PolarApiClient } from "@useairfoil/producer-polar"; -const program = Effect.gen(function* () { - const { connector, routes } = yield* PolarConnector; - const serverLayer = NodeHttpServer.layer(createServer, { port: 8080 }); +const apiLayer = layerApiClient({ + accessToken: "test", + apiBaseUrl: "https://sandbox-api.polar.sh/v1/", + organizationId: Option.none(), + webhookSecret: Option.none(), +}).pipe(Layer.provide(FetchHttpClient.layer)); - return yield* runConnector(connector, { - initialCutoff: new Date(), - webhook: { routes }, - }).pipe(Effect.provide(serverLayer)); -}).pipe( - Effect.provide(StateStoreInMemory), - Effect.provide(ConsolePublisher), - Effect.provide(FetchHttpClient.layer), - Effect.provide(PolarConnectorConfig()), - Effect.withConfigProvider(ConfigProvider.fromEnv()), -); +const program = PolarApiClient.use((api) => + api.fetchList(Schema.Any, "customers/", { + page: 1, + limit: 100, + sorting: "-created_at", + }), +).pipe(Effect.provide(apiLayer)); Effect.runPromise(program); ``` -### Project structure - -``` -src/ -├── schemas.ts - entity schemas and webhook event union -├── api.ts - Polar API client service (Effect HttpClient) -├── streams.ts - stream helpers (backfill paging, live webhook stream) -├── connector.ts - connector service (PolarConnectorConfig) -├── index.ts - exports -└── sandbox.ts - demo runner with console publisher -``` +## Development Notes -### Testing with VCR 
+- Polar entity streams combine live webhook events with paginated backfill +- backfill de-duplicates rows already observed live +- incoming events outside the current connector scope are ignored -The connector supports VCR-style record/replay for outgoing Polar API calls through `@useairfoil/effect-vcr` by providing the `VcrHttpClient` layer. +## Testing -Minimal VCR wiring (Node test example): - -```ts -import { FetchHttpClient } from "effect/unstable/http"; -import { FileSystemCassetteStore, VcrHttpClient } from "@useairfoil/effect-vcr"; -import { ConfigProvider, Effect, Layer } from "effect"; -import { PolarConnector, PolarConnectorConfig } from "../src/index"; - -const vcrLayer = VcrHttpClient.layer({ - vcrName: "producer-polar", - mode: "auto", - matchIgnore: { requestHeaders: ["authorization"] }, - redact: { requestHeaders: ["authorization"] }, -}).pipe( - Layer.provideMerge(FileSystemCassetteStore.layer()), - Layer.provideMerge(FetchHttpClient.layer), -); - -const configProvider = ConfigProvider.fromUnknown({ - POLAR_ACCESS_TOKEN: "test", - POLAR_API_BASE_URL: "https://sandbox-api.polar.sh/v1/", -}); - -const program = Effect.gen(function* () { - const { connector } = yield* PolarConnector; - // run connector with your publisher/state layers... 
-}).pipe( - Effect.provide(PolarConnectorConfig()), - Effect.provide(vcrLayer), - Effect.withConfigProvider(configProvider), -); -``` +- `test/api.vcr.test.ts`: VCR-backed API replay against a recorded cassette +- `test/webhook.test.ts`: in-memory webhook round-trip using `NodeHttpServer.layerTest` -Example test run from the connector directory: +Run: ```bash -POLAR_API_BASE_URL=https://sandbox-api.polar.sh/v1/ \ -pnpm --filter @useairfoil/producer-polar run test +pnpm --filter @useairfoil/producer-polar run test:ci ``` diff --git a/connectors/producer-polar/src/api.ts b/connectors/producer-polar/src/api.ts index c0c6ee9..fb4ec69 100644 --- a/connectors/producer-polar/src/api.ts +++ b/connectors/producer-polar/src/api.ts @@ -30,7 +30,7 @@ export class PolarApiClient extends Context.Service => - Effect.gen(function* () { + Effect.fnUntraced(function* () { const client = (yield* HttpClient.HttpClient).pipe( HttpClient.mapRequest(HttpClientRequest.prependUrl(config.apiBaseUrl)), HttpClient.mapRequest(HttpClientRequest.bearerToken(config.accessToken)), @@ -84,9 +84,9 @@ export const makePolarApiClient = ( }; return { fetchJson, fetchList }; - }); + })(); -export const PolarApiClientConfig = ( +export const layerApiClient = ( config: PolarConfig, ): Layer.Layer => Layer.effect(PolarApiClient)(makePolarApiClient(config)); diff --git a/connectors/producer-polar/src/connector.ts b/connectors/producer-polar/src/connector.ts index 0a40bff..6f1a599 100644 --- a/connectors/producer-polar/src/connector.ts +++ b/connectors/producer-polar/src/connector.ts @@ -6,11 +6,11 @@ import { ConnectorError, defineConnector, defineEntity, - type WebhookRoute, + Webhook, } from "@useairfoil/connector-kit"; import { Config, Context, Effect, Layer, Option } from "effect"; -import { PolarApiClient, PolarApiClientConfig } from "./api"; +import { layerApiClient, PolarApiClient } from "./api"; import { type Checkout, CheckoutSchema, @@ -39,7 +39,7 @@ export type PolarConfig = { export type 
PolarConnectorRuntime = { readonly connector: ConnectorDefinition; - readonly routes: ReadonlyArray>; + readonly routes: ReadonlyArray>; }; export class PolarConnector extends Context.Service()( @@ -55,7 +55,6 @@ export const PolarConfigConfig = Config.all({ webhookSecret: Config.option(Config.string("POLAR_WEBHOOK_SECRET")), }); -// Webhook verification const verifyWebhookSignature = (options: { readonly rawBody: Uint8Array; readonly headers: Headers.Headers; @@ -75,7 +74,6 @@ const verifyWebhookSignature = (options: { }), }); -// Webhook dispatch const resolveWebhookDispatch = (options: { readonly payload: WebhookPayload; readonly customers: EntityStreams; @@ -216,7 +214,7 @@ const resolveWebhookDispatch = (options: { const makePolarConnector = ( config: PolarConfig, ): Effect.Effect => - Effect.gen(function* () { + Effect.fnUntraced(function* () { const api = yield* PolarApiClient; const customerStreams = yield* makeEntityStreams({ api, @@ -281,11 +279,11 @@ const makePolarConnector = ( events: [], }); - const webhookRoute: WebhookRoute = { + const webhookRoute = Webhook.route({ path: "/webhooks/polar", schema: WebhookPayloadSchema, handle: (payload, request, rawBody) => - Effect.gen(function* () { + Effect.fn("polar/webhook/handle")(function* () { if (Option.isSome(config.webhookSecret) && rawBody) { yield* verifyWebhookSignature({ rawBody, @@ -294,15 +292,15 @@ const makePolarConnector = ( }); } - yield* resolveWebhookDispatch({ + return yield* resolveWebhookDispatch({ payload, customers: customerStreams, checkouts: checkoutStreams, subscriptions: subscriptionStreams, orders: orderStreams, }); - }), - }; + })(), + }); if (Option.isNone(config.webhookSecret)) { yield* Effect.logWarning( @@ -311,18 +309,14 @@ const makePolarConnector = ( } return { connector, routes: [webhookRoute] }; - }).pipe(Effect.annotateLogs({ component: "polar" })); + })().pipe(Effect.annotateLogs({ component: "polar" })); -export const PolarConnectorConfig = (): Layer.Layer< - 
PolarConnector, - ConnectorError, - HttpClient.HttpClient -> => +export const layerConfig: Layer.Layer = Layer.effect(PolarConnector)( - Effect.gen(function* () { + Effect.fnUntraced(function* () { const config = yield* PolarConfigConfig; - return yield* makePolarConnector(config).pipe(Effect.provide(PolarApiClientConfig(config))); - }).pipe( + return yield* makePolarConnector(config).pipe(Effect.provide(layerApiClient(config))); + })().pipe( Effect.mapError((error) => error instanceof ConnectorError ? error diff --git a/connectors/producer-polar/src/index.ts b/connectors/producer-polar/src/index.ts index 467e18a..63fb9cb 100644 --- a/connectors/producer-polar/src/index.ts +++ b/connectors/producer-polar/src/index.ts @@ -1,8 +1,8 @@ -export { PolarApiClientConfig } from "./api"; +export { layerApiClient, PolarApiClient } from "./api"; export { type PolarConfig, PolarConfigConfig, PolarConnector, - PolarConnectorConfig, + layerConfig, type PolarConnectorRuntime, } from "./connector"; diff --git a/connectors/producer-polar/src/sandbox.ts b/connectors/producer-polar/src/sandbox.ts index f9773b9..00bf9e1 100644 --- a/connectors/producer-polar/src/sandbox.ts +++ b/connectors/producer-polar/src/sandbox.ts @@ -1,13 +1,11 @@ -import type { ConnectorError } from "@useairfoil/connector-kit"; - import { NodeHttpServer } from "@effect/platform-node"; -import { Publisher, runConnector, StateStoreInMemory } from "@useairfoil/connector-kit"; +import { Ingestion, Publisher } from "@useairfoil/connector-kit"; import { Config, ConfigProvider, DateTime, Effect, Layer, Logger, Metric } from "effect"; import { FetchHttpClient } from "effect/unstable/http"; import * as Observability from "effect/unstable/observability"; import { createServer } from "node:http"; -import { PolarConnector, PolarConnectorConfig } from "./index"; +import { layerConfig, PolarConnector } from "./index"; const SandboxConfig = Config.all({ port: Config.port("POLAR_WEBHOOK_PORT").pipe(Config.withDefault(8080)), 
@@ -19,7 +17,7 @@ const TelemetryConfig = Config.all({ serviceName: Config.string("ACK_SERVICE_NAME").pipe(Config.withDefault("producer-polar")), }); -const ConsolePublisherLayer = Layer.succeed(Publisher)({ +const ConsolePublisherLayer = Layer.succeed(Publisher.Publisher)({ publish: ({ name, source, batch }) => Effect.gen(function* () { const ids = batch.rows.map((r) => r["id"]).filter(Boolean); @@ -47,7 +45,7 @@ const program = Effect.gen(function* () { const now = yield* DateTime.now; - return yield* runConnector(connector, { + return yield* Ingestion.runConnector(connector, { initialCutoff: DateTime.toDate(now), webhook: { routes, @@ -62,7 +60,7 @@ const EnvLayer = Layer.mergeAll( Layer.succeed(ConfigProvider.ConfigProvider, ConfigProvider.fromEnv()), ); -const ConnectorLayer = PolarConnectorConfig(); +const ConnectorLayer = layerConfig.pipe(Layer.provide(EnvLayer)); const TelemetryLayer = Layer.unwrap( Effect.gen(function* () { @@ -88,23 +86,17 @@ const TelemetryLayer = Layer.unwrap( Metric.enableRuntimeMetricsLayer, ); }), -); +).pipe(Layer.provide(EnvLayer)); const RuntimeLayer = Layer.mergeAll( - StateStoreInMemory, + Ingestion.layerMemory, ConsolePublisherLayer, ConnectorLayer, Logger.layer([Logger.consolePretty()]), TelemetryLayer, - EnvLayer, ); -Effect.runPromise( - Effect.scoped(program).pipe(Effect.provide(RuntimeLayer)) as Effect.Effect< - void, - Config.ConfigError | ConnectorError - >, -).catch((error) => { +Effect.runPromise(Effect.scoped(program).pipe(Effect.provide(RuntimeLayer))).catch((error) => { void Effect.runPromise( Effect.logError("fatal error").pipe( Effect.annotateLogs({ component: "polar", error: String(error) }), diff --git a/connectors/producer-polar/src/streams.ts b/connectors/producer-polar/src/streams.ts index 8f56372..5652276 100644 --- a/connectors/producer-polar/src/streams.ts +++ b/connectors/producer-polar/src/streams.ts @@ -1,13 +1,6 @@ import type * as Schema from "effect/Schema"; -import { - type Batch, - type 
ConnectorError, - type Cursor, - makePullStream, - makeWebhookQueue, - type WebhookStream, -} from "@useairfoil/connector-kit"; +import { type Batch, type ConnectorError, type Cursor, Streams } from "@useairfoil/connector-kit"; import { DateTime, Deferred, Effect, Queue, Stream } from "effect"; import type { PolarApiClientService } from "./api"; @@ -19,12 +12,12 @@ export const resolveCursor = >( row: T, cursorField: keyof T & string, ): Effect.Effect => - Effect.gen(function* () { + Effect.fnUntraced(function* () { const value = row[cursorField]; if (typeof value === "string") return value; const now = yield* DateTime.now; return DateTime.formatIso(now); - }); + })(); const isOnOrBeforeCutoff = (value: unknown, cutoff: Cursor) => { if (typeof value !== "string") return false; @@ -36,18 +29,18 @@ const setCutoff = (deferred: Deferred.Deferred, cursor: Cursor) = Deferred.succeed(deferred, cursor).pipe(Effect.asVoid); export const dispatchEntityWebhook = >(options: { - readonly queue: WebhookStream; + readonly queue: Streams.WebhookStream; readonly cutoff: Deferred.Deferred; readonly row: T; readonly cursor: Cursor; }): Effect.Effect => - Effect.gen(function* () { + Effect.fnUntraced(function* () { yield* setCutoff(options.cutoff, options.cursor); - yield* Queue.offer(options.queue.queue, { + return yield* Queue.offer(options.queue.queue, { cursor: options.cursor, rows: [options.row], }).pipe(Effect.asVoid); - }); + })(); /** Backfill stream for a single entity. Paging continues until the end. */ const makeBackfillStream = >(options: { @@ -61,7 +54,7 @@ const makeBackfillStream = >(options: { const sorting = `-${options.cursorField}`; return Stream.fromEffect(Deferred.await(options.cutoff)).pipe( Stream.flatMap((cutoff) => - makePullStream({ + Streams.makePullStream({ fetchPage: (cursor: Cursor | undefined) => { const page = cursor ? 
Number(cursor) : 1; return options.api @@ -96,7 +89,7 @@ const makeBackfillStream = >(options: { }; export type EntityStreams> = { - readonly live: WebhookStream; + readonly live: Streams.WebhookStream; readonly cutoff: Deferred.Deferred; readonly backfill: Stream.Stream, ConnectorError>; }; @@ -109,9 +102,9 @@ export const makeEntityStreams = >(options: { readonly cursorField: keyof T & string; readonly limit?: number; }): Effect.Effect, ConnectorError> => - Effect.gen(function* () { - const queue = yield* makeWebhookQueue({ capacity: 2048 }); + Effect.fnUntraced(function* () { + const queue = yield* Streams.makeWebhookQueue({ capacity: 2048 }); const cutoff = yield* Deferred.make(); const backfill = makeBackfillStream({ ...options, cutoff }); return { live: queue, cutoff, backfill }; - }); + })(); diff --git a/connectors/producer-polar/test/api.vcr.test.ts b/connectors/producer-polar/test/api.vcr.test.ts index c49c734..a613572 100644 --- a/connectors/producer-polar/test/api.vcr.test.ts +++ b/connectors/producer-polar/test/api.vcr.test.ts @@ -31,24 +31,30 @@ describe("producer-polar api (vcr)", () => { }), ); - return program.pipe( - Effect.provide(apiLayer), - Effect.provide( - VcrHttpClient.layer({ - vcrName: "producer-polar", - }), - ), - Effect.provide(FileSystemCassetteStore.layer()), - Effect.provide(FetchHttpClient.layer), - Effect.provide(NodeServices.layer), - Effect.provideService( - ConfigProvider.ConfigProvider, - ConfigProvider.fromUnknown({ - POLAR_ACCESS_TOKEN: "test", - POLAR_API_BASE_URL: "https://sandbox-api.polar.sh/v1/", - }), + const cassetteStoreLayer = FileSystemCassetteStore.layer().pipe( + Layer.provide(NodeServices.layer), + ); + const vcrRuntimeLayer = Layer.mergeAll( + FetchHttpClient.layer, + NodeServices.layer, + cassetteStoreLayer, + ); + const vcrWithDeps = VcrHttpClient.layer({ vcrName: "producer-polar" }).pipe( + Layer.provide(vcrRuntimeLayer), + ); + + const testLayer = apiLayer.pipe( + Layer.provide(vcrWithDeps), + 
Layer.provide( + ConfigProvider.layer( + ConfigProvider.fromUnknown({ + POLAR_ACCESS_TOKEN: "test", + POLAR_API_BASE_URL: "https://sandbox-api.polar.sh/v1/", + }), + ), ), - Effect.scoped, ); + + return program.pipe(Effect.provide(testLayer), Effect.scoped); }); }); diff --git a/connectors/producer-polar/test/helpers.ts b/connectors/producer-polar/test/helpers.ts index e595f1f..41a728a 100644 --- a/connectors/producer-polar/test/helpers.ts +++ b/connectors/producer-polar/test/helpers.ts @@ -13,7 +13,7 @@ export const makeTestPublisher = (expected: number) => Effect.gen(function* () { const publishedRef = yield* Ref.make>([]); const done = yield* Deferred.make(); - const layer = Layer.succeed(Publisher)({ + const layer = Layer.succeed(Publisher.Publisher)({ publish: ({ name, source, batch }) => Effect.gen(function* () { const next = yield* Ref.updateAndGet(publishedRef, (items) => [ diff --git a/connectors/producer-polar/test/webhook.test.ts b/connectors/producer-polar/test/webhook.test.ts index 11f61d6..e4678e9 100644 --- a/connectors/producer-polar/test/webhook.test.ts +++ b/connectors/producer-polar/test/webhook.test.ts @@ -1,11 +1,11 @@ import { NodeHttpServer } from "@effect/platform-node"; import { describe, expect, it } from "@effect/vitest"; -import { ConnectorError, runConnector, StateStoreInMemory } from "@useairfoil/connector-kit"; +import { ConnectorError, Ingestion } from "@useairfoil/connector-kit"; import { ConfigProvider, Deferred, Effect, Layer, Ref } from "effect"; import { HttpClient, HttpClientRequest } from "effect/unstable/http"; import { PolarApiClient, type PolarApiClientService } from "../src/api"; -import { PolarConnector, PolarConnectorConfig } from "../src/index"; +import { layerConfig, PolarConnector } from "../src/index"; import { makeTestPublisher } from "./helpers"; const customerWebhookPayload = { @@ -42,7 +42,7 @@ describe("producer-polar webhook", () => { const runtimeLayer = NodeHttpServer.layerTest; const apiLayer = 
Layer.succeed(PolarApiClient)(makeApiStub()); - const connectorLayer = PolarConnectorConfig().pipe(Layer.provide(apiLayer)); + const connectorLayer = layerConfig.pipe(Layer.provide(apiLayer)); const configProvider = ConfigProvider.fromUnknown({ POLAR_ACCESS_TOKEN: "test", POLAR_API_BASE_URL: "https://sandbox-api.polar.sh/v1/", @@ -51,11 +51,11 @@ describe("producer-polar webhook", () => { return Effect.gen(function* () { const { publishedRef, done, layer } = yield* makeTestPublisher(1); const { connector, routes } = yield* PolarConnector; - const runLayer = Layer.mergeAll(StateStoreInMemory, layer, runtimeLayer); + const runLayer = Layer.mergeAll(Ingestion.layerMemory, layer, runtimeLayer); yield* Effect.gen(function* () { yield* Effect.forkScoped( - runConnector(connector, { + Ingestion.runConnector(connector, { initialCutoff: new Date(), webhook: { routes, @@ -77,10 +77,13 @@ describe("producer-polar webhook", () => { expect(published[0]?.name).toBe("customers"); }).pipe(Effect.provide(runLayer)); }).pipe( - Effect.provide(connectorLayer), - Effect.provide(runtimeLayer), - Effect.provideService(ConfigProvider.ConfigProvider, configProvider), + Effect.provide( + connectorLayer.pipe( + Layer.provide(runtimeLayer), + Layer.provide(ConfigProvider.layer(configProvider)), + ), + ), Effect.scoped, - ) as Effect.Effect; + ); }); }); diff --git a/connectors/producer-shopify/README.md b/connectors/producer-shopify/README.md index fee0d58..fc1328b 100644 --- a/connectors/producer-shopify/README.md +++ b/connectors/producer-shopify/README.md @@ -1,78 +1,142 @@ -# producer-shopify +# @useairfoil/producer-shopify -Shopify producer connector for Airfoil Connector Kit (ACK). +Shopify producer connector for Airfoil Connector Kit. 
-Current v1 scope: +Current scope: -- Entity: `products` -- Backfill source: Shopify Admin REST `GET /products.json` -- Live source: Shopify webhooks on `products/create` and `products/update` +- entity: `products` +- backfill source: Shopify Admin REST `GET /products.json` +- live source: Shopify webhooks on `/webhooks/shopify` -## Architecture +## Public Exports -- `src/api.ts`: REST client with `X-Shopify-Access-Token` auth and Link-header pagination support -- `src/streams.ts`: cutoff-aware backfill stream plus live webhook queue -- `src/connector.ts`: connector/entity registration and webhook route/signature verification -- `src/sandbox.ts`: runnable local runtime (Node server + in-memory store + console publisher) +- `ShopifyApiClient` +- `layerApiClient(config)` +- `ShopifyConnector` +- `layerConfig` +- `ShopifyConfig` +- `ShopifyConfigConfig` +- `ShopifyConnectorRuntime` +- `Product` +- `ProductSchema` +- `WebhookPayload` +- `WebhookPayloadSchema` -## Environment variables +## Configuration -Copy `.env.example` to `.env` and fill values: +Required: -- `SHOPIFY_API_BASE_URL` - full base URL including pinned API version, for example `https://your-store.myshopify.com/admin/api/2026-01` -- `SHOPIFY_API_TOKEN` - Admin API access token (`X-Shopify-Access-Token`) -- `SHOPIFY_WEBHOOK_SECRET` - app shared secret used to validate `X-Shopify-Hmac-SHA256` -- `SHOPIFY_WEBHOOK_PORT` - local webhook server port (default `8080`) +```env +SHOPIFY_API_TOKEN=shpat_xxx +``` -Recommended scope for this v1 connector: `read_products`. +Common: -## Usage +```env +SHOPIFY_API_BASE_URL=https://your-store.myshopify.com/admin/api/2026-01 +SHOPIFY_WEBHOOK_SECRET=your-app-shared-secret +SHOPIFY_WEBHOOK_PORT=8080 +ACK_TELEMETRY_ENABLED=false +ACK_OTLP_BASE_URL=http://localhost:4318 +ACK_SERVICE_NAME=producer-shopify +``` -Run sandbox: +Recommended Shopify scope for the current connector surface: `read_products`. 
-```bash -pnpm --filter @useairfoil/producer-shopify run sandbox -``` +## Minimal Runtime Wiring -Webhook endpoint: +```ts +import { NodeHttpServer } from "@effect/platform-node"; +import { Ingestion, Publisher } from "@useairfoil/connector-kit"; +import { ConfigProvider, Effect, Layer } from "effect"; +import { FetchHttpClient } from "effect/unstable/http"; +import { createServer } from "node:http"; -- `POST /webhooks/shopify` +import { layerConfig, ShopifyConnector } from "@useairfoil/producer-shopify"; -Expected headers: +const ConsolePublisher = Layer.succeed(Publisher.Publisher)({ + publish: () => Effect.succeed({ success: true }), +}); -- `X-Shopify-Topic` (`products/create` or `products/update`) -- `X-Shopify-Hmac-SHA256` (verified against raw body bytes) +const envLayer = Layer.mergeAll( + FetchHttpClient.layer, + Layer.succeed(ConfigProvider.ConfigProvider, ConfigProvider.fromEnv()), +); -## Tests +const connectorLayer = layerConfig.pipe(Layer.provide(envLayer)); -- `test/api.vcr.test.ts`: deterministic replay of a recorded `products.json` response -- `test/webhook.test.ts`: in-memory webhook flow with HMAC signature verification +const program = Effect.gen(function* () { + const { connector, routes } = yield* ShopifyConnector; + const serverLayer = NodeHttpServer.layer(createServer, { port: 8080 }); -### VCR workflow + return yield* Ingestion.runConnector(connector, { + initialCutoff: new Date(), + webhook: { + routes, + healthPath: "/health", + disableHttpLogger: true, + }, + }).pipe(Effect.provide(serverLayer)); +}); -1. Ensure `.env` contains valid `SHOPIFY_API_BASE_URL` and `SHOPIFY_API_TOKEN`. -2. 
Record cassette: +const runtimeLayer = Layer.mergeAll(Ingestion.layerMemory, ConsolePublisher, connectorLayer); -```bash -rm -rf connectors/producer-shopify/test/__cassettes__ -pnpm --filter @useairfoil/producer-shopify run test:ci -- test/api.vcr.test.ts +const runnable = Effect.scoped(program).pipe(Effect.provide(runtimeLayer)); + +Effect.runPromise(runnable); ``` -3. Replay-only verification: +## Webhook Behavior -```bash -pnpm --filter @useairfoil/producer-shopify run test:ci -``` +- webhook path: `POST /webhooks/shopify` +- expected topic headers include `products/create` and `products/update` +- when `SHOPIFY_WEBHOOK_SECRET` is set, the connector verifies `x-shopify-hmac-sha256` against the raw request body +- live events are merged with backfill using the entity cursor field `updated_at` -Run tests: +## API Client Layer -```bash -pnpm --filter @useairfoil/producer-shopify run test:ci +`layerApiClient(config)` builds `ShopifyApiClient` from a raw `ShopifyConfig` value. + +The client: + +- authenticates with `X-Shopify-Access-Token` +- sends `Accept: application/json` +- follows Shopify `Link` headers for `rel="next"` pagination + +```ts +import { Effect, Layer, Option } from "effect"; +import { FetchHttpClient } from "effect/unstable/http"; + +import { layerApiClient, ProductSchema, ShopifyApiClient } from "@useairfoil/producer-shopify"; + +const apiLayer = layerApiClient({ + apiBaseUrl: "https://your-store.myshopify.com/admin/api/2026-01", + apiToken: "test-token", + webhookSecret: Option.none(), +}).pipe(Layer.provide(FetchHttpClient.layer)); + +const program = ShopifyApiClient.use((api) => + api.fetchList(ProductSchema, "/products.json", { + limit: 50, + }), +).pipe(Effect.provide(apiLayer)); + +Effect.runPromise(program); ``` ## Notes -- Shopify REST Admin API is legacy; GraphQL is recommended by Shopify for new apps. -- This connector pins REST paths by embedding the version in `SHOPIFY_API_BASE_URL`. 
-- Pagination follows Shopify Link header `rel="next"` URLs with `page_info` cursors. -- Inbound webhook signature validation uses `SHOPIFY_WEBHOOK_SECRET` and raw body HMAC SHA-256. +- the current connector uses Shopify Admin REST +- the API version is pinned through `SHOPIFY_API_BASE_URL` +- pagination follows the full `nextUrl` returned by Shopify + +## Testing + +- `test/api.vcr.test.ts`: VCR replay of a recorded `products.json` response +- `test/webhook.test.ts`: in-memory webhook flow with HMAC verification + +Run: + +```bash +pnpm --filter @useairfoil/producer-shopify run test:ci +``` diff --git a/connectors/producer-shopify/src/api.ts b/connectors/producer-shopify/src/api.ts index cc8c92c..1c1e850 100644 --- a/connectors/producer-shopify/src/api.ts +++ b/connectors/producer-shopify/src/api.ts @@ -31,7 +31,6 @@ export class ShopifyApiClient extends Context.Service { if (!linkHeader) { return null; @@ -51,7 +50,7 @@ const isAbsoluteUrl = (value: string): boolean => /^https?:\/\//i.test(value); export const makeShopifyApiClient = ( config: ShopifyConfig, ): Effect.Effect => - Effect.gen(function* () { + Effect.fnUntraced(function* () { const rawClient = yield* HttpClient.HttpClient; const authAndJsonClient = rawClient.pipe( HttpClient.mapRequest(HttpClientRequest.setHeader("X-Shopify-Access-Token", config.apiToken)), @@ -136,9 +135,9 @@ export const makeShopifyApiClient = ( }; return { fetchJson, fetchList }; - }); + })(); -export const ShopifyApiClientConfig = ( +export const layerApiClient = ( config: ShopifyConfig, ): Layer.Layer => Layer.effect(ShopifyApiClient)(makeShopifyApiClient(config)); diff --git a/connectors/producer-shopify/src/connector.ts b/connectors/producer-shopify/src/connector.ts index 6928923..58c3904 100644 --- a/connectors/producer-shopify/src/connector.ts +++ b/connectors/producer-shopify/src/connector.ts @@ -5,12 +5,12 @@ import { ConnectorError, defineConnector, defineEntity, - type WebhookRoute, + Webhook, } from 
"@useairfoil/connector-kit"; import { Config, Context, Effect, Layer, Option } from "effect"; import { createHmac, timingSafeEqual } from "node:crypto"; -import { ShopifyApiClient, ShopifyApiClientConfig } from "./api"; +import { layerApiClient, ShopifyApiClient } from "./api"; import { type Product, ProductSchema, type WebhookPayload, WebhookPayloadSchema } from "./schemas"; import { dispatchEntityWebhook, @@ -27,7 +27,7 @@ export type ShopifyConfig = { export type ShopifyConnectorRuntime = { readonly connector: ConnectorDefinition; - readonly routes: ReadonlyArray>; + readonly routes: ReadonlyArray>; }; export class ShopifyConnector extends Context.Service()( @@ -103,7 +103,7 @@ const resolveWebhookDispatch = (options: { const makeShopifyConnector = ( config: ShopifyConfig, ): Effect.Effect => - Effect.gen(function* () { + Effect.fnUntraced(function* () { const api = yield* ShopifyApiClient; const productStreams = yield* makeEntityStreams({ api, @@ -127,11 +127,11 @@ const makeShopifyConnector = ( events: [], }); - const webhookRoute: WebhookRoute = { + const webhookRoute = Webhook.route({ path: "/webhooks/shopify", schema: WebhookPayloadSchema, handle: (payload, request, rawBody) => - Effect.gen(function* () { + Effect.fn("shopify/webhook/handle")(function* () { const topic = request.headers["x-shopify-topic"] ?? 
""; if (Option.isSome(config.webhookSecret)) { @@ -150,13 +150,13 @@ const makeShopifyConnector = ( }); } - yield* resolveWebhookDispatch({ + return yield* resolveWebhookDispatch({ payload, topic, products: productStreams, }); - }), - }; + })(), + }); if (Option.isNone(config.webhookSecret)) { yield* Effect.logWarning( @@ -165,20 +165,14 @@ const makeShopifyConnector = ( } return { connector, routes: [webhookRoute] }; - }).pipe(Effect.annotateLogs({ component: "producer-shopify" })); + })().pipe(Effect.annotateLogs({ component: "producer-shopify" })); -export const ShopifyConnectorConfig = (): Layer.Layer< - ShopifyConnector, - ConnectorError, - HttpClient.HttpClient -> => +export const layerConfig: Layer.Layer = Layer.effect(ShopifyConnector)( - Effect.gen(function* () { + Effect.fnUntraced(function* () { const config = yield* ShopifyConfigConfig; - return yield* makeShopifyConnector(config).pipe( - Effect.provide(ShopifyApiClientConfig(config)), - ); - }).pipe( + return yield* makeShopifyConnector(config).pipe(Effect.provide(layerApiClient(config))); + })().pipe( Effect.mapError((error) => error instanceof ConnectorError ? 
error diff --git a/connectors/producer-shopify/src/index.ts b/connectors/producer-shopify/src/index.ts index 01e352b..6cc64b6 100644 --- a/connectors/producer-shopify/src/index.ts +++ b/connectors/producer-shopify/src/index.ts @@ -1,9 +1,9 @@ -export { ShopifyApiClient, ShopifyApiClientConfig } from "./api"; +export { layerApiClient, ShopifyApiClient } from "./api"; export { type ShopifyConfig, ShopifyConfigConfig, ShopifyConnector, - ShopifyConnectorConfig, + layerConfig, type ShopifyConnectorRuntime, } from "./connector"; export type { Product, WebhookPayload } from "./schemas"; diff --git a/connectors/producer-shopify/src/sandbox.ts b/connectors/producer-shopify/src/sandbox.ts index 661d428..74368cd 100644 --- a/connectors/producer-shopify/src/sandbox.ts +++ b/connectors/producer-shopify/src/sandbox.ts @@ -1,13 +1,13 @@ import type { ConnectorError } from "@useairfoil/connector-kit"; import { NodeHttpServer } from "@effect/platform-node"; -import { Publisher, runConnector, StateStoreInMemory } from "@useairfoil/connector-kit"; +import { Ingestion, Publisher } from "@useairfoil/connector-kit"; import { Config, ConfigProvider, DateTime, Effect, Layer, Logger, Metric } from "effect"; import { FetchHttpClient } from "effect/unstable/http"; import * as Observability from "effect/unstable/observability"; import { createServer } from "node:http"; -import { ShopifyConnector, ShopifyConnectorConfig } from "./index"; +import { layerConfig, ShopifyConnector } from "./index"; const SandboxConfig = Config.all({ port: Config.port("SHOPIFY_WEBHOOK_PORT").pipe(Config.withDefault(8080)), @@ -20,8 +20,8 @@ const TelemetryConfig = Config.all({ }); // Console publisher so you can see ingestion output during `pnpm run sandbox`. -// Real connectors plug in `WingsPublisherLayer` from @useairfoil/connector-kit. -const ConsolePublisherLayer = Layer.succeed(Publisher)({ +// Real connectors plug in `layerWings` from @useairfoil/connector-kit. 
+const ConsolePublisherLayer = Layer.succeed(Publisher.Publisher)({ publish: ({ name, source, batch }) => Effect.gen(function* () { const ids = batch.rows.map((r) => r["id"]).filter((id) => id != null); @@ -49,7 +49,7 @@ const program = Effect.gen(function* () { const now = yield* DateTime.now; - return yield* runConnector(connector, { + return yield* Ingestion.runConnector(connector, { initialCutoff: DateTime.toDate(now), webhook: { routes, @@ -59,9 +59,13 @@ const program = Effect.gen(function* () { }).pipe(Effect.provide(serverLayer)); }).pipe(Effect.annotateLogs({ component: "producer-shopify" })); -const EnvLayer = FetchHttpClient.layer; +const EnvLayer = Layer.mergeAll( + FetchHttpClient.layer, + Layer.succeed(ConfigProvider.ConfigProvider, ConfigProvider.fromEnv()), +); -const ConnectorLayer = ShopifyConnectorConfig().pipe(Layer.provide(EnvLayer)); +const ConnectorLayer: Layer.Layer = + layerConfig.pipe(Layer.provide(EnvLayer)); const TelemetryLayer = Layer.unwrap( Effect.gen(function* () { @@ -87,22 +91,17 @@ const TelemetryLayer = Layer.unwrap( Metric.enableRuntimeMetricsLayer, ); }), -); +).pipe(Layer.provide(EnvLayer)); const RuntimeLayer = Layer.mergeAll( - StateStoreInMemory, + Ingestion.layerMemory, ConsolePublisherLayer, ConnectorLayer, Logger.layer([Logger.consolePretty()]), TelemetryLayer, ); -Effect.runPromise( - Effect.scoped(program).pipe( - Effect.provide(RuntimeLayer), - Effect.provideService(ConfigProvider.ConfigProvider, ConfigProvider.fromEnv()), - ) as Effect.Effect, -).catch((error) => { +Effect.runPromise(Effect.scoped(program).pipe(Effect.provide(RuntimeLayer))).catch((error) => { void Effect.runPromise( Effect.logError("fatal error").pipe( Effect.annotateLogs({ diff --git a/connectors/producer-shopify/src/streams.ts b/connectors/producer-shopify/src/streams.ts index 98d6f29..03c786e 100644 --- a/connectors/producer-shopify/src/streams.ts +++ b/connectors/producer-shopify/src/streams.ts @@ -1,13 +1,6 @@ import type * as Schema from 
"effect/Schema"; -import { - type Batch, - ConnectorError, - type Cursor, - makePullStream, - makeWebhookQueue, - type WebhookStream, -} from "@useairfoil/connector-kit"; +import { type Batch, ConnectorError, type Cursor, Streams } from "@useairfoil/connector-kit"; import { Deferred, Effect, Queue, Stream } from "effect"; import type { ShopifyApiClientService } from "./api"; @@ -63,18 +56,18 @@ const setCutoff = (deferred: Deferred.Deferred, cursor: Cursor) = // Enqueue a single webhook row after recording its cursor as the backfill // cutoff. This is safe to call many times — Deferred.succeed is idempotent. export const dispatchEntityWebhook = >(options: { - readonly queue: WebhookStream; + readonly queue: Streams.WebhookStream; readonly cutoff: Deferred.Deferred; readonly row: T; readonly cursor: Cursor; }): Effect.Effect => - Effect.gen(function* () { + Effect.fnUntraced(function* () { yield* setCutoff(options.cutoff, options.cursor); - yield* Queue.offer(options.queue.queue, { + return yield* Queue.offer(options.queue.queue, { cursor: options.cursor, rows: [options.row], }).pipe(Effect.asVoid); - }); + })(); // Backfill stream for a single entity. Waits for the cutoff deferred to // resolve (set by the first live webhook or by initialCutoff), then pages @@ -89,7 +82,7 @@ const makeBackfillStream = >(options: { }): Stream.Stream, ConnectorError> => Stream.fromEffect(Deferred.await(options.cutoff)).pipe( Stream.flatMap((cutoff) => - makePullStream({ + Streams.makePullStream({ fetchPage: (cursor: Cursor | undefined) => { const nextUrl = typeof cursor === "string" ? 
cursor : undefined; return options.api @@ -124,7 +117,7 @@ const makeBackfillStream = >(options: { ); export type EntityStreams> = { - readonly live: WebhookStream; + readonly live: Streams.WebhookStream; readonly cutoff: Deferred.Deferred; readonly backfill: Stream.Stream, ConnectorError>; }; @@ -139,9 +132,9 @@ export const makeEntityStreams = >(options: { readonly cursorField: keyof T & string; readonly limit?: number; }): Effect.Effect, ConnectorError> => - Effect.gen(function* () { - const queue = yield* makeWebhookQueue({ capacity: 1024 }); + Effect.fnUntraced(function* () { + const queue = yield* Streams.makeWebhookQueue({ capacity: 1024 }); const cutoff = yield* Deferred.make(); const backfill = makeBackfillStream({ ...options, cutoff }); return { live: queue, cutoff, backfill }; - }); + })(); diff --git a/connectors/producer-shopify/test/api.vcr.test.ts b/connectors/producer-shopify/test/api.vcr.test.ts index 3465500..b1e06e8 100644 --- a/connectors/producer-shopify/test/api.vcr.test.ts +++ b/connectors/producer-shopify/test/api.vcr.test.ts @@ -40,32 +40,38 @@ describe("producer-shopify api (vcr)", () => { }), ); - return program.pipe( - Effect.provide(apiLayer), - Effect.provide( - VcrHttpClient.layer({ - vcrName: "producer-shopify", - mode: "auto", - match: matchByPathAndMethod, - redact: { - requestHeaders: ["x-shopify-access-token", "authorization"], - }, - matchIgnore: { - requestHeaders: ["x-shopify-access-token", "authorization"], - }, - }), - ), - Effect.provide(FileSystemCassetteStore.layer()), - Effect.provide(FetchHttpClient.layer), - Effect.provide(NodeServices.layer), - Effect.provideService( - ConfigProvider.ConfigProvider, - ConfigProvider.fromUnknown({ - SHOPIFY_API_BASE_URL: "https://nothing-12348377.myshopify.com/admin/api/2026-01", - SHOPIFY_API_TOKEN: "test-token", - }), + const cassetteStoreLayer = FileSystemCassetteStore.layer().pipe( + Layer.provide(NodeServices.layer), + ); + const vcrRuntimeLayer = Layer.mergeAll( + 
FetchHttpClient.layer, + NodeServices.layer, + cassetteStoreLayer, + ); + const vcrWithDeps = VcrHttpClient.layer({ + vcrName: "producer-shopify", + mode: "auto", + match: matchByPathAndMethod, + redact: { + requestHeaders: ["x-shopify-access-token", "authorization"], + }, + matchIgnore: { + requestHeaders: ["x-shopify-access-token", "authorization"], + }, + }).pipe(Layer.provide(vcrRuntimeLayer)); + + const testLayer = apiLayer.pipe( + Layer.provide(vcrWithDeps), + Layer.provide( + ConfigProvider.layer( + ConfigProvider.fromUnknown({ + SHOPIFY_API_BASE_URL: "https://nothing-12348377.myshopify.com/admin/api/2026-01", + SHOPIFY_API_TOKEN: "test-token", + }), + ), ), - Effect.scoped, ); + + return program.pipe(Effect.provide(testLayer), Effect.scoped); }); }); diff --git a/connectors/producer-shopify/test/helpers.ts b/connectors/producer-shopify/test/helpers.ts index 33088b8..5e680e2 100644 --- a/connectors/producer-shopify/test/helpers.ts +++ b/connectors/producer-shopify/test/helpers.ts @@ -16,7 +16,7 @@ export const makeTestPublisher = (expected: number) => Effect.gen(function* () { const publishedRef = yield* Ref.make>([]); const done = yield* Deferred.make(); - const layer = Layer.succeed(Publisher)({ + const layer = Layer.succeed(Publisher.Publisher)({ publish: ({ name, source, batch }) => Effect.gen(function* () { const next = yield* Ref.updateAndGet(publishedRef, (items) => [ diff --git a/connectors/producer-shopify/test/webhook.test.ts b/connectors/producer-shopify/test/webhook.test.ts index ca93e12..e1fd4f3 100644 --- a/connectors/producer-shopify/test/webhook.test.ts +++ b/connectors/producer-shopify/test/webhook.test.ts @@ -1,12 +1,12 @@ import { NodeHttpServer } from "@effect/platform-node"; import { describe, expect, it } from "@effect/vitest"; -import { ConnectorError, runConnector, StateStoreInMemory } from "@useairfoil/connector-kit"; +import { ConnectorError, Ingestion } from "@useairfoil/connector-kit"; import { ConfigProvider, Deferred, Effect, 
Layer, Ref } from "effect"; import { HttpClient, HttpClientRequest } from "effect/unstable/http"; import { createHmac } from "node:crypto"; import { ShopifyApiClient, type ShopifyApiClientService } from "../src/api"; -import { ShopifyConnector, ShopifyConnectorConfig } from "../src/index"; +import { layerConfig, ShopifyConnector } from "../src/index"; import { makeTestPublisher } from "./helpers"; const webhookSecret = "test-shopify-webhook-secret"; @@ -45,7 +45,7 @@ describe("producer-shopify webhook", () => { const runtimeLayer = NodeHttpServer.layerTest; const apiLayer = Layer.succeed(ShopifyApiClient)(makeApiStub()); - const connectorLayer = ShopifyConnectorConfig().pipe(Layer.provide(apiLayer)); + const connectorLayer = layerConfig.pipe(Layer.provide(apiLayer)); const configProvider = ConfigProvider.fromUnknown({ SHOPIFY_API_BASE_URL: "https://your-development-store.myshopify.com/admin/api/2026-01", SHOPIFY_API_TOKEN: "test-token", @@ -55,11 +55,11 @@ describe("producer-shopify webhook", () => { return Effect.gen(function* () { const { publishedRef, done, layer } = yield* makeTestPublisher(1); const { connector, routes } = yield* ShopifyConnector; - const runLayer = Layer.mergeAll(StateStoreInMemory, layer, runtimeLayer); + const runLayer = Layer.mergeAll(Ingestion.layerMemory, layer, runtimeLayer); yield* Effect.gen(function* () { yield* Effect.forkScoped( - runConnector(connector, { + Ingestion.runConnector(connector, { initialCutoff: new Date(), webhook: { routes, @@ -86,18 +86,21 @@ describe("producer-shopify webhook", () => { expect(published[0]?.name).toBe("products"); }).pipe(Effect.provide(runLayer)); }).pipe( - Effect.provide(connectorLayer), - Effect.provide(runtimeLayer), - Effect.provideService(ConfigProvider.ConfigProvider, configProvider), + Effect.provide( + connectorLayer.pipe( + Layer.provide(runtimeLayer), + Layer.provide(ConfigProvider.layer(configProvider)), + ), + ), Effect.scoped, - ) as Effect.Effect; + ); }); it.effect("rejects invalid 
webhook signatures", () => { const runtimeLayer = NodeHttpServer.layerTest; const apiLayer = Layer.succeed(ShopifyApiClient)(makeApiStub()); - const connectorLayer = ShopifyConnectorConfig().pipe(Layer.provide(apiLayer)); + const connectorLayer = layerConfig.pipe(Layer.provide(apiLayer)); const configProvider = ConfigProvider.fromUnknown({ SHOPIFY_API_BASE_URL: "https://your-development-store.myshopify.com/admin/api/2026-01", SHOPIFY_API_TOKEN: "test-token", @@ -107,11 +110,11 @@ describe("producer-shopify webhook", () => { return Effect.gen(function* () { const { publishedRef, layer } = yield* makeTestPublisher(1); const { connector, routes } = yield* ShopifyConnector; - const runLayer = Layer.mergeAll(StateStoreInMemory, layer, runtimeLayer); + const runLayer = Layer.mergeAll(Ingestion.layerMemory, layer, runtimeLayer); yield* Effect.gen(function* () { yield* Effect.forkScoped( - runConnector(connector, { + Ingestion.runConnector(connector, { initialCutoff: new Date(), webhook: { routes, @@ -135,10 +138,13 @@ describe("producer-shopify webhook", () => { expect(published.length).toBe(0); }).pipe(Effect.provide(runLayer)); }).pipe( - Effect.provide(connectorLayer), - Effect.provide(runtimeLayer), - Effect.provideService(ConfigProvider.ConfigProvider, configProvider), + Effect.provide( + connectorLayer.pipe( + Layer.provide(runtimeLayer), + Layer.provide(ConfigProvider.layer(configProvider)), + ), + ), Effect.scoped, - ) as Effect.Effect; + ); }); }); diff --git a/docker-compose.yaml b/docker-compose.yaml index 13324d1..27ee84e 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -1,6 +1,6 @@ services: wings: - image: docker.useairfoil.com/airfoil/wings:0.1.0 + image: docker.useairfoil.com/airfoil/wings:latest command: - dev - --http.address=0.0.0.0:7780 diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 8bfad80..b1e57f4 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -188,15 +188,9 @@ importers: '@useairfoil/wings': specifier: workspace:* version: 
link:../wings - consola: - specifier: ^3.4.2 - version: 3.4.2 effect: specifier: 'catalog:' version: 4.0.0-beta.54 - json-stable-stringify: - specifier: ^1.3.0 - version: 1.3.0 devDependencies: '@effect/vitest': specifier: 'catalog:' @@ -259,10 +253,6 @@ importers: nice-grpc-common: specifier: 'catalog:' version: 2.0.3 - optionalDependencies: - testcontainers: - specifier: 'catalog:' - version: 11.14.0 devDependencies: '@bufbuild/buf': specifier: 'catalog:' @@ -285,6 +275,10 @@ importers: vitest: specifier: 'catalog:' version: 3.2.4(@types/node@24.12.2)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3) + optionalDependencies: + testcontainers: + specifier: 'catalog:' + version: 11.14.0 packages/wings: dependencies: @@ -828,21 +822,25 @@ packages: resolution: {integrity: sha512-2JkWuMGj+HpW6oPAvU5VdAx1afTnEbiM10Y3YOrl3fipWV4BiP5VDx762QTrfCraP4hl6yqTgvTe7F9xaby+jQ==} cpu: [arm64] os: [linux] + libc: [glibc] '@nx/nx-linux-arm64-musl@22.6.5': resolution: {integrity: sha512-Z/zMqFClnEyqDXouJKEPoWVhMQIif5F0YuECWBYjd3ZLwQsXGTItoh+6Wm3XF/nGMA2uLOHyTq/X7iFXQY3RzA==} cpu: [arm64] os: [linux] + libc: [musl] '@nx/nx-linux-x64-gnu@22.6.5': resolution: {integrity: sha512-FlotSyqNnaXSn0K+yWw+hRdYBwusABrPgKLyixfJIYRzsy+xPKN6pON6vZfqGwzuWF/9mEGReRz+iM8PiW0XSg==} cpu: [x64] os: [linux] + libc: [glibc] '@nx/nx-linux-x64-musl@22.6.5': resolution: {integrity: sha512-RVOe2qcwhoIx6mxQURPjUfAW5SEOmT2gdhewvdcvX9ICq1hj5B2VarmkhTg0qroO7xiyqOqwq26mCzoV2I3NgQ==} cpu: [x64] os: [linux] + libc: [musl] '@nx/nx-win32-arm64-msvc@22.6.5': resolution: {integrity: sha512-ZqurqI8VuYnsr2Kn4K4t+Gx6j/BZdf6qz/6Tv4A7XQQ6oNYVQgTqoNEFj+CCkVaIe6aIdCWpousFLqs+ZgBqYQ==} @@ -904,48 +902,56 @@ packages: engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [linux] + libc: [glibc] '@oxfmt/binding-linux-arm64-musl@0.46.0': resolution: {integrity: sha512-aAUPBWJ1lGwwnxZUEDLJ94+Iy6MuwJwPxUgO4sCA5mEEyDk7b+cDQ+JpX1VR150Zoyd+D49gsrUzpUK5h587Eg==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [linux] + libc: [musl] 
'@oxfmt/binding-linux-ppc64-gnu@0.46.0': resolution: {integrity: sha512-ufBCJukyFX/UDrokP/r6BGDoTInnsDs7bxyzKAgMiZlt2Qu8GPJSJ6Zm6whIiJzKk0naxA8ilwmbO1LMw6Htxw==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [ppc64] os: [linux] + libc: [glibc] '@oxfmt/binding-linux-riscv64-gnu@0.46.0': resolution: {integrity: sha512-eqtlC2YmPqjun76R1gVfGLuKWx7NuEnLEAudZ7n6ipSKbCZTqIKSs1b5Y8K/JHZsRpLkeSmAAjig5HOIg8fQzQ==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [riscv64] os: [linux] + libc: [glibc] '@oxfmt/binding-linux-riscv64-musl@0.46.0': resolution: {integrity: sha512-yccVOO2nMXkQLGgy0He3EQEwKD7NF0zEk+/OWmroznkqXyJdN6bfK0LtNnr6/14Bh3FjpYq7bP33l/VloCnxpA==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [riscv64] os: [linux] + libc: [musl] '@oxfmt/binding-linux-s390x-gnu@0.46.0': resolution: {integrity: sha512-aAf7fG23OQCey6VRPj9IeCraoYtpgtx0ZyJ1CXkPyT1wjzBE7c3xtuxHe/AdHaJfVVb/SXpSk8Gl1LzyQupSqw==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [s390x] os: [linux] + libc: [glibc] '@oxfmt/binding-linux-x64-gnu@0.46.0': resolution: {integrity: sha512-q0JPsTMyJNjYrBvYFDz4WbVsafNZaPCZv4RnFypRotLqpKROtBZcEaXQW4eb9YmvLU3NckVemLJnzkSZSdmOxw==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [linux] + libc: [glibc] '@oxfmt/binding-linux-x64-musl@0.46.0': resolution: {integrity: sha512-7LsLY9Cw57GPkhSR+duI3mt9baRczK/DtHYSldQ4BEU92da9igBQNl4z7Vq5U9NNPsh1FmpKvv1q9WDtiUQR1A==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [linux] + libc: [musl] '@oxfmt/binding-openharmony-arm64@0.46.0': resolution: {integrity: sha512-lHiBOz8Duaku7JtRNLlps3j++eOaICPZSd8FCVmTDM4DFOPT71Bjn7g6iar1z7StXlKRweUKxWUs4sA+zWGDXg==} @@ -1018,48 +1024,56 @@ packages: engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [linux] + libc: [glibc] '@oxlint/binding-linux-arm64-musl@1.61.0': resolution: {integrity: sha512-bl1dQh8LnVqsj6oOQAcxwbuOmNJkwc4p6o//HTBZhNTzJy21TLDwAviMqUFNUxDHkPGpmdKTSN4tWTjLryP8xg==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [linux] + libc: [musl] 
'@oxlint/binding-linux-ppc64-gnu@1.61.0': resolution: {integrity: sha512-QoOX6KB2IiEpyOj/HKqaxi+NQHPnOgNgnr22n9N4ANJCzXkUlj1UmeAbFb4PpqdlHIzvGDM5xZ0OKtcLq9RhiQ==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [ppc64] os: [linux] + libc: [glibc] '@oxlint/binding-linux-riscv64-gnu@1.61.0': resolution: {integrity: sha512-1TGcTerjY6p152wCof3oKElccq3xHljS/Mucp04gV/4ATpP6nO7YNnp7opEg6SHkv2a57/b4b8Ndm9znJ1/qAw==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [riscv64] os: [linux] + libc: [glibc] '@oxlint/binding-linux-riscv64-musl@1.61.0': resolution: {integrity: sha512-65wXEmZIrX2ADwC8i/qFL4EWLSbeuBpAm3suuX1vu4IQkKd+wLT/HU/BOl84kp91u2SxPkPDyQgu4yrqp8vwVA==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [riscv64] os: [linux] + libc: [musl] '@oxlint/binding-linux-s390x-gnu@1.61.0': resolution: {integrity: sha512-TVvhgMvor7Qa6COeXxCJ7ENOM+lcAOGsQ0iUdPSCv2hxb9qSHLQ4XF1h50S6RE1gBOJ0WV3rNukg4JJJP1LWRA==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [s390x] os: [linux] + libc: [glibc] '@oxlint/binding-linux-x64-gnu@1.61.0': resolution: {integrity: sha512-SjpS5uYuFoDnDdZPwZE59ndF95AsY47R5MliuneTWR1pDm2CxGJaYXbKULI71t5TVfLQUWmrHEGRL9xvuq6dnA==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [linux] + libc: [glibc] '@oxlint/binding-linux-x64-musl@1.61.0': resolution: {integrity: sha512-gGfAeGD4sNJGILZbc/yKcIimO9wQnPMoYp9swAaKeEtwsSQAbU+rsdQze5SBtIP6j0QDzeYd4XSSUCRCF+LIeQ==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [linux] + libc: [musl] '@oxlint/binding-openharmony-arm64@1.61.0': resolution: {integrity: sha512-OlVT0LrG/ct33EVtWRyR+B/othwmDWeRxfi13wUdPeb3lAT5TgTcFDcfLfarZtzB4W1nWF/zICMgYdkggX2WmQ==} @@ -1166,24 +1180,28 @@ packages: engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [linux] + libc: [glibc] '@rolldown/binding-linux-arm64-musl@1.0.0-beta.45': resolution: {integrity: sha512-tdy8ThO/fPp40B81v0YK3QC+KODOmzJzSUOO37DinQxzlTJ026gqUSOM8tzlVixRbQJltgVDCTYF8HNPRErQTA==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [linux] + libc: [musl] 
'@rolldown/binding-linux-x64-gnu@1.0.0-beta.45': resolution: {integrity: sha512-lS082ROBWdmOyVY/0YB3JmsiClaWoxvC+dA8/rbhyB9VLkvVEaihLEOr4CYmrMse151C4+S6hCw6oa1iewox7g==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [linux] + libc: [glibc] '@rolldown/binding-linux-x64-musl@1.0.0-beta.45': resolution: {integrity: sha512-Hi73aYY0cBkr1/SvNQqH8Cd+rSV6S9RB5izCv0ySBcRnd/Wfn5plguUoGYwBnhHgFbh6cPw9m2dUVBR6BG1gxA==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [linux] + libc: [musl] '@rolldown/binding-openharmony-arm64@1.0.0-beta.45': resolution: {integrity: sha512-fljEqbO7RHHogNDxYtTzr+GNjlfOx21RUyGmF+NrkebZ8emYYiIqzPxsaMZuRx0rgZmVmliOzEp86/CQFDKhJQ==} @@ -1251,66 +1269,79 @@ packages: resolution: {integrity: sha512-2QxQrM+KQ7DAW4o22j+XZ6RKdxjLD7BOWTP0Bv0tmjdyhXSsr2Ul1oJDQqh9Zf5qOwTuTc7Ek83mOFaKnodPjg==} cpu: [arm] os: [linux] + libc: [glibc] '@rollup/rollup-linux-arm-musleabihf@4.60.2': resolution: {integrity: sha512-TbziEu2DVsTEOPif2mKWkMeDMLoYjx95oESa9fkQQK7r/Orta0gnkcDpzwufEcAO2BLBsD7mZkXGFqEdMRRwfw==} cpu: [arm] os: [linux] + libc: [musl] '@rollup/rollup-linux-arm64-gnu@4.60.2': resolution: {integrity: sha512-bO/rVDiDUuM2YfuCUwZ1t1cP+/yqjqz+Xf2VtkdppefuOFS2OSeAfgafaHNkFn0t02hEyXngZkxtGqXcXwO8Rg==} cpu: [arm64] os: [linux] + libc: [glibc] '@rollup/rollup-linux-arm64-musl@4.60.2': resolution: {integrity: sha512-hr26p7e93Rl0Za+JwW7EAnwAvKkehh12BU1Llm9Ykiibg4uIr2rbpxG9WCf56GuvidlTG9KiiQT/TXT1yAWxTA==} cpu: [arm64] os: [linux] + libc: [musl] '@rollup/rollup-linux-loong64-gnu@4.60.2': resolution: {integrity: sha512-pOjB/uSIyDt+ow3k/RcLvUAOGpysT2phDn7TTUB3n75SlIgZzM6NKAqlErPhoFU+npgY3/n+2HYIQVbF70P9/A==} cpu: [loong64] os: [linux] + libc: [glibc] '@rollup/rollup-linux-loong64-musl@4.60.2': resolution: {integrity: sha512-2/w+q8jszv9Ww1c+6uJT3OwqhdmGP2/4T17cu8WuwyUuuaCDDJ2ojdyYwZzCxx0GcsZBhzi3HmH+J5pZNXnd+Q==} cpu: [loong64] os: [linux] + libc: [musl] '@rollup/rollup-linux-ppc64-gnu@4.60.2': resolution: {integrity: 
sha512-11+aL5vKheYgczxtPVVRhdptAM2H7fcDR5Gw4/bTcteuZBlH4oP9f5s9zYO9aGZvoGeBpqXI/9TZZihZ609wKw==} cpu: [ppc64] os: [linux] + libc: [glibc] '@rollup/rollup-linux-ppc64-musl@4.60.2': resolution: {integrity: sha512-i16fokAGK46IVZuV8LIIwMdtqhin9hfYkCh8pf8iC3QU3LpwL+1FSFGej+O7l3E/AoknL6Dclh2oTdnRMpTzFQ==} cpu: [ppc64] os: [linux] + libc: [musl] '@rollup/rollup-linux-riscv64-gnu@4.60.2': resolution: {integrity: sha512-49FkKS6RGQoriDSK/6E2GkAsAuU5kETFCh7pG4yD/ylj9rKhTmO3elsnmBvRD4PgJPds5W2PkhC82aVwmUcJ7A==} cpu: [riscv64] os: [linux] + libc: [glibc] '@rollup/rollup-linux-riscv64-musl@4.60.2': resolution: {integrity: sha512-mjYNkHPfGpUR00DuM1ZZIgs64Hpf4bWcz9Z41+4Q+pgDx73UwWdAYyf6EG/lRFldmdHHzgrYyge5akFUW0D3mQ==} cpu: [riscv64] os: [linux] + libc: [musl] '@rollup/rollup-linux-s390x-gnu@4.60.2': resolution: {integrity: sha512-ALyvJz965BQk8E9Al/JDKKDLH2kfKFLTGMlgkAbbYtZuJt9LU8DW3ZoDMCtQpXAltZxwBHevXz5u+gf0yA0YoA==} cpu: [s390x] os: [linux] + libc: [glibc] '@rollup/rollup-linux-x64-gnu@4.60.2': resolution: {integrity: sha512-UQjrkIdWrKI626Du8lCQ6MJp/6V1LAo2bOK9OTu4mSn8GGXIkPXk/Vsp4bLHCd9Z9Iz2OTEaokUE90VweJgIYQ==} cpu: [x64] os: [linux] + libc: [glibc] '@rollup/rollup-linux-x64-musl@4.60.2': resolution: {integrity: sha512-bTsRGj6VlSdn/XD4CGyzMnzaBs9bsRxy79eTqTCBsA8TMIEky7qg48aPkvJvFe1HyzQ5oMZdg7AnVlWQSKLTnw==} cpu: [x64] os: [linux] + libc: [musl] '@rollup/rollup-openbsd-x64@4.60.2': resolution: {integrity: sha512-6d4Z3534xitaA1FcMWP7mQPq5zGwBmGbhphh2DwaA1aNIXUu3KTOfwrWpbwI4/Gr0uANo7NTtaykFyO2hPuFLg==} diff --git a/templates/producer-template/README.md b/templates/producer-template/README.md index 6ac0ded..7099773 100644 --- a/templates/producer-template/README.md +++ b/templates/producer-template/README.md @@ -1,73 +1,148 @@ -# producer-template +# @useairfoil/producer-template + +Reference producer connector built on Airfoil Connector Kit. + +It uses JSONPlaceholder so the package stays runnable, typecheckable, and testable without external credentials. 
+ +## Public Exports + +- `TemplateApiClient` +- `layerApiClient(config)` +- `TemplateConnector` +- `layerConfig` +- `TemplateConfig` +- `TemplateConfigConfig` +- `TemplateConnectorRuntime` +- `Post` +- `PostSchema` +- `WebhookPayload` +- `WebhookPayloadSchema` + +## What This Package Shows + +- a single-entity connector wired with `defineConnector` and `defineEntity` +- an Effect `HttpClient` API client layer +- paginated backfill plus queue-backed live webhook streams +- `Webhook.route(...)` with schema-validated payloads +- a sandbox runtime using Node HTTP, in-memory state, and a console publisher +- VCR-backed API tests and in-memory webhook tests + +## Configuration + +Defaults make the package runnable without extra setup, but all values still flow through Effect Config. + +```env +TEMPLATE_API_BASE_URL=https://jsonplaceholder.typicode.com +TEMPLATE_API_TOKEN=anonymous +TEMPLATE_WEBHOOK_SECRET= +TEMPLATE_WEBHOOK_PORT=8080 +ACK_TELEMETRY_ENABLED=false +ACK_OTLP_BASE_URL=http://localhost:4318 +ACK_SERVICE_NAME=producer-template +``` -A minimal, **buildable**, **CI-verified** Airfoil Connector Kit (ACK) connector template. -It targets [JSONPlaceholder](https://jsonplaceholder.typicode.com) (a free public -REST API) so the template can be compiled, typechecked, and tested without any -external credentials or sandbox setup. +## Minimal Runtime Wiring -Use it as the starting point for any new producer connector. See -[`.agents/skills/airfoil-kit/SKILL.md`](../../.agents/skills/airfoil-kit/SKILL.md) -for the end-to-end playbook. 
+```ts +import { NodeHttpServer } from "@effect/platform-node"; +import { Ingestion, Publisher } from "@useairfoil/connector-kit"; +import { ConfigProvider, Effect, Layer } from "effect"; +import { FetchHttpClient } from "effect/unstable/http"; +import { createServer } from "node:http"; ---- +import { layerConfig, TemplateConnector } from "@useairfoil/producer-template"; -## What this template demonstrates +const ConsolePublisher = Layer.succeed(Publisher.Publisher)({ + publish: () => Effect.succeed({ success: true }), +}); -- `defineConnector` with a single entity (`posts`). -- `defineEntity` with a paginated backfill stream and a live webhook stream. -- A small Effect `HttpClient`-based API client (bearer-token stubbed). -- Effect v4 `Config` composition for credentials, base URL, webhook port, - and webhook secret (optional). -- A `WebhookRoute` with `Schema`-validated payload and optional raw-body - signature verification hook. -- VCR tests: one recorded cassette for the backfill happy path + one in-memory - webhook test using `NodeHttpServer.layerTest`. -- `sandbox.ts` runner using `NodeHttpServer` (or Bun equivalent), `FetchHttpClient`, an in-memory - `StateStore`, a console `Publisher`, and optional OTLP telemetry. 
+const envLayer = Layer.mergeAll( + FetchHttpClient.layer, + Layer.succeed(ConfigProvider.ConfigProvider, ConfigProvider.fromEnv()), +); -## Files +const connectorLayer = layerConfig.pipe(Layer.provide(envLayer)); -``` -src/ -├── schemas.ts - entity + webhook payload schemas (Effect Schema) -├── api.ts - HttpClient-based API service -├── streams.ts - backfill + live stream helpers -├── connector.ts - defineConnector wiring + webhook route -├── sandbox.ts - local dev runner (Node example, Bun-compatible) -└── index.ts - public exports +const program = Effect.gen(function* () { + const { connector, routes } = yield* TemplateConnector; + const serverLayer = NodeHttpServer.layer(createServer, { port: 8080 }); -test/ -├── helpers.ts - test publisher layer -├── api.vcr.test.ts - VCR replay of the backfill path -└── webhook.test.ts - in-memory webhook round trip + return yield* Ingestion.runConnector(connector, { + initialCutoff: new Date(), + webhook: { + routes, + healthPath: "/health", + disableHttpLogger: true, + }, + }).pipe(Effect.provide(serverLayer)); +}); + +const runtimeLayer = Layer.mergeAll(Ingestion.layerMemory, ConsolePublisher, connectorLayer); + +const runnable = Effect.scoped(program).pipe(Effect.provide(runtimeLayer)); + +Effect.runPromise(runnable); ``` -## Using the template +## API Client Layer -This package is meant to be **copied**, not installed. The agent workflow is: +`layerApiClient(config)` builds `TemplateApiClient` from a raw `TemplateConfig` value. -1. `cp -R templates/producer-template connectors/producer-` -2. Replace `TEMPLATE_` / `template` identifiers with your service name. -3. Replace the JSONPlaceholder endpoint / schemas with real API calls. -4. Re-record VCR cassettes against the real sandbox. -5. Run `pnpm run lint && pnpm run typecheck && pnpm run build && pnpm run test:ci` - from the repo root. +The default implementation uses bearer-token style auth and JSONPlaceholder pagination via `_page` and `_limit`. 
-See [`.agents/skills/airfoil-kit/assets/rename-checklist.md`](../../.agents/skills/airfoil-kit/assets/rename-checklist.md) -for the exact search-and-replace list. +```ts +import { Effect, Layer, Option } from "effect"; +import { FetchHttpClient } from "effect/unstable/http"; -## Local development +import { layerApiClient, PostSchema, TemplateApiClient } from "@useairfoil/producer-template"; -```bash -cd templates/producer-template -cp .env.example .env -pnpm run sandbox # starts the webhook server on :8080 +const apiLayer = layerApiClient({ + apiBaseUrl: "https://jsonplaceholder.typicode.com", + apiToken: "anonymous", + webhookSecret: Option.none(), +}).pipe(Layer.provide(FetchHttpClient.layer)); + +const program = TemplateApiClient.use((api) => + api.fetchList(PostSchema, "/posts", { + page: 1, + limit: 10, + }), +).pipe(Effect.provide(apiLayer)); + +Effect.runPromise(program); +``` + +## Webhook Behavior + +- webhook path: `POST /webhooks/template` +- route payloads are decoded with `WebhookPayloadSchema` +- if `TEMPLATE_WEBHOOK_SECRET` is set, the connector expects a raw body and passes it to the signature verification hook +- the template verification function currently accepts everything; replace it with real upstream verification when adapting this package + +## Structure + +```text +src/ +├── api.ts +├── connector.ts +├── schemas.ts +├── sandbox.ts +├── streams.ts +└── index.ts + +test/ +├── api.vcr.test.ts +├── helpers.ts +└── webhook.test.ts ``` -## Scripts +## Testing + +- `test/api.vcr.test.ts`: VCR-backed replay of the API client path +- `test/webhook.test.ts`: in-memory webhook flow using `NodeHttpServer.layerTest` + +Run: -- `pnpm run build` — bundle `src/` via `tsdown`. -- `pnpm run test` — vitest (the template tests do not require `.env`). -- `pnpm run test:ci` — vitest `run` mode. -- `pnpm run typecheck` — `tsc --noEmit`. -- `pnpm run sandbox` — local end-to-end runner. 
+```bash +pnpm --filter @useairfoil/producer-template run test:ci +``` diff --git a/templates/producer-template/src/api.ts b/templates/producer-template/src/api.ts index e0479f0..6c7357d 100644 --- a/templates/producer-template/src/api.ts +++ b/templates/producer-template/src/api.ts @@ -40,7 +40,7 @@ export class TemplateApiClient extends Context.Service< export const makeTemplateApiClient = ( config: TemplateConfig, ): Effect.Effect => - Effect.gen(function* () { + Effect.fnUntraced(function* () { const client = (yield* HttpClient.HttpClient).pipe( HttpClient.mapRequest(HttpClientRequest.prependUrl(config.apiBaseUrl)), HttpClient.mapRequest(HttpClientRequest.bearerToken(config.apiToken)), @@ -93,9 +93,9 @@ export const makeTemplateApiClient = ( }; return { fetchJson, fetchList }; - }); + })(); -export const TemplateApiClientConfig = ( +export const layerApiClient = ( config: TemplateConfig, ): Layer.Layer => Layer.effect(TemplateApiClient)(makeTemplateApiClient(config)); diff --git a/templates/producer-template/src/connector.ts b/templates/producer-template/src/connector.ts index 980e398..83d1ce2 100644 --- a/templates/producer-template/src/connector.ts +++ b/templates/producer-template/src/connector.ts @@ -5,11 +5,11 @@ import { ConnectorError, defineConnector, defineEntity, - type WebhookRoute, + Webhook, } from "@useairfoil/connector-kit"; import { Config, Context, Effect, Layer, Option } from "effect"; -import { TemplateApiClient, TemplateApiClientConfig } from "./api"; +import { layerApiClient, TemplateApiClient } from "./api"; import { type Post, PostSchema, type WebhookPayload, WebhookPayloadSchema } from "./schemas"; import { dispatchEntityWebhook, @@ -26,7 +26,7 @@ export type TemplateConfig = { export type TemplateConnectorRuntime = { readonly connector: ConnectorDefinition; - readonly routes: ReadonlyArray>; + readonly routes: ReadonlyArray>; }; export class TemplateConnector extends Context.Service< @@ -96,7 +96,7 @@ const resolveWebhookDispatch = 
(options: { const makeTemplateConnector = ( config: TemplateConfig, ): Effect.Effect => - Effect.gen(function* () { + Effect.fnUntraced(function* () { const api = yield* TemplateApiClient; const postStreams = yield* makeEntityStreams({ api, @@ -120,11 +120,11 @@ const makeTemplateConnector = ( events: [], }); - const webhookRoute: WebhookRoute = { + const webhookRoute = Webhook.route({ path: "/webhooks/template", schema: WebhookPayloadSchema, handle: (payload, request, rawBody) => - Effect.gen(function* () { + Effect.fn("template/webhook/handle")(function* () { if (Option.isSome(config.webhookSecret) && rawBody) { yield* verifyWebhookSignature({ rawBody, @@ -133,12 +133,12 @@ const makeTemplateConnector = ( }); } - yield* resolveWebhookDispatch({ + return yield* resolveWebhookDispatch({ payload, posts: postStreams, }); - }), - }; + })(), + }); if (Option.isNone(config.webhookSecret)) { yield* Effect.logWarning( @@ -147,20 +147,14 @@ const makeTemplateConnector = ( } return { connector, routes: [webhookRoute] }; - }).pipe(Effect.annotateLogs({ component: "producer-template" })); + })().pipe(Effect.annotateLogs({ component: "producer-template" })); -export const TemplateConnectorConfig = (): Layer.Layer< - TemplateConnector, - ConnectorError, - HttpClient.HttpClient -> => +export const layerConfig: Layer.Layer = Layer.effect(TemplateConnector)( - Effect.gen(function* () { + Effect.fnUntraced(function* () { const config = yield* TemplateConfigConfig; - return yield* makeTemplateConnector(config).pipe( - Effect.provide(TemplateApiClientConfig(config)), - ); - }).pipe( + return yield* makeTemplateConnector(config).pipe(Effect.provide(layerApiClient(config))); + })().pipe( Effect.mapError((error) => error instanceof ConnectorError ? 
error diff --git a/templates/producer-template/src/index.ts b/templates/producer-template/src/index.ts index 1a103a3..6104615 100644 --- a/templates/producer-template/src/index.ts +++ b/templates/producer-template/src/index.ts @@ -1,9 +1,9 @@ -export { TemplateApiClient, TemplateApiClientConfig } from "./api"; +export { layerApiClient, TemplateApiClient } from "./api"; export { type TemplateConfig, TemplateConfigConfig, TemplateConnector, - TemplateConnectorConfig, + layerConfig, type TemplateConnectorRuntime, } from "./connector"; export type { Post, WebhookPayload } from "./schemas"; diff --git a/templates/producer-template/src/sandbox.ts b/templates/producer-template/src/sandbox.ts index 1c00e1d..020880f 100644 --- a/templates/producer-template/src/sandbox.ts +++ b/templates/producer-template/src/sandbox.ts @@ -1,13 +1,11 @@ -import type { ConnectorError } from "@useairfoil/connector-kit"; - import { NodeHttpServer } from "@effect/platform-node"; -import { Publisher, runConnector, StateStoreInMemory } from "@useairfoil/connector-kit"; +import { Ingestion, Publisher } from "@useairfoil/connector-kit"; import { Config, ConfigProvider, DateTime, Effect, Layer, Logger, Metric } from "effect"; import { FetchHttpClient } from "effect/unstable/http"; import * as Observability from "effect/unstable/observability"; import { createServer } from "node:http"; -import { TemplateConnector, TemplateConnectorConfig } from "./index"; +import { layerConfig, TemplateConnector } from "./index"; const SandboxConfig = Config.all({ port: Config.port("TEMPLATE_WEBHOOK_PORT").pipe(Config.withDefault(8080)), @@ -20,8 +18,8 @@ const TelemetryConfig = Config.all({ }); // Console publisher so you can see ingestion output during `pnpm run sandbox`. -// Real connectors plug in `WingsPublisherLayer` from @useairfoil/connector-kit. -const ConsolePublisherLayer = Layer.succeed(Publisher)({ +// Real connectors plug in `layerWings` from @useairfoil/connector-kit. 
+const ConsolePublisherLayer = Layer.succeed(Publisher.Publisher)({ publish: ({ name, source, batch }) => Effect.gen(function* () { const ids = batch.rows.map((r) => r["id"]).filter((id) => id != null); @@ -49,7 +47,7 @@ const program = Effect.gen(function* () { const now = yield* DateTime.now; - return yield* runConnector(connector, { + return yield* Ingestion.runConnector(connector, { initialCutoff: DateTime.toDate(now), webhook: { routes, @@ -64,7 +62,7 @@ const EnvLayer = Layer.mergeAll( Layer.succeed(ConfigProvider.ConfigProvider, ConfigProvider.fromEnv()), ); -const ConnectorLayer = TemplateConnectorConfig(); +const ConnectorLayer = layerConfig.pipe(Layer.provide(EnvLayer)); const TelemetryLayer = Layer.unwrap( Effect.gen(function* () { @@ -90,23 +88,17 @@ const TelemetryLayer = Layer.unwrap( Metric.enableRuntimeMetricsLayer, ); }), -); +).pipe(Layer.provide(EnvLayer)); const RuntimeLayer = Layer.mergeAll( - StateStoreInMemory, + Ingestion.layerMemory, ConsolePublisherLayer, ConnectorLayer, Logger.layer([Logger.consolePretty()]), TelemetryLayer, - EnvLayer, ); -Effect.runPromise( - Effect.scoped(program).pipe(Effect.provide(RuntimeLayer)) as Effect.Effect< - void, - Config.ConfigError | ConnectorError - >, -).catch((error) => { +Effect.runPromise(Effect.scoped(program).pipe(Effect.provide(RuntimeLayer))).catch((error) => { void Effect.runPromise( Effect.logError("fatal error").pipe( Effect.annotateLogs({ diff --git a/templates/producer-template/src/streams.ts b/templates/producer-template/src/streams.ts index ec662b3..32e9f8a 100644 --- a/templates/producer-template/src/streams.ts +++ b/templates/producer-template/src/streams.ts @@ -1,13 +1,6 @@ import type * as Schema from "effect/Schema"; -import { - type Batch, - type ConnectorError, - type Cursor, - makePullStream, - makeWebhookQueue, - type WebhookStream, -} from "@useairfoil/connector-kit"; +import { type Batch, type ConnectorError, type Cursor, Streams } from "@useairfoil/connector-kit"; import { 
Deferred, Effect, Queue, Stream } from "effect"; import type { TemplateApiClientService } from "./api"; @@ -36,18 +29,18 @@ const setCutoff = (deferred: Deferred.Deferred, cursor: Cursor) = // Enqueue a single webhook row after recording its cursor as the backfill // cutoff. This is safe to call many times — Deferred.succeed is idempotent. export const dispatchEntityWebhook = >(options: { - readonly queue: WebhookStream; + readonly queue: Streams.WebhookStream; readonly cutoff: Deferred.Deferred; readonly row: T; readonly cursor: Cursor; }): Effect.Effect => - Effect.gen(function* () { + Effect.fnUntraced(function* () { yield* setCutoff(options.cutoff, options.cursor); - yield* Queue.offer(options.queue.queue, { + return yield* Queue.offer(options.queue.queue, { cursor: options.cursor, rows: [options.row], }).pipe(Effect.asVoid); - }); + })(); // Backfill stream for a single entity. Waits for the cutoff deferred to // resolve (set by the first live webhook or by initialCutoff), then pages @@ -62,7 +55,7 @@ const makeBackfillStream = >(options: { }): Stream.Stream, ConnectorError> => Stream.fromEffect(Deferred.await(options.cutoff)).pipe( Stream.flatMap((cutoff) => - makePullStream({ + Streams.makePullStream({ fetchPage: (cursor: Cursor | undefined) => { const page = cursor ? 
Number(cursor) : 1; return options.api @@ -93,7 +86,7 @@ const makeBackfillStream = >(options: { ); export type EntityStreams> = { - readonly live: WebhookStream; + readonly live: Streams.WebhookStream; readonly cutoff: Deferred.Deferred; readonly backfill: Stream.Stream, ConnectorError>; }; @@ -108,9 +101,9 @@ export const makeEntityStreams = >(options: { readonly cursorField: keyof T & string; readonly limit?: number; }): Effect.Effect, ConnectorError> => - Effect.gen(function* () { - const queue = yield* makeWebhookQueue({ capacity: 1024 }); + Effect.fnUntraced(function* () { + const queue = yield* Streams.makeWebhookQueue({ capacity: 1024 }); const cutoff = yield* Deferred.make(); const backfill = makeBackfillStream({ ...options, cutoff }); return { live: queue, cutoff, backfill }; - }); + })(); diff --git a/templates/producer-template/test/api.vcr.test.ts b/templates/producer-template/test/api.vcr.test.ts index df98450..c55d4db 100644 --- a/templates/producer-template/test/api.vcr.test.ts +++ b/templates/producer-template/test/api.vcr.test.ts @@ -30,25 +30,30 @@ describe("producer-template api (vcr)", () => { }), ); - return program.pipe( - Effect.provide(apiLayer), - Effect.provide( - VcrHttpClient.layer({ - vcrName: "producer-template", - mode: "replay", - }), - ), - Effect.provide(FileSystemCassetteStore.layer()), - Effect.provide(FetchHttpClient.layer), - Effect.provide(NodeServices.layer), - Effect.provideService( - ConfigProvider.ConfigProvider, - ConfigProvider.fromUnknown({ - TEMPLATE_API_BASE_URL: "https://jsonplaceholder.typicode.com", - TEMPLATE_API_TOKEN: "test", - }), + const cassetteStoreLayer = FileSystemCassetteStore.layer().pipe( + Layer.provide(NodeServices.layer), + ); + const vcrRuntimeLayer = Layer.mergeAll( + FetchHttpClient.layer, + NodeServices.layer, + cassetteStoreLayer, + ); + const vcrWithDeps = VcrHttpClient.layer({ vcrName: "producer-template", mode: "replay" }).pipe( + Layer.provide(vcrRuntimeLayer), + ); + + const testLayer = 
apiLayer.pipe( + Layer.provide(vcrWithDeps), + Layer.provide( + ConfigProvider.layer( + ConfigProvider.fromUnknown({ + TEMPLATE_API_BASE_URL: "https://jsonplaceholder.typicode.com", + TEMPLATE_API_TOKEN: "test", + }), + ), ), - Effect.scoped, ); + + return program.pipe(Effect.provide(testLayer), Effect.scoped); }); }); diff --git a/templates/producer-template/test/helpers.ts b/templates/producer-template/test/helpers.ts index 33088b8..5e680e2 100644 --- a/templates/producer-template/test/helpers.ts +++ b/templates/producer-template/test/helpers.ts @@ -16,7 +16,7 @@ export const makeTestPublisher = (expected: number) => Effect.gen(function* () { const publishedRef = yield* Ref.make>([]); const done = yield* Deferred.make(); - const layer = Layer.succeed(Publisher)({ + const layer = Layer.succeed(Publisher.Publisher)({ publish: ({ name, source, batch }) => Effect.gen(function* () { const next = yield* Ref.updateAndGet(publishedRef, (items) => [ diff --git a/templates/producer-template/test/webhook.test.ts b/templates/producer-template/test/webhook.test.ts index beda058..a47b40b 100644 --- a/templates/producer-template/test/webhook.test.ts +++ b/templates/producer-template/test/webhook.test.ts @@ -1,11 +1,11 @@ import { NodeHttpServer } from "@effect/platform-node"; import { describe, expect, it } from "@effect/vitest"; -import { ConnectorError, runConnector, StateStoreInMemory } from "@useairfoil/connector-kit"; +import { ConnectorError, Ingestion } from "@useairfoil/connector-kit"; import { ConfigProvider, Deferred, Effect, Layer, Ref } from "effect"; import { HttpClient, HttpClientRequest } from "effect/unstable/http"; import { TemplateApiClient, type TemplateApiClientService } from "../src/api"; -import { TemplateConnector, TemplateConnectorConfig } from "../src/index"; +import { layerConfig, TemplateConnector } from "../src/index"; import { makeTestPublisher } from "./helpers"; const postWebhookPayload = { @@ -31,7 +31,7 @@ describe("producer-template webhook", 
() => { const runtimeLayer = NodeHttpServer.layerTest; const apiLayer = Layer.succeed(TemplateApiClient)(makeApiStub()); - const connectorLayer = TemplateConnectorConfig().pipe(Layer.provide(apiLayer)); + const connectorLayer = layerConfig.pipe(Layer.provide(apiLayer)); const configProvider = ConfigProvider.fromUnknown({ TEMPLATE_API_BASE_URL: "https://jsonplaceholder.typicode.com", TEMPLATE_API_TOKEN: "test", @@ -40,11 +40,11 @@ describe("producer-template webhook", () => { return Effect.gen(function* () { const { publishedRef, done, layer } = yield* makeTestPublisher(1); const { connector, routes } = yield* TemplateConnector; - const runLayer = Layer.mergeAll(StateStoreInMemory, layer, runtimeLayer); + const runLayer = Layer.mergeAll(Ingestion.layerMemory, layer, runtimeLayer); yield* Effect.gen(function* () { yield* Effect.forkScoped( - runConnector(connector, { + Ingestion.runConnector(connector, { initialCutoff: new Date(), webhook: { routes, @@ -66,10 +66,13 @@ describe("producer-template webhook", () => { expect(published[0]?.name).toBe("posts"); }).pipe(Effect.provide(runLayer)); }).pipe( - Effect.provide(connectorLayer), - Effect.provide(runtimeLayer), - Effect.provideService(ConfigProvider.ConfigProvider, configProvider), + Effect.provide( + connectorLayer.pipe( + Layer.provide(runtimeLayer), + Layer.provide(ConfigProvider.layer(configProvider)), + ), + ), Effect.scoped, - ) as Effect.Effect; + ); }); }); From 5bd79e18d54dbb981a6e4a5bcc3cac7645cf11af Mon Sep 17 00:00:00 2001 From: jaipaljadeja Date: Thu, 30 Apr 2026 14:51:12 +0530 Subject: [PATCH 08/12] skill: update ack skill --- .agents/skills/airfoil-kit/README.md | 43 +- .agents/skills/airfoil-kit/SKILL.md | 22 +- .../airfoil-kit/assets/rename-checklist.md | 7 +- .../references/connector-kit-api.md | 120 ++--- .../references/effect-v4-essentials.md | 438 ++++++++--------- .../airfoil-kit/references/effect-vcr-api.md | 107 +++-- .../airfoil-kit/references/example-auth.md | 14 +- 
.../references/example-producer-polar.md | 363 ++++---------- .../skills/airfoil-kit/references/patterns.md | 450 +++++++++++------- .../skills/airfoil-kit/references/playbook.md | 20 +- .../references/template-walkthrough.md | 279 +++++------ .../airfoil-kit/references/vcr-workflow.md | 89 ++-- .../skills/airfoil-kit/references/webhooks.md | 21 +- 13 files changed, 975 insertions(+), 998 deletions(-) diff --git a/.agents/skills/airfoil-kit/README.md b/.agents/skills/airfoil-kit/README.md index 7a7002b..64a39dd 100644 --- a/.agents/skills/airfoil-kit/README.md +++ b/.agents/skills/airfoil-kit/README.md @@ -8,7 +8,8 @@ Airfoil producer connector end-to-end. - Confirms no existing implementation is being copied. - Copies `templates/producer-template/` into `connectors/producer-/`. - Helps you research the target API and derive schemas from recorded traffic. -- Wires Effect v4 `Config`, API clients, `WebhookRoute`, and streams. +- Wires current Effect v4 `Config`, API client layers, `Webhook.route(...)`, + connector layers, and streams. - Guides deterministic replay testing (VCR for REST/GraphQL, fixtures/mocks for gRPC). - Enforces a Definition of Done before declaring the task complete. @@ -29,6 +30,42 @@ Canonical process docs: Example-oriented docs are optional aids, not normative contracts. 
+## Public package surfaces you should know + +Current root surfaces used most often by connector work: + +- `@useairfoil/connector-kit` + - core exports flattened at root + - `Ingestion` + - `Publisher` + - `Streams` + - `Webhook` + - flat root errors +- `@useairfoil/effect-vcr` + - `CassetteStore` + - `FileSystemCassetteStore` + - `VcrHttpClient` + - flat root VCR types + - focused subpath exports for cassette store, file-system cassette store, + types, and VCR HTTP client +- `@useairfoil/wings` + - `Cluster` + - `ClusterClient` + - `WingsClient` + - `Arrow` + - `Partition` + - `Schema` + - `Topic` + - flat root errors +- `@useairfoil/flight` + - `ArrowFlightClient` + - `ArrowFlightSqlClient` + - `FlightClientError` + - root encoder/proto exports and typed client options + +When writing examples or guidance, prefer the actual current package surface +over historical helper names or internal file-level imports. + ## Files ``` @@ -40,9 +77,9 @@ references/ ├── api-mode-graphql.md # GraphQL implementation contract ├── api-mode-grpc.md # gRPC implementation contract ├── connector-kit-api.md # exhaustive @useairfoil/connector-kit docs -├── effect-vcr-api.md # exhaustive @useairfoil/effect-vcr docs +├── effect-vcr-api.md # current @useairfoil/effect-vcr docs and wiring ├── effect-v4-essentials.md # Effect v4 idioms relevant to connectors -├── patterns.md # shared patterns (cursor, cutoff, streams) +├── patterns.md # shared naming, layer, cursor, cutoff, and stream patterns ├── webhooks.md # WebhookRoute + signature verification ├── vcr-workflow.md # record/replay + ACK_DISABLE_VCR ├── api-research.md # how to learn a real API's shape diff --git a/.agents/skills/airfoil-kit/SKILL.md b/.agents/skills/airfoil-kit/SKILL.md index 6b08184..29a0fee 100644 --- a/.agents/skills/airfoil-kit/SKILL.md +++ b/.agents/skills/airfoil-kit/SKILL.md @@ -10,6 +10,10 @@ inside this monorepo. Work in small, verified steps. 
Use the template as your starting point, never guess API shapes, and keep changes aligned with the existing patterns in `connectors/producer-polar/`. +The current repo shape matters more than historical examples. When in doubt, +follow the current source packages and the refreshed reference docs in this +skill. + --- ## Hard rules (do not violate) @@ -25,7 +29,9 @@ existing patterns in `connectors/producer-polar/`. 3. **Use Effect v4 only** (`effect@4.x`, `@effect/vitest@4.x`, `@effect/platform-*@4.x`). No legacy `@effect/platform`, `@effect/schema`, or Effect v2/v3 patterns. Read [`references/effect-v4-essentials.md`](./references/effect-v4-essentials.md) - whenever you reach for a new Effect module. + whenever you reach for a new Effect module. For Effect guidance, consult + `effect-smol` only. Do not use the older official Effect docs as source of + truth for this repo right now. 4. **No `process.env` reads in connector code or tests.** Use `Config`/`ConfigProvider` everywhere. Sandbox/runtime layers attach `ConfigProvider.fromEnv()`; tests attach `ConfigProvider.fromUnknown({ ... })` @@ -68,6 +74,15 @@ existing patterns in `connectors/producer-polar/`. item in [`references/definition-of-done.md`](./references/definition-of-done.md) passes (lint, typecheck, build, test:ci, and mode-appropriate deterministic replay: VCR for REST/GraphQL, fixtures or mock servers for gRPC). +14. **Use current names.** Prefer `layerApiClient(config)`, `layerConfig`, + `Ingestion.runConnector(...)`, `Ingestion.layerMemory`, + `Publisher.Publisher`, and `Webhook.route(...)`. +15. **Use correct layer semantics.** `Layer.mergeAll(...)` is for independent + layers. If a layer needs another to build, satisfy that dependency with + `Layer.provide(...)` before merging. +16. **Do not hide dependency graph mistakes behind casts.** If a runtime or + test entrypoint seems to need `as Effect.Effect<...>`, inspect the layer + graph first. 
--- @@ -108,14 +123,15 @@ existing patterns in `connectors/producer-polar/`. [`references/patterns.md`](./references/patterns.md), [`references/webhooks.md`](./references/webhooks.md) 10. **Update the sandbox runner** — rename config names and port, keep the - telemetry + console publisher boilerplate. + telemetry + console publisher shape, and preserve the dependency graph. 11. **Write tests** — - REST/GraphQL: `api.vcr.test.ts` replays the backfill path. - gRPC: deterministic fixture/mock-server tests cover equivalent paths. - `webhook.test.ts` exercises webhook endpoint behavior in-memory. Switch to replay mode (or fixture-only deterministic mode) before committing. -12. **Run the CI gate locally** — `pnpm run lint && pnpm run typecheck && pnpm run build && pnpm run test:ci`. +12. **Run local verification in order** — `pnpm install`, then the relevant + `build`, `typecheck`, `test:ci`, format, and lint checks. Every one must pass. → [`references/definition-of-done.md`](./references/definition-of-done.md) A detailed, numbered version of this flow lives at diff --git a/.agents/skills/airfoil-kit/assets/rename-checklist.md b/.agents/skills/airfoil-kit/assets/rename-checklist.md index 67093bf..d757c9e 100644 --- a/.agents/skills/airfoil-kit/assets/rename-checklist.md +++ b/.agents/skills/airfoil-kit/assets/rename-checklist.md @@ -30,13 +30,13 @@ rg -l "template" connectors/producer- --glob '!**/__cassettes__' --glob | `@useairfoil/producer-template` | `@useairfoil/producer-` | | `TEMPLATE_` (env prefix) | `_` | | `TemplateApiClient` | `ApiClient` | -| `TemplateApiClientConfig` | `ApiClientConfig` | +| `layerApiClient` | `layerApiClient` | | `TemplateApiClientService` | `ApiClientService` | | `TemplateListPage` | `ListPage` | | `TemplateConfig` (type) | `Config` | | `TemplateConfigConfig` (Config value) | `ConfigConfig` | | `TemplateConnector` (service tag) | `Connector` | -| `TemplateConnectorConfig` (layer factory) | `ConnectorConfig` | +| `layerConfig` | 
`layerConfig` | | `TemplateConnectorRuntime` | `ConnectorRuntime` | | `makeTemplateConnector` | `makeConnector` | | `Template` (any other identifier prefix) | `` | @@ -92,7 +92,8 @@ recreate it. Rewrite `connectors/producer-/README.md`: - Drop every JSONPlaceholder reference. -- Document the real API entities, auth, base URLs, env vars. +- Document the current public exports, real API entities, auth, runtime + wiring, base URLs, and env vars. - List known limitations specific to the target (rate limits, missing historical data, sandbox quirks). diff --git a/.agents/skills/airfoil-kit/references/connector-kit-api.md b/.agents/skills/airfoil-kit/references/connector-kit-api.md index 688b26d..2a789eb 100644 --- a/.agents/skills/airfoil-kit/references/connector-kit-api.md +++ b/.agents/skills/airfoil-kit/references/connector-kit-api.md @@ -11,16 +11,10 @@ import { defineConnector, defineEntity, defineEvent, - makePullStream, - makeWebhookQueue, + Ingestion, Publisher, - runConnector, - StateStore, - StateStoreInMemory, - WingsPublisherLayer, - ConnectorRuntimeContext, - ConnectorRuntimeContextLayer, - buildWebhookRouter, + Streams, + Webhook, } from "@useairfoil/connector-kit"; import type { @@ -37,11 +31,8 @@ import type { IngestionState, LiveSource, LiveStream, - RunConnectorOptions, StreamState, Transform, - WebhookRoute, - WebhookStream, } from "@useairfoil/connector-kit"; ``` @@ -232,12 +223,12 @@ runConnector( ): Effect.Effect; // With webhook: also requires HttpServer -runConnector( +runConnector( connector, options: { initialCutoff?: Cursor; webhook: { - routes: ReadonlyArray>; + routes: ReadonlyArray; healthPath?: HttpRouter.PathInput; // default "/health" disableHttpLogger?: boolean; // default true }; @@ -247,15 +238,35 @@ runConnector( Internally: -- Provides `ConnectorRuntimeContextLayer(connector)` so downstream spans can - tag metrics with `connector.name`. 
+- Provides an internal connector runtime context so downstream spans can tag + metrics with `connector.name`. - Wraps the whole run in an `Effect.withSpan("connector.run", ...)`. - Emits `connector_batches_total`, `connector_rows_total`, and `connector_batch_size` via `effect/Metric`. - For webhooks, composes `buildWebhookRouter(routes)` with a `/health` route and serves it via `HttpRouter.serve(app, { disableLogger })`. -### `RunConnectorOptions` +Current runtime composition pattern around `runConnector(...)`: + +```ts +const ConnectorLayer = layerConfig.pipe(Layer.provide(EnvLayer)); + +const program = Effect.gen(function* () { + const { connector, routes } = yield* MyConnector; + const serverLayer = NodeHttpServer.layer(createServer, { port: 8080 }); + + return yield* Ingestion.runConnector(connector, { + initialCutoff: new Date(), + webhook: { + routes, + healthPath: "/health", + disableHttpLogger: true, + }, + }).pipe(Effect.provide(serverLayer)); +}); +``` + +### `Ingestion.RunConnectorOptions` Exposed type for callers who build options programmatically. @@ -282,7 +293,7 @@ class StateStore extends Context.Service< Keyed by entity/event name. One row per stream. -### `StateStoreInMemory` +### `Ingestion.layerMemory` In-process `Map` backed `StateStore` layer. Use for the sandbox runner and tests. Production deployments provide a durable @@ -310,10 +321,10 @@ class Publisher extends Context.Service< `PublishAck = { readonly success: boolean }`. The engine fails the stream if `publish` fails. -### `WingsPublisherLayer(config)` +### `Publisher.layerWings(config)` ```ts -WingsPublisherLayer({ +Publisher.layerWings({ connector, topics: { customers: customerTopic, orders: orderTopic }, partitionValues: { customers: "account_id" }, @@ -323,24 +334,27 @@ WingsPublisherLayer({ Production-grade publisher that fans each entity into a Wings topic. For the sandbox / tests, use a hand-written console publisher instead. 
+Current tag access pattern in this repo is `Publisher.Publisher` from the root +module namespace. + --- ## Streams -### `makeWebhookQueue(options?)` +### `Streams.makeWebhookQueue(options?)` ```ts -makeWebhookQueue({ capacity?: number }): Effect.Effect>; +Streams.makeWebhookQueue({ capacity?: number }): Effect.Effect>; ``` Creates a bounded `Queue` (default capacity 1024) and its `Stream.fromQueue` view. Always keep the queue bounded — unbounded queues can let a noisy webhook drown the publisher. -### `makePullStream(options)` +### `Streams.makePullStream(options)` ```ts -makePullStream({ +Streams.makePullStream({ initialCursor?: Cursor, fetchPage: (cursor: Cursor | undefined) => Effect.Effect, ConnectorError, R>, }): Stream.Stream, ConnectorError, R>; @@ -360,14 +374,14 @@ list endpoint. ## Webhooks -### `WebhookRoute` +### `Webhook.WebhookRoute` ```ts -type WebhookRoute = { +type WebhookRoute> = { readonly path: HttpRouter.PathInput; - readonly schema: Schema.Schema; + readonly schema: S; readonly handle: ( - payload: TPayload, + payload: Schema.Schema.Type, request: HttpServerRequest.HttpServerRequest, rawBody?: Uint8Array, ) => Effect.Effect; @@ -378,7 +392,7 @@ The framework decodes the request body, validates against `schema`, and invokes `handle(payload, request, rawBody)`. Use `rawBody` for HMAC verification; use `payload` for dispatch. -### `buildWebhookRouter(routes)` +### `Webhook.buildWebhookRouter(routes)` Low-level helper that turns an array of routes into an `HttpRouter` Layer. `runConnector(...)` uses this internally; you rarely call it directly. @@ -387,19 +401,6 @@ Low-level helper that turns an array of routes into an `HttpRouter` Layer. ## Runtime context -### `ConnectorRuntimeContext` - -Service tag exposing `{ connector: ConnectorDefinition }`. The engine sets -this via `ConnectorRuntimeContextLayer(connector)`. Metrics attributes use -it to tag batches with `connector.name`. 
- -### `ConnectorRuntimeContextLayer(connector)` - -Returns a `Layer.succeed(ConnectorRuntimeContext)({ connector })`. Call this -in custom test harnesses if you bypass `runConnector`. - ---- - ## Observability (provided by the engine) ### Spans @@ -429,27 +430,36 @@ sandbox uses `Observability.Otlp.layerJson({ baseUrl, resource })` from ## Typical composition recipe ```ts -const runtimeLayer = Layer.mergeAll( - StateStoreInMemory, - ConsolePublisherLayer, // or WingsPublisherLayer(...) - MyConnectorConfig(), // Layer +const EnvLayer = Layer.mergeAll( + FetchHttpClient.layer, + Layer.succeed(ConfigProvider.ConfigProvider, ConfigProvider.fromEnv()), +) + +const ConnectorLayer = layerConfig.pipe(Layer.provide(EnvLayer)) + +const TelemetryLayer = Layer.unwrap(...).pipe(Layer.provide(EnvLayer)) + +const RuntimeLayer = Layer.mergeAll( + Ingestion.layerMemory, + ConsolePublisherLayer, // or Publisher.layerWings(...) + ConnectorLayer, Logger.layer([Logger.consolePretty()]), - TelemetryLayer, // optional - Layer.mergeAll( - FetchHttpClient.layer, - Layer.succeed(ConfigProvider.ConfigProvider, ConfigProvider.fromEnv()), - ), + TelemetryLayer, ); const program = Effect.gen(function* () { const { connector, routes } = yield* MyConnector; - return yield* runConnector(connector, { + return yield* Ingestion.runConnector(connector, { initialCutoff: new Date(), - webhook: { routes }, + webhook: { + routes, + healthPath: "/health", + disableHttpLogger: true, + }, }).pipe(Effect.provide(NodeHttpServer.layer(createServer, { port: 8080 }))); -}); +}).pipe(Effect.annotateLogs({ component: "producer-foo" })); -Effect.runPromise(Effect.scoped(program).pipe(Effect.provide(runtimeLayer))); +Effect.runPromise(Effect.scoped(program).pipe(Effect.provide(RuntimeLayer))); ``` See `connectors/producer-polar/src/sandbox.ts` for the live reference. 
diff --git a/.agents/skills/airfoil-kit/references/effect-v4-essentials.md b/.agents/skills/airfoil-kit/references/effect-v4-essentials.md index 6ab8497..fb67627 100644 --- a/.agents/skills/airfoil-kit/references/effect-v4-essentials.md +++ b/.agents/skills/airfoil-kit/references/effect-v4-essentials.md @@ -1,325 +1,271 @@ # effect-v4-essentials -The SDK is pinned to **Effect v4 beta** (`effect@4.0.0-beta.54`). Many -patterns changed from v2/v3. This file is the short list of idioms you -**must** use in connector code. +The SDK is pinned to Effect v4 beta (`effect@4.0.0-beta.54`). This file is the +short list of Effect rules and idioms that matter for connector work in this +repo. -For deep dives, read: +Primary upstream reference for Effect-specific questions: -- Effect v4 source/docs repo only: `https://github.com/effect-ts/effect-smol` -- Context7 Effect v4 LLM docs: - `https://context7.com/effect-ts/effect-smol/llms.txt?tokens=10000` -- Context7 API guide (if using API directly): - `https://context7.com/docs/api-guide` +- `https://github.com/Effect-TS/effect-smol` -Do **not** treat legacy Effect docs/repositories as Effect v4 source of truth -for this repo. They may reflect older API generations. +Use that repo as the source of truth when checking naming, layer semantics, +service patterns, and current HTTP/Config APIs. -## Prerequisite check (Effect source mirror) +Do not use the older official Effect docs as authority for this repo right now. +They lag the APIs and examples we are actually using. If `effect-smol` and the +older official docs disagree, follow `effect-smol`. -Before Effect-related implementation or refactors, verify a local mirror exists -at `.temp/effect-smol` (repo-local, disposable) and points to `effect-smol`. +## 1. Core rules -If missing, clone it: +1. Use `Context.Service` for repo services. +2. Use `Effect.fnUntraced` for library and hot-path helpers. +3. Use `return yield*` for terminal effects inside generators. +4. 
Do not use `try/catch` inside `Effect.gen`. +5. Use `layer` / `layerConfig` naming conventions where appropriate. +6. Use `Layer.provide(...)` to satisfy dependencies; do not assume sibling + `Layer.mergeAll(...)` layers satisfy each other. +7. Prefer composing layers up front and doing one final `Effect.provide(...)` + at the effect site. -```bash -git clone https://github.com/Effect-TS/effect-smol.git .temp/effect-smol -``` +## 2. Services -If present, refresh it before deep API lookups: +```ts +export type MyApiClientService = { + readonly fetchJson: (path: string) => Effect.Effect; +}; -```bash -git -C .temp/effect-smol pull --ff-only +export class MyApiClient extends Context.Service()( + "@useairfoil/producer-foo/MyApiClient", +) {} ``` -Use this mirror as local, greppable ground truth when MCP tools are flaky. -It is disposable and can be deleted any time: +Use the package-scoped string identifier form: + +- `@useairfoil/producer-foo/MyApiClient` +- `@useairfoil/producer-foo/MyConnector` -```bash -rm -rf .temp/effect-smol +## 3. Config + +```ts +export const MyConfigConfig = Config.all({ + apiBaseUrl: Config.string("FOO_API_BASE_URL"), + apiToken: Config.string("FOO_API_TOKEN"), + webhookSecret: Config.option(Config.string("FOO_WEBHOOK_SECRET")), +}); ``` -Context7 quick use (recommended for Effect v4 content): +Runtime wiring: -1. Resolve library id for Effect docs (`effect-smol`) and query docs. -2. Ask focused questions (service tags, Config patterns, HTTP paths). -3. Cross-check answers against local package APIs before coding. +```ts +const EnvLayer = Layer.mergeAll( + FetchHttpClient.layer, + Layer.succeed(ConfigProvider.ConfigProvider, ConfigProvider.fromEnv()), +); +``` -DeepWiki MCP quick use (optional fallback): +Never use `process.env` in connector code or tests. -1. Ensure the repo is indexed in DeepWiki (open - `https://deepwiki.com/effect-ts/effect-smol` once if needed). -2. 
Read available topics: `deepwiki_read_wiki_structure({ repoName: "effect-ts/effect-smol" })`. -3. Ask focused questions: `deepwiki_ask_question({ repoName: "effect-ts/effect-smol", question: "..." })`. -4. Cross-check answers against local package APIs before coding. +## 4. API client layer -If Context7/DeepWiki are unavailable, fall back to: +```ts +export const makeMyApiClient = ( + config: MyConfig, +): Effect.Effect => + Effect.fnUntraced(function* () { + const client = (yield* HttpClient.HttpClient).pipe( + HttpClient.mapRequest(HttpClientRequest.prependUrl(config.apiBaseUrl)), + HttpClient.mapRequest(HttpClientRequest.acceptJson), + ); + + return { fetchJson, fetchList }; + })(); + +export const layerApiClient = ( + config: MyConfig, +): Layer.Layer => + Layer.effect(MyApiClient)(makeMyApiClient(config)); +``` -1. Local source in this repo (especially `packages/connector-kit/src/**` and - `packages/effect-vcr/src/**`). -2. Official Effect docs + GitHub source. +Keep transport policy here: -Never block implementation on Context7/DeepWiki availability. +- auth +- base URL +- decode +- pagination mapping +- retry/timeout choices +- error mapping ---- +## 5. Connector layer -## API integration contract (checklist) +```ts +export const layerConfig: Layer.Layer = + Layer.effect(MyConnector)( + Effect.fnUntraced(function* () { + const config = yield* MyConfigConfig; + return yield* makeMyConnector(config).pipe(Effect.provide(layerApiClient(config))); + })().pipe( + Effect.mapError((error) => + error instanceof ConnectorError + ? error + : new ConnectorError({ message: "My config failed", cause: error }), + ), + ), + ); +``` -Apply this checklist for REST, GraphQL, and gRPC connectors: +Current repo naming is: -1. **Config-only runtime/test inputs:** no `process.env` reads in connector - code or tests; use `Config` and `ConfigProvider`. -2. **Service-layer clients:** build API clients as `Context.Service` + - `Layer.effect(...)`, not ad-hoc singleton objects. -3. 
**Boundary decode:** parse external payloads with `Schema` at API boundaries - before they enter stream/entity logic. -4. **Typed errors:** map unknown/transport/decode failures to tagged domain - errors (`ConnectorError` and/or connector-specific tagged errors). -5. **Central transport policy:** retries, timeouts/deadlines, auth headers, and - rate-limit behavior are configured in the API client layer, not scattered - across connector orchestration code. +- `layerApiClient(config)` +- `layerConfig` ---- +Avoid stale names like `XApiClientConfig` and `XConnectorConfig()`. -## 1. Imports you will use +## 6. Layer semantics -```ts -import { - Config, - ConfigProvider, - Context, - DateTime, - Deferred, - Effect, - Layer, - Logger, - Metric, - Option, - Queue, - Ref, - Stream, -} from "effect"; - -import * as Schema from "effect/Schema"; -import * as Observability from "effect/unstable/observability"; - -import { - FetchHttpClient, - HttpClient, - HttpClientRequest, - HttpClientResponse, - type HttpRouter, - HttpServer, - HttpServerRequest, - HttpServerResponse, - type Headers, -} from "effect/unstable/http"; - -import { NodeHttpServer, NodeHttpClient, NodeFileSystem } from "@effect/platform-node"; - -import { describe, expect, it } from "@effect/vitest"; -``` +Correct: -Notes: +```ts +const EnvLayer = Layer.mergeAll( + FetchHttpClient.layer, + Layer.succeed(ConfigProvider.ConfigProvider, ConfigProvider.fromEnv()), +); -- HTTP lives under `effect/unstable/http` in v4. Do not import from - `@effect/platform` (that was the v2/v3 location). -- `Schema` lives at `effect/Schema`, not `@effect/schema`. -- `Context.Service` replaces `ServiceMap.Service` patterns from older versions. +const ConnectorLayer = layerConfig.pipe(Layer.provide(EnvLayer)); +``` -## 2. 
Defining services +Incorrect: ```ts -export class MyApiClient extends Context.Service()( - "@useairfoil/producer-foo/MyApiClient", -) {} +const RuntimeLayer = Layer.mergeAll(layerConfig, EnvLayer); ``` -- The string tag must be unique across all services. -- Use `yield* MyApiClient` inside `Effect.gen(function* () { ... })` to - access the service. +`Layer.mergeAll(...)` combines independent layers. It does not satisfy sibling +layer dependencies. -## 3. Defining typed errors +Use: -```ts -import { Data } from "effect"; +- `Layer.provide(depLayer)` to satisfy a dependent layer +- `Layer.provideMerge(depLayer)` when you also want `depLayer`'s outputs kept + in the resulting layer -export class MyError extends Data.TaggedError("MyError")<{ - readonly message: string; - readonly cause?: unknown; -}> {} -``` +## 7. Generators -`ConnectorError` is defined this way. Prefer tagged errors over plain -classes; they play well with `Effect.catchTag`. +Use `Effect.gen(function* () { ... })` for orchestration. -## 4. Config and ConfigProvider +Use `Effect.fnUntraced(function* () { ... })()` for reusable library helpers. + +Always terminate explicitly: ```ts -export const MyConfig = Config.all({ - apiToken: Config.string("FOO_API_TOKEN"), - apiBaseUrl: Config.string("FOO_API_BASE_URL").pipe(Config.withDefault("https://api.foo.com")), - webhookSecret: Config.option(Config.string("FOO_WEBHOOK_SECRET")), -}); +if (!rawBody) { + return yield * Effect.fail(new ConnectorError({ message: "Webhook raw body is required" })); +} ``` -- `Config.option(...)` returns `Option.Option` — check with - `Option.isSome` / `Option.isNone`. -- `Config.withDefault(v)` makes a field optional with a fallback. -- `Config.port(name)` parses integers. -- `Config.boolean(name)` parses `"true"` / `"false"`. - -Runtime wiring: +Do not write: ```ts -Layer.succeed( - ConfigProvider.ConfigProvider, - ConfigProvider.fromEnv(), // or fromUnknown({ FOO_API_TOKEN: "..." }) -); +yield* Effect.fail(...) 
``` -Never read `process.env` directly in library code; always go through -`Config`. +without `return` in a terminal branch. -## 5. Layers +## 8. Error handling -- `Layer.succeed(Tag)(impl)` — constant service. -- `Layer.effect(Tag)(effect)` — service built from an Effect. -- `Layer.mergeAll(a, b, c)` — union two or more layers. -- `Layer.provide(layer)` — provide a sub-layer that the outer layer depends on. -- `Layer.unwrap(Effect.gen(function* () { return Layer.mergeAll(...) }))` — - dynamically decide which layers to build based on config. -- `Layer.empty` — the no-op layer, useful in `Layer.unwrap` branches. +Expected failures should use typed error channels. -## 6. Effect.gen is the default style +Good: ```ts -Effect.gen(function* () { - const config = yield* MyConfig; - const api = yield* MyApiClient; - const rows = yield* api.fetchList(schema, path, options); - return rows; -}); +Effect.mapError( + (cause) => + new ConnectorError({ + message: "Shopify API request failed", + cause, + }), +); ``` -- Use `yield*` for every Effect, never `await`. -- Mapping simple values: `Effect.map`, `Effect.andThen`. -- Mapping errors: `Effect.mapError` or `Effect.catchTag`. - -## 7. 
HttpClient pipeline +Allowed synchronous wrapper when calling ambient code that may throw: ```ts -const client = (yield * HttpClient.HttpClient).pipe( - HttpClient.mapRequest(HttpClientRequest.prependUrl(baseUrl)), - HttpClient.mapRequest(HttpClientRequest.bearerToken(token)), - HttpClient.mapRequest(HttpClientRequest.acceptJson), -); - -const request = HttpClientRequest.get("/v1/things").pipe( - HttpClientRequest.setUrlParams({ page: "1" }), -); - -const rows = - yield * - Effect.scoped( - client.execute(request).pipe( - Effect.flatMap(HttpClientResponse.filterStatusOk), - Effect.flatMap((response) => response.json), - Effect.flatMap(Schema.decodeUnknownEffect(schema)), - ), - ); +Effect.try({ + try: () => validateSignature(...), + catch: (cause) => + new ConnectorError({ + message: "Webhook verification failed", + cause, + }), +}) ``` -- Always `Effect.scoped(...)` around `client.execute(...)` unless the - surrounding context is already scoped. -- `HttpClient.transform((effect, request) => ...)` lets you wrap requests - (that's how VCR is built). +Do not use `Effect.die(...)` for expected connector failures. -## 8. Streams +## 9. Webhooks -- `Stream.fromEffect(e)` — single-element stream. -- `Stream.fromQueue(q)` — stream that emits whatever is pushed to the queue. -- `Stream.unfold(state, step)` — the building block behind `makePullStream`. -- `Stream.merge(a, b)` — run two streams concurrently. -- `Stream.map(s, f)`, `Stream.mapEffect(s, f)` — transform batches. -- `Stream.tap(s, f)` — side effect on each element. -- `Stream.runForEach(s, f)` — drain. +Use `Webhook.route({...})`. -## 9. Concurrency primitives +Signature verification rules: -- `Ref.make(value)`, `Ref.get(ref)`, `Ref.update(ref, fn)`, - `Ref.updateAndGet(ref, fn)`. -- `Deferred.make()`, `Deferred.succeed(d, v)`, `Deferred.await(d)`. -- `Queue.bounded(capacity)`, `Queue.offer(q, v)`, `Queue.take(q)`. 
-- `Effect.forkScoped(effect)` — spawn in the current scope; the fiber is - interrupted when the scope closes. -- `Effect.all([a, b], { concurrency: "unbounded" })` — run in parallel. +- verify before side effects +- use raw request bytes when required by provider docs +- fail closed if verification is enabled and inputs are missing +- log a warning, do not crash, when the secret is intentionally unset in local + development -## 10. Schema +## 10. Runtime shape ```ts -const Post = Schema.Struct({ - id: Schema.Number, - title: Schema.String, - body: Schema.NullOr(Schema.String), - tags: Schema.Array(Schema.String), - status: Schema.Literals(["draft", "published"]), - metadata: Schema.Record(Schema.String, Schema.Any), - nested: Schema.optional(Schema.Struct({ foo: Schema.String })), +const program = Effect.gen(function* () { + const { connector, routes } = yield* MyConnector; + const serverLayer = NodeHttpServer.layer(createServer, { port: 8080 }); + + return yield* Ingestion.runConnector(connector, { + initialCutoff: new Date(), + webhook: { + routes, + healthPath: "/health", + disableHttpLogger: true, + }, + }).pipe(Effect.provide(serverLayer)); }); -type Post = Schema.Schema.Type; -``` - -- `Schema.decodeUnknownEffect(schema)(value)` returns - `Effect.Effect`. -- Use `Schema.Any` for fields you don't want to validate (common for Polar's - `product` / `discount` fields which are large nested objects). +const RuntimeLayer = Layer.mergeAll( + Ingestion.layerMemory, + ConsolePublisherLayer, + ConnectorLayer, + Logger.layer([Logger.consolePretty()]), + TelemetryLayer, +); -## 11. Observability +Effect.runPromise(Effect.scoped(program).pipe(Effect.provide(RuntimeLayer))); +``` -- `Effect.withSpan("span.name", { attributes: {...} })` — wrap an effect - in a tracing span. -- `Metric.counter("name", { description })`, `Metric.histogram("name", { boundaries })`, - `Metric.update(metric, value)`, `Metric.withAttributes(metric, attrs)`. 
-- Provide telemetry via `Observability.Otlp.layerJson({ baseUrl, resource })` - from `effect/unstable/observability`. +## 11. VCR runtime shape -Avoid high-cardinality labels (user ids, request ids, timestamps). +```ts +const cassetteStoreLayer = FileSystemCassetteStore.layer().pipe(Layer.provide(NodeServices.layer)); -## 12. Vitest + Effect +const vcrRuntimeLayer = Layer.mergeAll( + FetchHttpClient.layer, + NodeServices.layer, + cassetteStoreLayer, +); -```ts -import { describe, expect, it } from "@effect/vitest"; - -describe("things", () => { - it.effect("works", () => - Effect.gen(function* () { - const result = yield* something; - expect(result).toBe(42); - }).pipe(Effect.provide(TestLayer)), - ); -}); +const vcrLayer = VcrHttpClient.layer({ + vcrName: "producer-foo", + mode: "replay", +}).pipe(Layer.provide(vcrRuntimeLayer)); ``` -- `it.effect` expects an Effect. The framework runs it with a default - runtime and fails the test on any unhandled defect. -- To run your own scoped effect, wrap with `Effect.scoped`. - ---- +## 12. Tests -## What **not** to do +Prefer: -- `import { ... } from "@effect/platform"` — v2/v3 only. -- `import * as Schema from "@effect/schema"` — v2/v3 only. -- `ServiceMap.Service` examples — use `Context.Service` instead. -- `process.env.FOO` in library code — always `Config.string("FOO")`. -- `Effect.die(new Error(...))` for expected failures — use tagged errors. -- `async/await` inside `Effect.gen` — use `yield*`. -- Mutating a `Ref` without `Ref.update` — the whole point is atomic updates. -- `Stream.bracket`, `Stream.ensuring` from v2 — v4 uses `Effect.scoped` - and `Scope` instead. +- build a focused test layer up front +- do one final `Effect.provide(...)` +- use `NodeHttpServer.layerTest` for webhook tests +- use VCR replay tests for API client paths -When a pattern you find online doesn't match what's in the repo, trust -the repo: `connectors/producer-polar/` and `packages/connector-kit/` -are the ground truth. 
+Avoid hiding missing dependencies behind `as Effect.Effect<...>` casts. If a +cast seems necessary, inspect the layer graph first. diff --git a/.agents/skills/airfoil-kit/references/effect-vcr-api.md b/.agents/skills/airfoil-kit/references/effect-vcr-api.md index 78abe55..efc3107 100644 --- a/.agents/skills/airfoil-kit/references/effect-vcr-api.md +++ b/.agents/skills/airfoil-kit/references/effect-vcr-api.md @@ -4,58 +4,100 @@ Reference notes for `@useairfoil/effect-vcr`. ## Package exports -From `packages/effect-vcr/src/index.ts`: +Root exports: -- `CassetteStore` namespace (`cassette-store.ts`) -- `FileSystemCassetteStore` namespace (`file-system-cassette-store.ts`) -- `VcrHttpClient` namespace (`vcr-http-client.ts`) +- `CassetteStore` +- `FileSystemCassetteStore` +- `VcrHttpClient` +- `VcrConfig` +- `VcrMode` +- `VcrRequest` +- `VcrResponse` +- `VcrEntry` +- `Cassette` +- `CassetteFile` -## Core types and services +Focused subpath exports: -- `CassetteStore.CassetteStore` service tag +- `@useairfoil/effect-vcr/cassette-store` +- `@useairfoil/effect-vcr/file-system-cassette-store` +- `@useairfoil/effect-vcr/types` +- `@useairfoil/effect-vcr/vcr-http-client` + +## Core services and helpers + +- `CassetteStore.CassetteStore` - `CassetteStore.CassetteStoreError` - `CassetteStore.createEmptyCassette()` - `CassetteStore.createEmptyCassetteFile()` +- `FileSystemCassetteStore.layer({ cassetteDir? })` +- `VcrHttpClient.layer(config)` + +`FileSystemCassetteStore.layer(...)` provides a filesystem-backed cassette store. -`FileSystemCassetteStore.layer()` provides a filesystem-backed cassette store. 
+## `VcrConfig` + +```ts +type VcrConfig = { + readonly vcrName?: string; + readonly cassetteName?: string; + readonly mode?: "record" | "replay" | "auto"; + readonly redact?: { + readonly requestHeaders?: ReadonlyArray; + readonly responseHeaders?: ReadonlyArray; + readonly requestBodyKeys?: ReadonlyArray; + readonly responseBodyKeys?: ReadonlyArray; + }; + readonly matchIgnore?: { + readonly requestHeaders?: ReadonlyArray; + readonly requestBodyKeys?: ReadonlyArray; + }; + readonly match?: (request: VcrRequest, entry: VcrEntry) => boolean; +}; +``` -## VCR HTTP layer +Important behavior: -Use `VcrHttpClient.layer({ ... })` to wrap `HttpClient.HttpClient` with -record/replay behavior. +- `cassetteName: "users"` resolves to `users.cassette` +- `cassetteName: "users.cassette"` is preserved as-is +- when `cassetteName` is omitted in Vitest, the cassette file defaults to the + test file name and the current test name becomes the export key inside that + cassette file -Common config fields: +Defaults: -- `vcrName?: string` -- `cassetteName?: string` -- `mode?: "record" | "replay" | "auto"` -- `redact?: { requestHeaders?, responseHeaders?, requestBodyKeys?, responseBodyKeys? }` -- `matchIgnore?: { requestHeaders?, requestBodyKeys? 
}` -- `match?: (request, entry) => boolean` +- `mode` defaults to `"auto"` +- `authorization` is ignored for matching by default +- `authorization` is redacted from recorded request headers by default -## Typical test wiring +## Typical VCR runtime wiring ```ts import { NodeServices } from "@effect/platform-node"; import { FileSystemCassetteStore, VcrHttpClient } from "@useairfoil/effect-vcr"; -import { Effect, Layer } from "effect"; +import { Layer } from "effect"; import { FetchHttpClient } from "effect/unstable/http"; +const cassetteStoreLayer = FileSystemCassetteStore.layer().pipe(Layer.provide(NodeServices.layer)); + +const vcrRuntimeLayer = Layer.mergeAll( + FetchHttpClient.layer, + NodeServices.layer, + cassetteStoreLayer, +); + const vcrLayer = VcrHttpClient.layer({ vcrName: "producer-", mode: "replay", -}).pipe( - Layer.provideMerge(FileSystemCassetteStore.layer()), - Layer.provideMerge(FetchHttpClient.layer), - Layer.provideMerge(NodeServices.layer), -); +}).pipe(Layer.provide(vcrRuntimeLayer)); +``` -const program = Effect.gen(function* () { - // call connector runtime / stream logic that needs HttpClient -}); +Why this shape matters: -const runnable = program.pipe(Effect.provide(vcrLayer)); -``` +- `FileSystemCassetteStore.layer()` needs filesystem and `Path` +- `VcrHttpClient.layer(...)` needs a live `HttpClient`, `Path`, and a + `CassetteStore` +- sibling-merging these layers does not satisfy dependencies transitively ## Cassette naming @@ -68,9 +110,10 @@ Example: ## Environment behavior -- `ACK_DISABLE_VCR` can bypass VCR by `vcrName` (comma-separated list or `*`). -- In `auto` mode, missing cassette behavior is CI-sensitive (fails in CI, - records locally). 
+- `ACK_DISABLE_VCR` bypasses VCR by `vcrName` (`*` or comma-separated list) +- in `auto` mode, missing cassette behavior is CI-sensitive +- when VCR is disabled, the wrapped live client is returned directly and no + cassette read/write occurs ## Source of truth diff --git a/.agents/skills/airfoil-kit/references/example-auth.md b/.agents/skills/airfoil-kit/references/example-auth.md index 84b1b6a..f699d86 100644 --- a/.agents/skills/airfoil-kit/references/example-auth.md +++ b/.agents/skills/airfoil-kit/references/example-auth.md @@ -1,7 +1,7 @@ # example-auth Auth patterns expressed as Effect `Config` + `HttpClient.mapRequest`. -All patterns plug into the `XApiClientConfig(config)` factory layer from +All patterns plug into the current `layerApiClient(config)` factory layer from `api.ts`. Nothing here requires changes to the connector kit. These are illustrative implementation patterns, not a protocol contract. @@ -25,9 +25,9 @@ export const XConfigConfig = Config.all({ import { HttpClient, HttpClientRequest } from "effect/unstable/http"; import { Redacted } from "effect"; -export const XApiClientConfig = (config: XConfig) => +export const layerApiClient = (config: XConfig) => Layer.effect(XApiClient)( - Effect.gen(function* () { + Effect.fnUntraced(function* () { const httpClient = yield* HttpClient.HttpClient; const client = httpClient.pipe( HttpClient.mapRequest(HttpClientRequest.prependUrl(config.apiBaseUrl)), @@ -35,7 +35,7 @@ export const XApiClientConfig = (config: XConfig) => HttpClient.mapRequest(HttpClientRequest.acceptJson), ); // ... 
fetchJson, fetchList built from client - }), + })(), ); ``` @@ -98,13 +98,13 @@ export class XOAuthTokens extends Context.Service +export const layerOAuthTokens = (config: XConfig) => Layer.effect(XOAuthTokens)( - Effect.gen(function* () { + Effect.fnUntraced(function* () { const httpClient = yield* HttpClient.HttpClient; const initial = yield* exchangeRefreshToken(httpClient, config); return yield* Ref.make(initial); - }), + })(), ); ``` diff --git a/.agents/skills/airfoil-kit/references/example-producer-polar.md b/.agents/skills/airfoil-kit/references/example-producer-polar.md index c945708..62d4057 100644 --- a/.agents/skills/airfoil-kit/references/example-producer-polar.md +++ b/.agents/skills/airfoil-kit/references/example-producer-polar.md @@ -1,283 +1,122 @@ # example-producer-polar -Kitchen-sink walkthrough of `connectors/producer-polar/`. This connector -exercises nearly every feature of the kit: four entities, real HMAC -verification, optional config values, the Deferred-cutoff handoff from -webhooks to backfill, a sandbox base URL, and a VCR test suite. Read -this after the template walkthrough to see "what good looks like" for a -real connector. +Walkthrough of `connectors/producer-polar/`. This is the main reference +connector for the current repo shape. -Everything you see here is source-of-truth code. Reference the actual -files instead of re-typing blocks. 
+Use it as the example of: + +- multiple entities +- real webhook verification +- current `layerApiClient` / `layerConfig` naming +- current sandbox layer composition +- current VCR test runtime wiring ## File inventory -``` +```text connectors/producer-polar/ src/ - api.ts # 103 lines — PolarApiClient service + layer - connector.ts # 344 lines — PolarConfigConfig, webhook dispatch, runtime - index.ts # 8 lines — public re-exports - sandbox.ts # 132 lines — local runner with telemetry toggle - schemas.ts # 234 lines — four entity schemas + webhook payload union - streams.ts # 120 lines — makeEntityStreams, dispatch helpers + api.ts + connector.ts + index.ts + sandbox.ts + schemas.ts + streams.ts test/ - api.vcr.test.ts # 58 lines — per-entity replay tests - helpers.ts # 29 lines — test publisher - webhook.test.ts # 90 lines — end-to-end webhook dispatch test - __cassettes__/ # committed recorded responses - package.json, tsconfig.json, tsdown.config.ts, vitest.config.ts, README.md + api.vcr.test.ts + helpers.ts + webhook.test.ts + __cassettes__/ ``` -## `src/connector.ts` — the centerpiece +## `src/api.ts` -```45:50:connectors/producer-polar/src/connector.ts -export class PolarConnector extends Context.Service< - PolarConnector, - PolarConnectorRuntime ->()("@useairfoil/producer-polar/PolarConnector") {} -``` +Current public surface: -- `PolarConnector` is the service tag. It holds the fully-assembled - `{ connector, routes }` pair. Callers inject it into `runConnector`. 
- -```52:59:connectors/producer-polar/src/connector.ts -export const PolarConfigConfig = Config.all({ - accessToken: Config.string("POLAR_ACCESS_TOKEN"), - apiBaseUrl: Config.string("POLAR_API_BASE_URL").pipe( - Config.withDefault("https://sandbox-api.polar.sh/v1/"), - ), - organizationId: Config.option(Config.string("POLAR_ORGANIZATION_ID")), - webhookSecret: Config.option(Config.string("POLAR_WEBHOOK_SECRET")), -}); -``` +- `PolarApiClient` +- `makePolarApiClient(config)` +- `layerApiClient(config)` -- `Config.all({...})` composes the four required/optional env vars. -- `Config.withDefault` points at the sandbox by default — this is the - "sandbox archetype" (see `connector-archetypes.md`). -- `Config.option` lets the two optional fields be absent without - failing decode. - -```62:83:connectors/producer-polar/src/connector.ts -const verifyWebhookSignature = (options: { - readonly rawBody: Uint8Array; - readonly headers: Headers.Headers; - readonly secret: string; -}): Effect.Effect => - Effect.try({ - try: () => { - validateEvent( - Buffer.from(options.rawBody), - options.headers, - options.secret, - ); - }, - catch: (error) => - new ConnectorError({ - message: - error instanceof WebhookVerificationError - ? "Invalid Polar webhook signature" - : "Failed to validate Polar webhook", - cause: error, - }), - }); -``` +Pattern: -- Uses Polar's official SDK (`@polar-sh/sdk/webhooks.validateEvent`) - rather than rolling its own HMAC. Prefer official libs when the - platform ships one. -- Maps ambient errors into a typed `ConnectorError` with a meaningful - message. - -```86:220:connectors/producer-polar/src/connector.ts -const resolveWebhookDispatch = (options: { - readonly payload: WebhookPayload; - readonly customers: EntityStreams; - ... -}) => { - switch (payload.type) { - case "checkout.created": - case "checkout.updated": - ... 
- } -}; -``` +- build the configured `HttpClient` once +- expose `fetchJson` and `fetchList` +- decode at the boundary +- map failures to `ConnectorError` -- Exhaustive switch over every webhook type documented by Polar. -- Event types that fan out to the same entity are merged (e.g., all - `checkout.*` go to the `checkouts` stream). -- Types that Polar sends but we intentionally ignore (membership, - refunds, products) fall through to `Effect.void`. -- Unknown types hit the default branch and emit `logWarning` — a - deliberate trade-off: we don't fail on new webhook types, but we - make them visible. - -```223:321:connectors/producer-polar/src/connector.ts -const makePolarConnector = (config: PolarConfig) => - Effect.gen(function* () { - const api = yield* PolarApiClient; - const customerStreams = yield* makeEntityStreams({ api, schema: CustomerSchema, path: "customers/", cursorField: "created_at" }); - // ... three more entity streams - const connector = defineConnector({ - name: "producer-polar", - entities: [ defineEntity({...}), ... ], - events: [], - }); - const webhookRoute: WebhookRoute = { - path: "/webhooks/polar", - schema: WebhookPayloadSchema, - handle: (payload, request, rawBody) => - Effect.gen(function* () { - if (Option.isSome(config.webhookSecret) && rawBody) { - yield* verifyWebhookSignature({...}); - } - yield* resolveWebhookDispatch({...}); - }), - }; - return { connector, routes: [webhookRoute] }; - }); -``` +## `src/connector.ts` -- Four entities, each wired through `makeEntityStreams`. -- `cursorField: "created_at"` is Polar's monotonically-increasing field. -- A single webhook route handles all four entities. -- `Option.isSome(config.webhookSecret) && rawBody` gates verification — - missing secret in dev is a warn, not an error. 
- -```323:344:connectors/producer-polar/src/connector.ts -export const PolarConnectorConfig = (): Layer.Layer< - PolarConnector, - ConnectorError, - HttpClient.HttpClient -> => - Layer.effect(PolarConnector)( - Effect.gen(function* () { - const config = yield* PolarConfigConfig; - return yield* makePolarConnector(config).pipe( - Effect.provide(PolarApiClientConfig(config)), - ); - }).pipe( - Effect.mapError((error) => - error instanceof ConnectorError - ? error - : new ConnectorError({ message: "Polar config failed", cause: error }), - ), - ), - ); -``` +Current public surface: + +- `PolarConfig` +- `PolarConfigConfig` +- `PolarConnector` +- `layerConfig` +- `PolarConnectorRuntime` + +Important patterns: + +- `PolarConnector` is a `Context.Service` +- `layerConfig` decodes config and provides `layerApiClient(config)` +- routes are authored with `Webhook.route({...})` +- the route handler uses `Effect.fn("polar/webhook/handle")(... )` +- signature verification uses the official Polar SDK helper +- ignored events are explicit, not implicit + +## `src/streams.ts` + +This file shows the current generic stream helper shape: + +- `resolveCursor` +- `dispatchEntityWebhook` +- `makeEntityStreams` + +It also shows the current `Streams.*` namespaced connector-kit imports. + +## `src/sandbox.ts` + +This file is the current runtime reference for connectors. + +Key points: + +- `EnvLayer = Layer.mergeAll(FetchHttpClient.layer, ConfigProvider.fromEnv())` +- `ConnectorLayer = layerConfig.pipe(Layer.provide(EnvLayer))` +- `TelemetryLayer` is pre-provided with `EnvLayer` +- `RuntimeLayer` merges only already-satisfied layers +- entrypoint is `Effect.runPromise(Effect.scoped(program).pipe(Effect.provide(RuntimeLayer)))` + +If you are unsure how to compose layers, copy this shape. + +## `test/api.vcr.test.ts` + +This is the current reference for VCR-backed API tests. 
+ +Key points: + +- build `cassetteStoreLayer = FileSystemCassetteStore.layer().pipe(Layer.provide(NodeServices.layer))` +- build `vcrRuntimeLayer = Layer.mergeAll(FetchHttpClient.layer, NodeServices.layer, cassetteStoreLayer)` +- build `vcrLayer = VcrHttpClient.layer(...).pipe(Layer.provide(vcrRuntimeLayer))` +- provide `ConfigProvider.fromUnknown({ ... })` for hermetic config decode + +## `test/webhook.test.ts` + +This is the current reference for webhook tests. + +Key points: -- The public layer factory. Reads config, builds the API client layer - on the fly, produces the runtime. -- Narrows the error channel to `ConnectorError`. -- Requires `HttpClient.HttpClient` — callers supply this via - `FetchHttpClient.layer` or `VcrHttpClient.layer(...).pipe(Layer.provide(FetchHttpClient.layer))`. +- stub the API service with `Layer.succeed(PolarApiClient)(...)` +- build `connectorLayer = layerConfig.pipe(Layer.provide(apiLayer))` +- use `NodeHttpServer.layerTest` +- fork `Ingestion.runConnector(...)` +- call the in-process route with `HttpClientRequest.post(...)` -## `src/api.ts` — HTTP layer +## `README.md` -The shape is exactly the pattern described in `patterns.md` §4: +The README is the current style target for connector docs: -- `PolarApiClient` service tag. -- `fetchJson(schema, path, params?)` for single-resource fetches. -- `fetchList(schema, path, options)` for paginated lists — Polar uses - `page`/`limit` query params and returns `{ items, pagination }`. -- Base URL + bearer header are baked into the `HttpClient` via - `HttpClient.mapRequest(HttpClientRequest.prependUrl(...))` + - `HttpClientRequest.bearerToken(accessToken)`. -- `PolarApiClientConfig(config)` factory layer provides the service, - requiring `HttpClient.HttpClient` from below. - -Use this as the template for any bearer-token + page+limit API. - -## `src/schemas.ts` — data shapes - -- Four `Schema.Struct` definitions (Customer, Checkout, Subscription, - Order). 
-- A `WebhookPayloadSchema = Schema.Union([...])` that tags each payload - variant with its literal `type`. -- Ignored event types appear in a second `Schema.Struct` with - `type: Schema.Literal(...)` and an open `data: Schema.Any` — this - lets decode succeed so the connector can log+skip rather than crash. - -Patterns to steal: - -- Optional fields wrapped with `Schema.NullOr(...)` when the API returns - `null` for empty values. -- Timestamp fields typed as `Schema.String` (ISO-8601) rather than - `Schema.Date`, because the cursor is a string. - -## `src/streams.ts` — stream wiring - -- `resolveCursor(row, field)` extracts the cursor value from a row. -- `setCutoff(deferred, cursor)` is idempotent — safe to call on every - incoming webhook. -- `dispatchEntityWebhook({queue, cutoff, row, cursor})` sets the cutoff - and enqueues. -- `makeBackfillStream(...)` wraps `makePullStream` with a cutoff filter. -- `makeEntityStreams(...)` creates the `{live, cutoff, backfill}` trio. - -This file is almost entirely generic — 90% of it is reusable across -connectors (and is essentially what the template ships). - -## `src/sandbox.ts` — local runner - -- Reads `ACK_TELEMETRY_ENABLED` (via Effect Config) to toggle OTLP - export. When enabled, composes - `Observability.Otlp.layerJson(...) + Metric.enableRuntimeMetricsLayer`. -- Mounts `NodeHttpServer.layer(createServer, { port: webhookPort })`. -- Uses `StateStoreInMemory` + `ConsolePublisherLayer` for zero - infrastructure — prints batches to stdout. -- Entry point: `Effect.runPromise(program.pipe(Effect.provide(RuntimeLayer)))`. - -Copy this sandbox shape unchanged for any connector; only the -connector-specific layer (`PolarConnectorConfig` → `XConnectorConfig`) -and env-var name change. - -## `test/api.vcr.test.ts` — replay tests - -- One cassette covers all four list endpoints. -- Each test decodes the real response through the schema. 
If the schema - drifts from the cassette, the test fails — this is the mechanism that - keeps schemas honest. - -## `test/webhook.test.ts` — end-to-end - -- Uses `NodeHttpServer.layerTest` for an in-process HTTP transport. -- Forks `runConnector(...)` with `Effect.forkScoped`, so the webhook - route is actually mounted. -- POSTs a realistic `customer.created` payload. -- Uses `makeTestPublisher` to capture the emitted batch, then asserts - shape. - -This is the template for every webhook test — the only thing that -changes is the payload fixture and the expected stream. - -## `test/helpers.ts` — test publisher - -- ~29 lines. Creates a `Publisher` layer that buffers batches into a - `Ref` and resolves a `Deferred` after `expected` deliveries. -- Drop-in for any connector test. - -## What NOT to copy verbatim - -- `@polar-sh/sdk` dependency — Polar-specific. -- The four entity names / cursor fields — platform-specific. -- The list of ignored webhook types — this is the Polar event catalog. -- `POLAR_*` env var names. - -## Anatomy summary - -Polar demonstrates: - -- Single-tenant sandbox-URL archetype. -- Bearer token auth. -- Page+limit pagination. -- Webhook-driven live + API-driven backfill. -- Cutoff-deferred handoff. -- Optional signing secret with friendly warning. -- Telemetry wiring toggled by one env var. -- VCR replay tests + in-process webhook tests. - -If you're building a connector that matches these shapes, Polar is the -best code to mirror. If your target differs (OAuth, per-tenant URL, -polling-only), combine Polar's structure with the relevant archetype -delta in `connector-archetypes.md`. 
+- present-state only +- public exports listed explicitly +- minimal runtime wiring example +- API-layer example +- webhook behavior +- testing commands diff --git a/.agents/skills/airfoil-kit/references/patterns.md b/.agents/skills/airfoil-kit/references/patterns.md index 04cb848..1b94eac 100644 --- a/.agents/skills/airfoil-kit/references/patterns.md +++ b/.agents/skills/airfoil-kit/references/patterns.md @@ -1,156 +1,210 @@ # patterns Patterns shared by `templates/producer-template/` and -`connectors/producer-polar/`. For each pattern this file explains: what it -is, when to deviate, and where to look in the existing code. +`connectors/producer-polar/`. This file is the current implementation contract +for connector code in this repo. --- ## 1. Config struct vs individual fields -**Pattern:** a single `Config.all({...})` that produces a flat struct. Pass -the decoded struct into downstream factories (`makeXApiClient(config)`), -never reach into `ConfigProvider` from deep inside the connector. +Use one `Config.all({...})` that produces a flat config struct. Pass that struct +into downstream factories. Do not read `ConfigProvider` from deep inside API +helpers or stream code. -**Deviate when:** none. Even for large configs, keep one struct. +```ts +export const XConfigConfig = Config.all({ + apiBaseUrl: Config.string("X_API_BASE_URL"), + apiToken: Config.string("X_API_TOKEN"), + webhookSecret: Config.option(Config.string("X_WEBHOOK_SECRET")), +}); +``` -**See:** `PolarConfigConfig`, `TemplateConfigConfig`. +Use: -## 2. Service tag per logical component +- `Config.string(...)` +- `Config.option(...)` +- `Config.withDefault(...)` +- `Config.port(...)` +- `Config.boolean(...)` -Three service tags per connector: +Do not use `process.env` in connector code or tests. -- `XApiClient` — HTTP-level operations. -- `XConnector` — the `{ connector, routes }` pair. -- (Optional) `XOAuthTokens` — refreshing tokens, if applicable. +## 2. 
Service tag per logical component -Each tag lives in the file that owns the logic, with a string tag of the -form `@useairfoil/producer-/`. +Every connector usually has these services: -**Deviate when:** never merge unrelated responsibilities into one tag. +- `XApiClient` +- `XConnector` +- optional service-specific helpers when the API genuinely needs them -## 3. Layer factories return `Layer.effect(Tag)(factory)` +String tags should use package scope: ```ts -export const XConnectorConfig = (): Layer.Layer< - XConnector, - ConnectorError, - HttpClient.HttpClient -> => - Layer.effect(XConnector)( - Effect.gen(function* () { - const config = yield* XConfigConfig; - return yield* makeXConnector(config).pipe(Effect.provide(XApiClientConfig(config))); - }), - ); +export class XApiClient extends Context.Service()( + "@useairfoil/producer-x/XApiClient", +) {} + +export class XConnector extends Context.Service()( + "@useairfoil/producer-x/XConnector", +) {} ``` -- The layer **requires** whatever its factories need (`HttpClient` here). -- It reads config itself, so callers only supply the `ConfigProvider`. -- Error channel is narrowed to `ConnectorError` via `Effect.mapError`. +Do not collapse unrelated responsibilities into one service tag. + +## 3. Current naming conventions + +Use the current repo names. -## 4. API client with `fetchJson` + `fetchList` +- raw-config API client layer: `layerApiClient(config)` +- config-decoded connector layer: `layerConfig` +- connector runtime: `{ connector, routes }` +- webhook routes: `Webhook.route({...})` +- connector runner: `Ingestion.runConnector(...)` +- in-memory state layer: `Ingestion.layerMemory` +- publisher service tag: `Publisher.Publisher` + +Avoid stale names like: + +- `XApiClientConfig` +- `XConnectorConfig()` +- `runConnector` root imports +- `StateStoreInMemory` + +## 4. API client layer shape + +Use a typed service plus a raw-config layer factory. 
```ts -type XApiClientService = { - readonly fetchJson: (schema, path, params?) => Effect.Effect; +export type XApiClientService = { + readonly fetchJson: ( + schema: Schema.Decoder, + path: string, + params?: Record, + ) => Effect.Effect; readonly fetchList: ( - schema, - path, - options, + schema: Schema.Decoder, + path: string, + options: XListOptions, ) => Effect.Effect, ConnectorError, R>; }; + +export class XApiClient extends Context.Service()( + "@useairfoil/producer-x/XApiClient", +) {} + +export const makeXApiClient = ( + config: XConfig, +): Effect.Effect => + Effect.fnUntraced(function* () { + const client = (yield* HttpClient.HttpClient).pipe( + HttpClient.mapRequest(HttpClientRequest.prependUrl(config.apiBaseUrl)), + HttpClient.mapRequest(HttpClientRequest.acceptJson), + ); + + return { fetchJson, fetchList }; + })(); + +export const layerApiClient = ( + config: XConfig, +): Layer.Layer => + Layer.effect(XApiClient)(makeXApiClient(config)); ``` -- `fetchJson` for detail fetches and non-list endpoints. -- `fetchList` encapsulates the pagination convention. Return - `{ items, hasMore, ...maybeCursor }` — whatever your API communicates. -- Derive pagination semantics from official platform docs and validate against - recorded traffic. Do not assume cursor or continuation behavior from another - connector. +Keep transport policy here: -**Deviate when:** your API is GraphQL (replace GET with POST + query), -bulk-export based (replace `fetchList` with a job runner), or returns -protocol buffers (add a `fetchBytes` helper that decodes). +- auth headers +- base URL prefixing +- response decode +- pagination mapping +- transport/decode error mapping -## REST mode summary (default) +## 5. Connector layer shape -For REST APIs, treat this file + `example-auth.md` + -`example-pagination.md` as the mode contract. +Use `layerConfig` to decode config and build the connector service. 
+ +```ts +export const layerConfig: Layer.Layer = + Layer.effect(XConnector)( + Effect.fnUntraced(function* () { + const config = yield* XConfigConfig; + return yield* makeXConnector(config).pipe(Effect.provide(layerApiClient(config))); + })().pipe( + Effect.mapError((error) => + error instanceof ConnectorError + ? error + : new ConnectorError({ + message: "X config failed", + cause: error, + }), + ), + ), + ); +``` -- Keep list/detail access in `fetchJson` and `fetchList` helpers. -- Keep auth middleware in one client construction pipeline. -- Keep pagination mapping deterministic and isolated in `fetchList`. -- Decode response bodies at the API boundary using `Schema`. -- Map all transport/decode failures to `ConnectorError`. +This layer: -If your API is not REST, switch to mode-specific docs: +- reads config itself +- builds the API client from the decoded config +- narrows failures to `ConnectorError` -- GraphQL: `api-mode-graphql.md` -- gRPC: `api-mode-grpc.md` +## 6. Entity stream trio -## 5. Entity stream trio: `{ live, cutoff, backfill }` +For entity connectors, always build the same trio: -Always wire every entity with `makeEntityStreams({ api, schema, path, cursorField })`. -The returned trio has exactly the shape the engine expects: +- `live` +- `cutoff` +- `backfill` -- `live`: `WebhookStream` — pushed to by the webhook handler. -- `cutoff`: `Deferred` — resolved by the first live event - (or by initialCutoff for polling-only connectors). -- `backfill`: `Stream, ConnectorError>` — waits on cutoff, then pages. +```ts +const streams = + yield * + makeEntityStreams({ + api, + schema: CustomerSchema, + path: "/customers", + cursorField: "updated_at", + limit: 100, + }); +``` -**Deviate when:** +That returns: -- Pure polling — skip `WebhookStream`, use `makePullStream` as `live` and - point `initialCutoff` at the desired history window. -- Webhook-only — return an empty backfill stream. 
+- `live: Streams.WebhookStream` +- `cutoff: Deferred` +- `backfill: Stream, ConnectorError>` -## 6. First-webhook-sets-cutoff +## 7. First-live-event sets cutoff -The first live event dispatched to an entity resolves its `Deferred`. -Backfill waits on that deferred, so it can only run historical data that -happened **before** the first live event. This guarantees no overlap gap. +For webhook-driven entity streams, the first live event establishes the cutoff. +Backfill waits on that cutoff so historical data does not overlap the live side. ```ts -export const dispatchEntityWebhook = (options) => - Effect.gen(function* () { - yield* setCutoff(options.cutoff, options.cursor); // idempotent - yield* Queue.offer(options.queue.queue, { +export const dispatchEntityWebhook = >(options: { + readonly queue: Streams.WebhookStream; + readonly cutoff: Deferred.Deferred; + readonly row: T; + readonly cursor: Cursor; +}): Effect.Effect => + Effect.fnUntraced(function* () { + yield* Deferred.succeed(options.cutoff, options.cursor).pipe(Effect.asVoid); + return yield* Queue.offer(options.queue.queue, { cursor: options.cursor, rows: [options.row], }).pipe(Effect.asVoid); - }); + })(); ``` -**Deviate when:** your connector is polling-only (no live events); -`initialCutoff` passed to `runConnector` becomes the canonical cutoff. - -## 7. Seen-set for upsert de-dupe - -The engine tracks a `Set` of primary keys that have already been -published (live or backfill). Backfill filters its rows through that set -before emitting, so overlapping windows don't re-publish the same row. +## 8. Webhook route pattern -This is implemented inside `runEntity` in -`packages/connector-kit/src/ingestion/engine.ts`. You don't need to do -anything in connector code. - -## 8. Events run backfill then live (order matters) - -For `defineEvent` streams, the engine drains the entire backfill before -starting live. Events are append-only logs; ordering must be preserved. 
- -**Deviate when:** you want overlap (which would violate ordering) — in -that case, use `defineEntity` instead. - -## 9. Webhook handler pattern +Always author routes with `Webhook.route({...})`. ```ts -const webhookRoute: WebhookRoute = { - path: "/webhooks/", +const webhookRoute = Webhook.route({ + path: "/webhooks/x", schema: WebhookPayloadSchema, handle: (payload, request, rawBody) => - Effect.gen(function* () { + Effect.fn("x/webhook/handle")(function* () { if (Option.isSome(config.webhookSecret)) { if (!rawBody) { return yield* Effect.fail( @@ -159,121 +213,177 @@ const webhookRoute: WebhookRoute = { }), ); } + yield* verifyWebhookSignature({ rawBody, - headers: request.headers, + request, secret: config.webhookSecret.value, }); } - yield* resolveWebhookDispatch({ payload /* ...streams */ }); - }), -}; + + return yield* resolveWebhookDispatch({ payload, streams }); + })(), +}); ``` -Key points: +Rules: -- `Schema.Union([...])` validates the payload structure against known types. -- Raw body is used for signature verification. -- Verification is fail-closed when enabled: missing verification inputs are - explicit typed failures. -- Dispatch logic is extracted into a pure function for testability. +- verify signatures before side effects +- fail closed when verification is enabled but inputs are missing +- use raw request bytes when the platform requires raw-byte signing +- return `Effect.void` for intentionally ignored event types -## 10. Explicit enumeration of ignored events +## 9. Exhaustive dispatch -`producer-polar` lists every ignored event type in a dedicated -`Schema.Literals([...])` union. Unknown types fall through to a -`logWarning` default. This is deliberate: silent schema failures are -nightmare to debug. +Dispatch webhook events through an explicit switch. 
```ts switch (payload.type) { - case "order.created": - return handleOrder(...); - case "organization.updated": // ignored on purpose - return Effect.void; + case "product.created": + case "product.updated": + return yield* dispatchEntityWebhook(...) + case "unrelated.event": + return Effect.void default: - return Effect.logWarning("Ignoring unknown webhook type").pipe(...); + return Effect.logWarning("Ignoring unknown webhook type").pipe( + Effect.annotateLogs({ type: (payload as { type: string }).type }), + Effect.asVoid, + ) } ``` -**Deviate when:** the service has hundreds of event types — then group -into a dispatch table `const handlers: Record`. +## 10. Layer semantics: `mergeAll` vs `provide` -## 11. Sandbox runner layer composition +This is the most important Effect composition rule in the repo. -Always the same shape: +- `Layer.mergeAll(...)` is for independent layers. +- `Layer.provide(...)` satisfies a dependent layer's requirements. +- `Layer.provideMerge(...)` satisfies requirements and also keeps the provided + outputs exposed downstream. + +Correct: ```ts +const EnvLayer = Layer.mergeAll( + FetchHttpClient.layer, + Layer.succeed(ConfigProvider.ConfigProvider, ConfigProvider.fromEnv()), +); + +const ConnectorLayer = layerConfig.pipe(Layer.provide(EnvLayer)); +``` + +Incorrect: + +```ts +const RuntimeLayer = Layer.mergeAll(layerConfig, EnvLayer); +``` + +The incorrect example only merges the layers side-by-side. It does not use +`EnvLayer` to build `layerConfig`. + +If an entrypoint still appears to require `HttpClient`, `Path`, or +`ConfigProvider`, inspect the layer graph before reaching for a cast. + +## 11. 
Sandbox runner shape + +Current sandbox shape: + +```ts +const EnvLayer = Layer.mergeAll( + FetchHttpClient.layer, + Layer.succeed(ConfigProvider.ConfigProvider, ConfigProvider.fromEnv()), +); + +const ConnectorLayer = layerConfig.pipe(Layer.provide(EnvLayer)); + +const TelemetryLayer = Layer.unwrap( + Effect.gen(function* () { + const telemetry = yield* TelemetryConfig; + if (!telemetry.enabled) { + return Layer.empty; + } + + return Layer.mergeAll( + Observability.Otlp.layerJson({ + baseUrl: telemetry.baseUrl, + resource: { serviceName: telemetry.serviceName }, + }), + Metric.enableRuntimeMetricsLayer, + ); + }), +).pipe(Layer.provide(EnvLayer)); + const RuntimeLayer = Layer.mergeAll( - StateStoreInMemory, + Ingestion.layerMemory, ConsolePublisherLayer, ConnectorLayer, Logger.layer([Logger.consolePretty()]), TelemetryLayer, - EnvLayer, // FetchHttpClient.layer + ConfigProvider.fromEnv() ); + +Effect.runPromise(Effect.scoped(program).pipe(Effect.provide(RuntimeLayer))); ``` -Callers toggle telemetry via `ACK_TELEMETRY_ENABLED` and choose the -publisher via which layer they merge in (console vs Wings). +## 12. VCR test wiring shape -## 12. Test publisher +Current `effect-vcr` shape: -Always `makeTestPublisher(expected)` that captures into a `Ref` and -resolves a `Deferred` after `expected` batches land. Never count on -timeouts to decide "the connector is idle now". 
+```ts +const cassetteStoreLayer = FileSystemCassetteStore.layer().pipe(Layer.provide(NodeServices.layer)); + +const vcrRuntimeLayer = Layer.mergeAll( + FetchHttpClient.layer, + NodeServices.layer, + cassetteStoreLayer, +); + +const vcrLayer = VcrHttpClient.layer({ + vcrName: "producer-x", + mode: "replay", +}).pipe(Layer.provide(vcrRuntimeLayer)); +``` + +Why: + +- `FileSystemCassetteStore.layer()` needs platform filesystem + `Path` +- `VcrHttpClient.layer(...)` needs live `HttpClient`, `Path`, and a cassette + store service +- pre-provide dependencies before use; do not assume sibling merges satisfy them + +## 13. Test publisher + +Use a `makeTestPublisher(expected)` helper that buffers rows into a `Ref` and +resolves a `Deferred` after the expected number of deliveries. -## 13. Error mapping +Do not rely on timeouts to decide a connector is idle. -Wrap every non-`ConnectorError` failure: +## 14. Error mapping + +Wrap non-`ConnectorError` failures into `ConnectorError` at layer boundaries. ```ts Effect.mapError((error) => error instanceof ConnectorError ? error : new ConnectorError({ - message: "", + message: "X config failed", cause: error, }), ); ``` -Without this, `Layer.effect` will complain that the error channel isn't -narrowed, and `runConnector`'s contract (`E = ConnectorError`) won't hold. - -## 14. Connector config ↔ test config - -In sandbox/prod, `EnvLayer` provides `ConfigProvider.fromEnv()`. - -In tests, use either: - -- `ConfigProvider.fromUnknown({ ... })` for hermetic deterministic tests, or -- `ConfigProvider.fromEnv()` for integration-style tests that intentionally use - environment-backed settings. +## 15. Verification order -Pick one deliberately and keep `test` and `test:ci` behavior equivalent. 
+When doing non-trivial refactors or export changes, use this order:

---
-
-## Shape of a connector-kit test
-
-```
-┌───────────────┐
-│ Test body │ runs the Effect program
-│ (Effect.gen) │
-└───────┬───────┘
- │ requires
-┌───────▼───────────────────────────────────────────────┐
-│ connectorLayer = XConnectorConfig().pipe( │
-│ Layer.provide(apiLayer OR vcrLayer) │
-│ ) │
-└───────┬───────────────────────────────────────────────┘
- │ requires
-┌───────▼───────────────────┐ ┌────────────────────┐
-│ apiLayer: Layer │ │ + real HttpClient │
-└───────────────────────────┘ └────────────────────┘
+```bash
+pnpm install
+pnpm --filter @useairfoil/producer-<service> build
+pnpm --filter @useairfoil/producer-<service> typecheck
+pnpm --filter @useairfoil/producer-<service> test:ci
+pnpm exec oxfmt --check
+pnpm exec oxlint
```

-Plus `ConfigProvider` and `StateStoreInMemory` / `test publisher` as
-needed. Polar has working examples for both shapes.
+If a package surface changes, build it before downstream typechecks so workspace
+resolution sees the current shape.

diff --git a/.agents/skills/airfoil-kit/references/playbook.md b/.agents/skills/airfoil-kit/references/playbook.md
index a2dd8b9..84c44a3 100644
--- a/.agents/skills/airfoil-kit/references/playbook.md
+++ b/.agents/skills/airfoil-kit/references/playbook.md
@@ -58,10 +58,11 @@ Default path is `connectors/producer-<service>/api-facts.md`.

If the user asks not to persist this file, keep equivalent facts in notes and
include them in the final report.

-Use Context7 for Effect-specific v4 docs (`effect-ts/effect-smol`) and
-service SDK docs, and `WebFetch` for public API reference pages. DeepWiki
-is optional fallback. Capture everything you learn in a short notes file
-or in the PR description so nothing is lost.
+Use current repo source plus `effect-smol` as the only source of truth for
+Effect-specific patterns. Do not rely on the older official Effect docs for
+this repo right now; they lag the APIs and patterns we are using here. 
Use +service docs for provider-specific behavior. Capture everything you learn in a +short notes file or in the PR description so nothing is lost. ## 4. Read mode-specific contract @@ -95,6 +96,7 @@ Verify the new package installs: ```bash cd ../.. # back to repo root pnpm install +pnpm --filter @useairfoil/producer- run build pnpm --filter @useairfoil/producer- run typecheck ``` @@ -154,7 +156,7 @@ the same way `producer-polar` does — see ## 10. Webhook route (`src/connector.ts`) -- Define one `WebhookRoute` per inbound path the service +- Define one `Webhook.route({ ... })` per inbound path the service uses (often just one). - Verify signatures against `rawBody` using the documented HMAC or library helper. See [`webhooks.md`](./webhooks.md) and @@ -172,6 +174,7 @@ the same way `producer-polar` does — see - Keep the telemetry layer as-is; callers can enable it via `ACK_TELEMETRY_ENABLED`. - Required layer checklist: `HttpClient`, `ConfigProvider`, `StateStore`, `Publisher`, and server layer. +- Pre-provide dependency layers into the dependent layers that need them. - Run once and confirm startup reaches webhook server ready/health output. ## 12. Tests (`test/*`) @@ -204,12 +207,17 @@ the same way `producer-polar` does — see Run each of these from the repo root. Every one must pass: ```bash +pnpm install +pnpm run build pnpm run lint pnpm run typecheck -pnpm run build pnpm run test:ci +pnpm exec oxfmt --check . ``` +If package exports changed, build the changed package before downstream +typechecks so workspace consumers resolve the current package surface. + If any fail, fix before proceeding. See [`definition-of-done.md`](./definition-of-done.md). ## 15. 
Report back diff --git a/.agents/skills/airfoil-kit/references/template-walkthrough.md b/.agents/skills/airfoil-kit/references/template-walkthrough.md index 0bb4c11..661dd84 100644 --- a/.agents/skills/airfoil-kit/references/template-walkthrough.md +++ b/.agents/skills/airfoil-kit/references/template-walkthrough.md @@ -1,227 +1,164 @@ # template-walkthrough File-by-file tour of `templates/producer-template/`. The template targets -[JSONPlaceholder](https://jsonplaceholder.typicode.com) so the code runs and -tests pass with zero credentials. Every file below has a "what to change" -section for when you port it to a real API. +JSONPlaceholder so the code runs and tests pass without external credentials. ---- +Use this as the starting point for any new connector. ## `package.json` -Minimal workspace package. Key points: +- rename the package to `@useairfoil/producer-` +- keep it ESM +- keep `effect`, `@effect/*`, and `@useairfoil/effect-vcr` versions aligned + with the workspace -- `"name": "@useairfoil/producer-template"` — rename to - `@useairfoil/producer-`. -- `"private": true` — keep private unless explicitly publishing. -- `"type": "module"` — all packages in this repo are ESM. -- `"exports"` — points `.` to `dist/index.js` and `dist/index.d.ts`. -- `dependencies.effect: "catalog:"` — Effect and `@effect/*` versions are - managed at the monorepo catalog level. -- `devDependencies['@useairfoil/effect-vcr']` — VCR lives in a devDep - so it does not leak into the published bundle. - -**What to change:** `name`, `version`, and any service-specific dependencies -(e.g. `stripe`, `@octokit/rest`, `shopify-api-node`). Do **not** change the -Effect, `@effect/platform-*`, or `@effect/vitest` versions — they are pinned -at the monorepo level. - -## `tsconfig.json` - -Extends the repo root tsconfig. `strict: true`, `verbatimModuleSyntax: true`, -`noEmit: true` (the build runs through tsdown). - -**What to change:** nothing. 
+## `src/schemas.ts` -## `tsdown.config.ts` +- define entity schemas with `Schema.Struct(...)` +- define webhook payloads with `Schema.Union([...])` +- derive types from real traffic, not memory -Single entry (`src/index.ts`) bundled as ESM with `.d.ts` output. Same as -every other package in the repo. +## `src/api.ts` -**What to change:** nothing. +Current shape: -## `vitest.config.ts` +- `TemplateApiClientService` +- `TemplateApiClient` +- `makeTemplateApiClient(config)` +- `layerApiClient(config)` -`fileParallelism: false` (VCR tests share cassette files, don't race them), -60s timeout for network recording. +Porting rules: -**What to change:** nothing. +- keep the service shape +- replace auth middleware +- replace pagination mapping inside `fetchList` +- decode at the API boundary with `Schema.decodeUnknownEffect(...)` +- map failures to `ConnectorError` -## `.env.example` +## `src/streams.ts` -Template env surface. Every variable the connector reads from `Config` should -appear here with a stubbed value. +Current shape: -**What to change:** replace `TEMPLATE_*` with `_*` and add real -variables — API key, webhook secret, tenant id, etc. +- `resolveCursor(...)` +- `dispatchEntityWebhook(...)` +- `makeBackfillStream(...)` +- `makeEntityStreams(...)` -## `src/schemas.ts` +Porting rules: -Effect `Schema.Struct` for the `Post` entity + a `WebhookPayloadSchema` union -of two event shapes (`post.created|post.updated` and an ignored `post.deleted`). +- keep the live/cutoff/backfill trio +- change cursor semantics to match the target API +- keep webhook dispatch generic and small -**What to change:** replace `PostSchema` with the real entity schemas and -`WebhookPayloadSchema` with the real event union. Always derive fields from a -recorded cassette — see [`vcr-workflow.md`](./vcr-workflow.md). 
+## `src/connector.ts` -## `src/api.ts` +Current shape: -Defines: +- `TemplateConfig` +- `TemplateConfigConfig` +- `TemplateConnector` +- `makeTemplateConnector(config)` +- `layerConfig` -- `TemplateApiClientService` — the typed service surface (`fetchJson`, - `fetchList`). -- `TemplateApiClient` — `Context.Service` tag. -- `makeTemplateApiClient` — Effect factory that obtains an `HttpClient`, - prepends the base URL, attaches `bearerToken`, and returns typed helpers. -- `TemplateApiClientConfig` — `Layer.effect(...)` wrapper for composition. +Current webhook authoring pattern: -**What to change:** +- `Webhook.route({...})` +- `Effect.fn("template/webhook/handle")(... )` +- optional signature verification on raw body -- Auth middleware on `HttpClient.mapRequest(...)` — Bearer by default, swap - to `setHeader`, `basicAuth`, OAuth2 refresh layer as needed. See - [`example-auth.md`](./example-auth.md). -- Pagination style in `fetchList`. JSONPlaceholder uses `_page`/`_limit`; - your API may use `cursor`, `page_size`, `starting_after`, link headers, etc. - See [`example-pagination.md`](./example-pagination.md). -- Endpoint paths — `/posts` → your real list endpoints. -- Error mapping — keep mapping into `ConnectorError`, but add service-specific - error enrichment where useful. +Porting rules: -## `src/streams.ts` +- rename all template identifiers +- keep `layerConfig` +- keep the connector runtime shape `{ connector, routes }` +- keep exhaustive dispatch over payload types -Entity-stream factory: +## `src/sandbox.ts` -- `resolveCursor(row, field)` turns a row's cursor field into a `Cursor`. -- `dispatchEntityWebhook({ queue, cutoff, row, cursor })` — enqueue + set - cutoff in one go. -- `makeBackfillStream(...)` — waits on the cutoff deferred, then uses - `makePullStream` to page until `hasMore` is false. Filters to - `row[cursorField] <= cutoff`. -- `makeEntityStreams(...)` — one-shot factory returning `{ live, cutoff, backfill }`. 
+Current runtime shape: -**What to change:** +```ts +const EnvLayer = Layer.mergeAll( + FetchHttpClient.layer, + Layer.succeed(ConfigProvider.ConfigProvider, ConfigProvider.fromEnv()), +) -- `isOnOrBeforeCutoff` — tweak the cutoff comparison if your cursor is a - timestamp (`new Date(...)`) vs a numeric id. For timestamps, prefer the - Polar connector's string-compare (`new Date(value).getTime()`). -- Pagination hand-off. The JSONPlaceholder example paginates by incrementing - `_page`. For cursor-based APIs, return `cursor: next_token` and rely on the - API's own `hasMore`/`has_more` flag. -- `limit` default (10 for JSONPlaceholder; 100 is a good default for real APIs). +const ConnectorLayer = layerConfig.pipe(Layer.provide(EnvLayer)) -## `src/connector.ts` +const TelemetryLayer = Layer.unwrap(...).pipe(Layer.provide(EnvLayer)) -The main wire-up file: - -- `TemplateConfig` — plain type describing the decoded config struct. -- `TemplateConfigConfig` — `Config.all({...})` that decodes env vars. -- `TemplateConnector` — `Context.Service` exposing - `{ connector, routes }` to callers. -- `verifyWebhookSignature` — **stub**. Replace with real HMAC verification. -- `resolveWebhookDispatch` — switch on `payload.type`, dispatch to the right - entity queue. -- `makeTemplateConnector` — builds `EntityStreams` and composes everything. -- `TemplateConnectorConfig` — `Layer.effect(TemplateConnector)(...)` for - runtime composition. - -**What to change:** - -- Rename every `Template` / `TEMPLATE_` identifier. See - [`assets/rename-checklist.md`](../assets/rename-checklist.md). -- Implement real webhook signature verification. Use the service's SDK - helper where available (e.g. `stripe.webhooks.constructEvent`, - `@polar-sh/sdk/webhooks.validateEvent`). See [`webhooks.md`](./webhooks.md). -- Add one `makeEntityStreams` call per entity. -- Add one `WebhookRoute` per inbound path. -- Extend `resolveWebhookDispatch` with cases for every event type you care - about. 
Ignored events should fall into a `.void`/`.asVoid` case to keep - them explicit. +const RuntimeLayer = Layer.mergeAll( + Ingestion.layerMemory, + ConsolePublisherLayer, + ConnectorLayer, + Logger.layer([Logger.consolePretty()]), + TelemetryLayer, +) +``` -## `src/sandbox.ts` +Porting rules: -End-to-end runner for local development: +- keep this dependency graph +- rename env vars and logging labels only +- do not sibling-merge a dependency layer and assume it satisfies dependents -- `SandboxConfig`, `TelemetryConfig` — Effect `Config.all({...})` for runtime - knobs. -- `ConsolePublisherLayer` — a `Publisher` that logs batches instead of - pushing them to Wings. -- `program` — obtains the connector + routes, starts a `NodeHttpServer` (or - Bun equivalent), and - calls `runConnector(connector, { initialCutoff, webhook: { routes } })`. -- `EnvLayer` — merges `FetchHttpClient.layer` and - `ConfigProvider.fromEnv()`. -- `TelemetryLayer` — opt-in OTLP export + runtime metrics. -- `RuntimeLayer` — composes every layer the program needs. -- Final `Effect.runPromise(...)` with a fatal error logger. +## `src/index.ts` -**What to change:** only identifiers (`TEMPLATE_*` → `_*`, -`producer-template` → `producer-`), never the layer structure. +Current public surface: -## `src/index.ts` +- `TemplateApiClient` +- `layerApiClient` +- `TemplateConnector` +- `layerConfig` +- `TemplateConfig` +- `TemplateConfigConfig` +- `TemplateConnectorRuntime` +- `Post` +- `PostSchema` +- `WebhookPayload` +- `WebhookPayloadSchema` -Re-exports the public API. Keep the shape small: service tag, config -factory, config struct type, runtime type, and schemas you want consumers -to pattern-match against. +Keep the public surface small and present-state. ## `test/helpers.ts` -Test-only `Publisher` that captures every published batch into a `Ref` and -resolves a `Deferred` after N batches land. Used by every webhook test. - -**What to change:** nothing. +Keep the test publisher helper shape. 
It is reusable across connectors. ## `test/api.vcr.test.ts` -VCR replay test. Construction order: +Current test shape: -1. Build `program` that uses `TemplateApiClient` directly. -2. Build an `apiLayer` that supplies `TemplateApiClient` from - `makeTemplateApiClient`. -3. Build a `cassetteLayer` from `FileSystemCassetteStore.layer()`. -4. Build a `vcrLayer` from `VcrHttpClient.layer({ vcrName, mode })`. -5. Provide everything + a `ConfigProvider.fromUnknown({ ... })` with the - minimum env needed for `TemplateConfigConfig` to decode. +1. build a program that uses the API client service directly +2. build `apiLayer` +3. build `cassetteStoreLayer` +4. build `vcrRuntimeLayer` +5. build `vcrLayer` +6. provide `ConfigProvider.fromUnknown({ ... })` -The first time you run this against a real API, set `mode: "record"`. After -the cassette is written, switch to `"replay"` and commit. +VCR wiring should match the current `effect-vcr` runtime pattern exactly. ## `test/webhook.test.ts` -In-memory webhook test using `NodeHttpServer.layerTest` (or Bun equivalent): - -1. Build a test publisher via `makeTestPublisher(1)`. -2. Fork `runConnector(connector, { webhook: { routes } })`. -3. POST a synthetic payload to the webhook path via `HttpClient.execute`. -4. Wait on `Deferred.await(done)`; assert one batch was published to the - right entity name. - -**What to change:** the fixture payload object, the webhook path, and the -expected entity name. +Current test shape: -## `test/__cassettes__/` +1. use `NodeHttpServer.layerTest` +2. build a stub API layer +3. build `connectorLayer = layerConfig.pipe(Layer.provide(apiLayer))` +4. fork `Ingestion.runConnector(...)` +5. post to the in-process webhook route +6. await the `Deferred` from the test publisher -JSON cassette files, committed. One per `*.vcr.test.ts`, keyed by the Vitest -test name. See [`vcr-workflow.md`](./vcr-workflow.md) for the file format. +This is the standard webhook test shape for new connectors. 
## `README.md` -Document the connector. Mirror the structure of -`connectors/producer-polar/README.md`: Install → Env → Minimal wiring → -Architecture → Testing with VCR. - ---- - -## Where the template intentionally differs from Polar +Document the connector in present-state terms: -- Only one entity (`posts`), no `events`. -- Numeric cursor (`id`) instead of a timestamp, because JSONPlaceholder does - not emit timestamps. Real connectors should prefer timestamps. -- No real webhook signing — the stub accepts everything. Polar delegates to - `@polar-sh/sdk/webhooks.validateEvent`. -- No service SDK dependency. Real connectors usually add one. -- A simpler `TemplateListPage` with `{ items, hasMore }` instead of - `{ items, pagination: { total_count, max_page } }` since JSONPlaceholder - has no totals. +- public exports +- env/config +- minimal runtime wiring +- webhook behavior +- API client layer +- testing -When in doubt, compare the new connector against Polar: -[`example-producer-polar.md`](./example-producer-polar.md). +Avoid migration framing and avoid explaining old names. diff --git a/.agents/skills/airfoil-kit/references/vcr-workflow.md b/.agents/skills/airfoil-kit/references/vcr-workflow.md index 26309d8..4043e91 100644 --- a/.agents/skills/airfoil-kit/references/vcr-workflow.md +++ b/.agents/skills/airfoil-kit/references/vcr-workflow.md @@ -1,23 +1,25 @@ # vcr-workflow -VCR captures real HTTP interactions once, then replays them in CI. +VCR captures real HTTP interactions once, then replays them deterministically. Source of truth: - `packages/effect-vcr/src/types.ts` +- `packages/effect-vcr/src/cassette-store.ts` +- `packages/effect-vcr/src/file-system-cassette-store.ts` - `packages/effect-vcr/src/vcr-http-client.ts` ## Real-API verification loop -Use this loop per entity endpoint you ship. +Use this loop per endpoint you ship. 1. Write a schema in `src/schemas.ts` from docs as a starting point. -2. 
Write/update `test/api.vcr.test.ts` to call the real endpoint.
-3. Set VCR mode to `"record"` temporarily.
-4. Run test with real credentials from `.env`.
-5. Inspect cassette response body and tighten schema fields.
+2. Write or update `test/api.vcr.test.ts` to call the real endpoint.
+3. Switch VCR mode to `"record"` temporarily.
+4. Run the test with real credentials from `.env`.
+5. Inspect the recorded response body and tighten the schema.
 6. Switch VCR mode back to `"replay"`.
-7. Re-run test (replay-only) and commit cassette.
+7. Re-run the test in replay mode and commit the cassette.
 
 ## Correct layer wiring in tests
 
@@ -27,19 +29,27 @@ import { FileSystemCassetteStore, VcrHttpClient } from "@useairfoil/effect-vcr";
 import { Layer } from "effect";
 import { FetchHttpClient } from "effect/unstable/http";
 
+const cassetteStoreLayer = FileSystemCassetteStore.layer().pipe(Layer.provide(NodeServices.layer));
+
+const vcrRuntimeLayer = Layer.mergeAll(
+  FetchHttpClient.layer,
+  NodeServices.layer,
+  cassetteStoreLayer,
+);
+
 const vcrLayer = VcrHttpClient.layer({
   vcrName: "producer-<service>",
   mode: "replay", // switch to "record" only when recording
-}).pipe(
-  Layer.provideMerge(FileSystemCassetteStore.layer()),
-  Layer.provideMerge(FetchHttpClient.layer),
-  Layer.provideMerge(NodeServices.layer),
-);
+}).pipe(Layer.provide(vcrRuntimeLayer));
 ```
 
-Do not omit `FileSystemCassetteStore.layer()`; VCR needs cassette storage. If
-`FetchHttpClient.layer` and `NodeServices.layer` are already provided higher in
-your test runtime, keep them there and only merge the missing dependencies. 
+Why this shape: + +- `FileSystemCassetteStore.layer()` needs platform filesystem and `Path` +- `VcrHttpClient.layer(...)` needs a live `HttpClient`, `Path`, and a + cassette store service +- `NodeServices.layer` matters both for the cassette store and for VCR cassette + name inference in Node tests ## Cassette path and export key @@ -49,9 +59,15 @@ Default inference under Vitest: - cassette file: `test/__cassettes__/api.vcr.test.cassette` - export key: current Vitest test name (`describe > it`) -If not running under Vitest state, pass `cassetteDir` + `cassetteName` explicitly. +If `cassetteName` is provided explicitly: -## Cassette file shape (current) +- `users` becomes `users.cassette` +- `users.cassette` stays `users.cassette` + +If not running under Vitest, provide `cassetteDir` and `cassetteName` +explicitly. + +## Cassette file shape ```json { @@ -91,13 +107,13 @@ If not running under Vitest state, pass `cassetteDir` + `cassetteName` explicitl CI behavior: -- `CI=true` only affects `auto` mode. -- In `auto`, missing cassette fails in CI instead of recording. -- `record` still records even in CI. +- `CI=true` only affects `auto` +- in `auto`, missing cassette fails in CI instead of recording +- `record` still records even in CI ## `ACK_DISABLE_VCR` -Per-connector bypass uses `VcrConfig.connectorName`: +Per-connector bypass uses `VcrConfig.vcrName`: ```bash ACK_DISABLE_VCR=producer-stripe,producer-shopify pnpm run test @@ -105,18 +121,18 @@ ACK_DISABLE_VCR=producer-stripe,producer-shopify pnpm run test Behavior: -- Match is case-insensitive after trimming. -- If matched, VCR returns the live client directly (no cassette read/write). -- Use full connector names (`producer-stripe`, not just `stripe`). +- match is case-insensitive after trimming +- if matched, VCR returns the live client directly +- use the same `vcrName` you configured in the layer ## Redaction and matching Defaults: -- `authorization` is ignored for matching by default. 
-- `authorization` is redacted by default on write. +- `authorization` is ignored for matching by default +- `authorization` is redacted on write by default -Add service-specific headers/keys when needed: +Add service-specific fields when needed: ```ts VcrHttpClient.layer({ @@ -130,12 +146,16 @@ VcrHttpClient.layer({ }); ``` +Use `matchIgnore` when request fields should not affect cassette identity. + +Use `redact` when fields should never be written to disk. + ## Rerecording safely -1. Delete stale cassette file (or stale export key). -2. Switch test to `mode: "record"`. +1. Delete the stale cassette file or the stale export key. +2. Switch the test to `mode: "record"`. 3. Run with real credentials. -4. Inspect diff for sensitive fields. +4. Inspect the diff for sensitive fields. 5. Switch back to `mode: "replay"`. 6. Re-run `test:ci` and commit. @@ -144,8 +164,7 @@ delete the cassette, and re-record. ## Troubleshooting -- **Missing replay entry**: request shape changed (URL/body/header key mismatch) - or cassette not recorded yet. -- **Cassette path inference failure**: not under Vitest; provide path/name. -- **Unexpected live call**: mode is `auto` and cassette/export missing. -- **Invalid cassette format**: regenerate cassette from record mode. +- missing replay entry: request shape changed or cassette not recorded yet +- cassette path inference failure: not under Vitest; provide explicit names +- unexpected live call: mode is `auto` and cassette/export missing +- invalid cassette format: regenerate from record mode diff --git a/.agents/skills/airfoil-kit/references/webhooks.md b/.agents/skills/airfoil-kit/references/webhooks.md index 3269062..dcbef69 100644 --- a/.agents/skills/airfoil-kit/references/webhooks.md +++ b/.agents/skills/airfoil-kit/references/webhooks.md @@ -6,7 +6,7 @@ no webhooks at all. 
## Anatomy of a `WebhookRoute` ```ts -import type { WebhookRoute } from "@useairfoil/connector-kit"; +import { Ingestion, Webhook } from "@useairfoil/connector-kit"; import { Effect } from "effect"; import * as Schema from "effect/Schema"; @@ -15,7 +15,7 @@ const ExamplePayloadSchema = Schema.Union([ Schema.Struct({ type: Schema.Literal("post.updated"), data: PostSchema }), ]); -const route: WebhookRoute> = { +const route = Webhook.route({ path: "/webhooks/example", schema: ExamplePayloadSchema, handle: (payload, request, rawBody) => @@ -23,7 +23,7 @@ const route: WebhookRoute> = { // 1. Verify signature (if applicable) // 2. Dispatch by payload.type to the correct entity/event stream }), -}; +}); ``` - `path` — relative URL mounted by `runConnector`. Prepend `/webhooks/` by @@ -46,7 +46,7 @@ for idempotency cases (duplicate deliveries). import { NodeHttpServer } from "@effect/platform-node"; yield * - runConnector(connector, { + Ingestion.runConnector(connector, { webhook: { routes: [route], healthPath: "/health", // default; override if the platform requires it @@ -56,6 +56,9 @@ yield * - Provide a platform server layer separately (`NodeHttpServer.layer`, `NodeHttpServer.layerTest`, or Bun equivalents) via `Effect.provide`. +- Keep other runtime dependencies in layers outside the `runConnector(...)` + call. The server layer is the usual webhook-specific dependency provided at + the effect site. - `healthPath` — auto-mounted returning `"ok"` with 200. - `disableHttpLogger` — set `true` in noisy CI if you want to silence the default access-log middleware. @@ -194,7 +197,7 @@ const ServerLayer = NodeHttpServer.layerTest; it.effect("dispatches webhook", () => Effect.gen(function* () { yield* Effect.forkScoped( - runConnector(connector, { + Ingestion.runConnector(connector, { webhook: { /* ... */ }, @@ -218,6 +221,14 @@ it.effect("dispatches webhook", () => `NodeHttpServer.layerTest` wires server + client to an in-process transport — no real port needed. 
+Current test composition shape: + +- `connectorLayer = layerConfig.pipe(Layer.provide(apiLayer))` +- `runLayer = Layer.mergeAll(Ingestion.layerMemory, testPublisherLayer, runtimeLayer)` +- fork `Ingestion.runConnector(...)` +- provide `connectorLayer` with `runtimeLayer` and `ConfigProvider` already + satisfied + ## Gotchas - **Deliveries arrive before backfill is ready.** The kit's cutoff From 4a17f095e31b0d6e5742da2c4e3da031e19faa2a Mon Sep 17 00:00:00 2001 From: jaipaljadeja Date: Thu, 30 Apr 2026 14:53:02 +0530 Subject: [PATCH 09/12] chore: change files --- ...connector-kit-5ed8e943-76d0-4455-99d4-e8b99d69cf2a.json | 7 +++++++ ...il-effect-vcr-df5ac0a2-cbdc-4d12-b54a-92a25ce35919.json | 7 +++++++ ...irfoil-flight-d550cd22-a006-427b-ac02-35dd25d8bfa1.json | 7 +++++++ ...airfoil-wings-4810fc8a-be05-4a1c-ab06-891fb04d6dd4.json | 7 +++++++ ...wings-testing-d62641e8-2f7e-468c-a077-74838143df05.json | 7 +++++++ change/airfoil-5ecd7cdf-f905-45ca-853f-c6b33fa4a93f.json | 7 +++++++ 6 files changed, 42 insertions(+) create mode 100644 change/@useairfoil-connector-kit-5ed8e943-76d0-4455-99d4-e8b99d69cf2a.json create mode 100644 change/@useairfoil-effect-vcr-df5ac0a2-cbdc-4d12-b54a-92a25ce35919.json create mode 100644 change/@useairfoil-flight-d550cd22-a006-427b-ac02-35dd25d8bfa1.json create mode 100644 change/@useairfoil-wings-4810fc8a-be05-4a1c-ab06-891fb04d6dd4.json create mode 100644 change/@useairfoil-wings-testing-d62641e8-2f7e-468c-a077-74838143df05.json create mode 100644 change/airfoil-5ecd7cdf-f905-45ca-853f-c6b33fa4a93f.json diff --git a/change/@useairfoil-connector-kit-5ed8e943-76d0-4455-99d4-e8b99d69cf2a.json b/change/@useairfoil-connector-kit-5ed8e943-76d0-4455-99d4-e8b99d69cf2a.json new file mode 100644 index 0000000..e953186 --- /dev/null +++ b/change/@useairfoil-connector-kit-5ed8e943-76d0-4455-99d4-e8b99d69cf2a.json @@ -0,0 +1,7 @@ +{ + "type": "patch", + "comment": "ack: refactor", + "packageName": "@useairfoil/connector-kit", + "email": 
"jadejajaipal5@gmail.com", + "dependentChangeType": "patch" +} diff --git a/change/@useairfoil-effect-vcr-df5ac0a2-cbdc-4d12-b54a-92a25ce35919.json b/change/@useairfoil-effect-vcr-df5ac0a2-cbdc-4d12-b54a-92a25ce35919.json new file mode 100644 index 0000000..8826a4f --- /dev/null +++ b/change/@useairfoil-effect-vcr-df5ac0a2-cbdc-4d12-b54a-92a25ce35919.json @@ -0,0 +1,7 @@ +{ + "type": "patch", + "comment": "vcr: refactor", + "packageName": "@useairfoil/effect-vcr", + "email": "jadejajaipal5@gmail.com", + "dependentChangeType": "patch" +} diff --git a/change/@useairfoil-flight-d550cd22-a006-427b-ac02-35dd25d8bfa1.json b/change/@useairfoil-flight-d550cd22-a006-427b-ac02-35dd25d8bfa1.json new file mode 100644 index 0000000..4ebe6d9 --- /dev/null +++ b/change/@useairfoil-flight-d550cd22-a006-427b-ac02-35dd25d8bfa1.json @@ -0,0 +1,7 @@ +{ + "type": "patch", + "comment": "flight: refactor and add record batch with metadata", + "packageName": "@useairfoil/flight", + "email": "jadejajaipal5@gmail.com", + "dependentChangeType": "patch" +} diff --git a/change/@useairfoil-wings-4810fc8a-be05-4a1c-ab06-891fb04d6dd4.json b/change/@useairfoil-wings-4810fc8a-be05-4a1c-ab06-891fb04d6dd4.json new file mode 100644 index 0000000..d69aee1 --- /dev/null +++ b/change/@useairfoil-wings-4810fc8a-be05-4a1c-ab06-891fb04d6dd4.json @@ -0,0 +1,7 @@ +{ + "type": "patch", + "comment": "wings: good refactor", + "packageName": "@useairfoil/wings", + "email": "jadejajaipal5@gmail.com", + "dependentChangeType": "patch" +} diff --git a/change/@useairfoil-wings-testing-d62641e8-2f7e-468c-a077-74838143df05.json b/change/@useairfoil-wings-testing-d62641e8-2f7e-468c-a077-74838143df05.json new file mode 100644 index 0000000..2f458bd --- /dev/null +++ b/change/@useairfoil-wings-testing-d62641e8-2f7e-468c-a077-74838143df05.json @@ -0,0 +1,7 @@ +{ + "type": "patch", + "comment": "wings-testing: improve effect standards", + "packageName": "@useairfoil/wings-testing", + "email": "jadejajaipal5@gmail.com", + 
"dependentChangeType": "patch" +} diff --git a/change/airfoil-5ecd7cdf-f905-45ca-853f-c6b33fa4a93f.json b/change/airfoil-5ecd7cdf-f905-45ca-853f-c6b33fa4a93f.json new file mode 100644 index 0000000..3975052 --- /dev/null +++ b/change/airfoil-5ecd7cdf-f905-45ca-853f-c6b33fa4a93f.json @@ -0,0 +1,7 @@ +{ + "type": "patch", + "comment": "cli: refactor", + "packageName": "airfoil", + "email": "jadejajaipal5@gmail.com", + "dependentChangeType": "patch" +} From 084a6de4cd66965d86418b5431aa3424b7966b38 Mon Sep 17 00:00:00 2001 From: jaipaljadeja Date: Mon, 4 May 2026 17:06:42 +0530 Subject: [PATCH 10/12] chore: refactor --- connectors/producer-polar/src/api.ts | 101 +++--- connectors/producer-polar/src/connector.ts | 163 ++++----- connectors/producer-polar/src/streams.ts | 51 +-- connectors/producer-shopify/src/api.ts | 161 +++++---- connectors/producer-shopify/src/connector.ts | 91 ++--- connectors/producer-shopify/src/streams.ts | 36 +- packages/connector-kit/src/publisher/wings.ts | 4 +- packages/effect-vcr/src/cassette-store.ts | 21 +- .../src/file-system-cassette-store.ts | 4 +- packages/effect-vcr/src/vcr-http-client.ts | 231 +++++++------ packages/wings/src/data-plane/fetcher.ts | 79 +++-- packages/wings/src/data-plane/publisher.ts | 313 +++++++++--------- packages/wings/src/index.ts | 2 +- packages/wings/src/partition-value.ts | 195 ++++++----- packages/wings/src/schema/types.ts | 84 ++--- .../wings/src/schema/wings-annotations.ts | 86 ----- packages/wings/src/schema/wings-converter.ts | 217 ------------ packages/wings/src/schema/wings-types.ts | 161 --------- packages/wings/test/fetcher.test.ts | 10 +- packages/wings/test/publisher.test.ts | 12 +- templates/producer-template/src/api.ts | 101 +++--- templates/producer-template/src/connector.ts | 87 ++--- templates/producer-template/src/streams.ts | 36 +- 23 files changed, 896 insertions(+), 1350 deletions(-) delete mode 100644 packages/wings/src/schema/wings-annotations.ts delete mode 100644 
packages/wings/src/schema/wings-converter.ts delete mode 100644 packages/wings/src/schema/wings-types.ts diff --git a/connectors/producer-polar/src/api.ts b/connectors/producer-polar/src/api.ts index fb4ec69..9daf4db 100644 --- a/connectors/producer-polar/src/api.ts +++ b/connectors/producer-polar/src/api.ts @@ -27,64 +27,63 @@ export class PolarApiClient extends Context.Service => - Effect.fnUntraced(function* () { - const client = (yield* HttpClient.HttpClient).pipe( - HttpClient.mapRequest(HttpClientRequest.prependUrl(config.apiBaseUrl)), - HttpClient.mapRequest(HttpClientRequest.bearerToken(config.accessToken)), - HttpClient.mapRequest(HttpClientRequest.acceptJson), - ); +): Effect.fn.Return { + const client = (yield* HttpClient.HttpClient).pipe( + HttpClient.mapRequest(HttpClientRequest.prependUrl(config.apiBaseUrl)), + HttpClient.mapRequest(HttpClientRequest.bearerToken(config.accessToken)), + HttpClient.mapRequest(HttpClientRequest.acceptJson), + ); - const fetchJson = ( - schema: Schema.Decoder, - path: string, - params?: Record, - ): Effect.Effect => { - const request = params - ? HttpClientRequest.get(path).pipe(HttpClientRequest.setUrlParams(params)) - : HttpClientRequest.get(path); - return Effect.scoped( - client.execute(request).pipe( - Effect.flatMap(HttpClientResponse.filterStatusOk), - Effect.flatMap((response) => response.json), - Effect.flatMap(Schema.decodeUnknownEffect(schema)), - Effect.mapError( - (error) => - new ConnectorError({ - message: "Polar API request failed", - cause: error, - }), - ), + const fetchJson = ( + schema: Schema.Decoder, + path: string, + params?: Record, + ): Effect.Effect => { + const request = params + ? 
HttpClientRequest.get(path).pipe(HttpClientRequest.setUrlParams(params)) + : HttpClientRequest.get(path); + return Effect.scoped( + client.execute(request).pipe( + Effect.flatMap(HttpClientResponse.filterStatusOk), + Effect.flatMap((response) => response.json), + Effect.flatMap(Schema.decodeUnknownEffect(schema)), + Effect.mapError( + (error) => + new ConnectorError({ + message: "Polar API request failed", + cause: error, + }), ), - ); - }; + ), + ); + }; - const fetchList = ( - schema: Schema.Decoder, - path: string, - options: { - readonly page: number; - readonly limit: number; - readonly sorting: string; - }, - ): Effect.Effect, ConnectorError, R> => { - const params: Record = { - page: String(options.page), - limit: String(options.limit), - sorting: options.sorting, - }; + const fetchList = ( + schema: Schema.Decoder, + path: string, + options: { + readonly page: number; + readonly limit: number; + readonly sorting: string; + }, + ): Effect.Effect, ConnectorError, R> => { + const params: Record = { + page: String(options.page), + limit: String(options.limit), + sorting: options.sorting, + }; - if (Option.isSome(config.organizationId)) { - params.organization_id = config.organizationId.value; - } + if (Option.isSome(config.organizationId)) { + params.organization_id = config.organizationId.value; + } - return fetchJson(makeListResponseSchema(schema), path, params); - }; + return fetchJson(makeListResponseSchema(schema), path, params); + }; - return { fetchJson, fetchList }; - })(); + return { fetchJson, fetchList }; +}); export const layerApiClient = ( config: PolarConfig, diff --git a/connectors/producer-polar/src/connector.ts b/connectors/producer-polar/src/connector.ts index 6f1a599..5c13822 100644 --- a/connectors/producer-polar/src/connector.ts +++ b/connectors/producer-polar/src/connector.ts @@ -211,79 +211,79 @@ const resolveWebhookDispatch = (options: { }; // Connector factory -const makePolarConnector = ( +const makePolarConnector = 
Effect.fnUntraced(function* ( config: PolarConfig, -): Effect.Effect => - Effect.fnUntraced(function* () { - const api = yield* PolarApiClient; - const customerStreams = yield* makeEntityStreams({ - api, - schema: CustomerSchema, - path: "customers/", - cursorField: "created_at", - }); +): Effect.fn.Return { + const api = yield* PolarApiClient; + const customerStreams = yield* makeEntityStreams({ + api, + schema: CustomerSchema, + path: "customers/", + cursorField: "created_at", + }); - const checkoutStreams = yield* makeEntityStreams({ - api, - schema: CheckoutSchema, - path: "checkouts/", - cursorField: "created_at", - }); + const checkoutStreams = yield* makeEntityStreams({ + api, + schema: CheckoutSchema, + path: "checkouts/", + cursorField: "created_at", + }); - const subscriptionStreams = yield* makeEntityStreams({ - api, - schema: SubscriptionSchema, - path: "subscriptions/", - cursorField: "created_at", - }); + const subscriptionStreams = yield* makeEntityStreams({ + api, + schema: SubscriptionSchema, + path: "subscriptions/", + cursorField: "created_at", + }); - const orderStreams = yield* makeEntityStreams({ - api, - schema: OrderSchema, - path: "orders/", - cursorField: "created_at", - }); + const orderStreams = yield* makeEntityStreams({ + api, + schema: OrderSchema, + path: "orders/", + cursorField: "created_at", + }); - const connector = defineConnector({ - name: "producer-polar", - entities: [ - defineEntity({ - name: "customers", - schema: CustomerSchema, - primaryKey: "id", - live: customerStreams.live, - backfill: customerStreams.backfill, - }), - defineEntity({ - name: "checkouts", - schema: CheckoutSchema, - primaryKey: "id", - live: checkoutStreams.live, - backfill: checkoutStreams.backfill, - }), - defineEntity({ - name: "subscriptions", - schema: SubscriptionSchema, - primaryKey: "id", - live: subscriptionStreams.live, - backfill: subscriptionStreams.backfill, - }), - defineEntity({ - name: "orders", - schema: OrderSchema, - primaryKey: "id", 
- live: orderStreams.live, - backfill: orderStreams.backfill, - }), - ], - events: [], - }); + const connector = defineConnector({ + name: "producer-polar", + entities: [ + defineEntity({ + name: "customers", + schema: CustomerSchema, + primaryKey: "id", + live: customerStreams.live, + backfill: customerStreams.backfill, + }), + defineEntity({ + name: "checkouts", + schema: CheckoutSchema, + primaryKey: "id", + live: checkoutStreams.live, + backfill: checkoutStreams.backfill, + }), + defineEntity({ + name: "subscriptions", + schema: SubscriptionSchema, + primaryKey: "id", + live: subscriptionStreams.live, + backfill: subscriptionStreams.backfill, + }), + defineEntity({ + name: "orders", + schema: OrderSchema, + primaryKey: "id", + live: orderStreams.live, + backfill: orderStreams.backfill, + }), + ], + events: [], + }); - const webhookRoute = Webhook.route({ - path: "/webhooks/polar", - schema: WebhookPayloadSchema, - handle: (payload, request, rawBody) => - Effect.fn("polar/webhook/handle")(function* () { + const webhookRoute = Webhook.route({ + path: "/webhooks/polar", + schema: WebhookPayloadSchema, + handle: (payload, request, rawBody) => + Effect.withSpan( + Effect.gen(function* () { if (Option.isSome(config.webhookSecret) && rawBody) { yield* verifyWebhookSignature({ rawBody, @@ -299,24 +299,29 @@ const makePolarConnector = ( subscriptions: subscriptionStreams, orders: orderStreams, }); - })(), - }); + }), + "polar/webhook/handle", + ), + }); - if (Option.isNone(config.webhookSecret)) { - yield* Effect.logWarning( - "POLAR_WEBHOOK_SECRET is not set. Incoming webhooks will not be signature-verified.", - ); - } + if (Option.isNone(config.webhookSecret)) { + yield* Effect.logWarning( + "POLAR_WEBHOOK_SECRET is not set. 
Incoming webhooks will not be signature-verified.", + ); + } - return { connector, routes: [webhookRoute] }; - })().pipe(Effect.annotateLogs({ component: "polar" })); + return { connector, routes: [webhookRoute] }; +}); export const layerConfig: Layer.Layer = Layer.effect(PolarConnector)( - Effect.fnUntraced(function* () { + Effect.gen(function* () { const config = yield* PolarConfigConfig; - return yield* makePolarConnector(config).pipe(Effect.provide(layerApiClient(config))); - })().pipe( + return yield* makePolarConnector(config).pipe( + Effect.annotateLogs({ component: "polar" }), + Effect.provide(layerApiClient(config)), + ); + }).pipe( Effect.mapError((error) => error instanceof ConnectorError ? error diff --git a/connectors/producer-polar/src/streams.ts b/connectors/producer-polar/src/streams.ts index 5652276..8235358 100644 --- a/connectors/producer-polar/src/streams.ts +++ b/connectors/producer-polar/src/streams.ts @@ -8,16 +8,15 @@ import type { PolarApiClientService } from "./api"; // Cursor helpers const toDate = (cursor: Cursor) => (cursor instanceof Date ? 
cursor : new Date(String(cursor))); -export const resolveCursor = >( +export const resolveCursor = Effect.fnUntraced(function* >( row: T, cursorField: keyof T & string, -): Effect.Effect => - Effect.fnUntraced(function* () { - const value = row[cursorField]; - if (typeof value === "string") return value; - const now = yield* DateTime.now; - return DateTime.formatIso(now); - })(); +): Effect.fn.Return { + const value = row[cursorField]; + if (typeof value === "string") return value; + const now = yield* DateTime.now; + return DateTime.formatIso(now); +}); const isOnOrBeforeCutoff = (value: unknown, cutoff: Cursor) => { if (typeof value !== "string") return false; @@ -28,19 +27,20 @@ const isOnOrBeforeCutoff = (value: unknown, cutoff: Cursor) => { const setCutoff = (deferred: Deferred.Deferred, cursor: Cursor) => Deferred.succeed(deferred, cursor).pipe(Effect.asVoid); -export const dispatchEntityWebhook = >(options: { +export const dispatchEntityWebhook = Effect.fnUntraced(function* < + T extends Record, +>(options: { readonly queue: Streams.WebhookStream; readonly cutoff: Deferred.Deferred; readonly row: T; readonly cursor: Cursor; -}): Effect.Effect => - Effect.fnUntraced(function* () { - yield* setCutoff(options.cutoff, options.cursor); - return yield* Queue.offer(options.queue.queue, { - cursor: options.cursor, - rows: [options.row], - }).pipe(Effect.asVoid); - })(); +}) { + yield* setCutoff(options.cutoff, options.cursor); + return yield* Queue.offer(options.queue.queue, { + cursor: options.cursor, + rows: [options.row], + }).pipe(Effect.asVoid); +}); /** Backfill stream for a single entity. Paging continues until the end. */ const makeBackfillStream = >(options: { @@ -95,16 +95,17 @@ export type EntityStreams> = { }; /** Creates the webhook queue, cutoff deferred, and backfill stream for one entity. 
*/ -export const makeEntityStreams = >(options: { +export const makeEntityStreams = Effect.fnUntraced(function* < + T extends Record, +>(options: { readonly api: PolarApiClientService; readonly schema: Schema.Decoder; readonly path: string; readonly cursorField: keyof T & string; readonly limit?: number; -}): Effect.Effect, ConnectorError> => - Effect.fnUntraced(function* () { - const queue = yield* Streams.makeWebhookQueue({ capacity: 2048 }); - const cutoff = yield* Deferred.make(); - const backfill = makeBackfillStream({ ...options, cutoff }); - return { live: queue, cutoff, backfill }; - })(); +}) { + const queue = yield* Streams.makeWebhookQueue({ capacity: 2048 }); + const cutoff = yield* Deferred.make(); + const backfill = makeBackfillStream({ ...options, cutoff }); + return { live: queue, cutoff, backfill }; +}); diff --git a/connectors/producer-shopify/src/api.ts b/connectors/producer-shopify/src/api.ts index 1c1e850..1509807 100644 --- a/connectors/producer-shopify/src/api.ts +++ b/connectors/producer-shopify/src/api.ts @@ -47,95 +47,94 @@ const inferListField = (path: string): string => { const isAbsoluteUrl = (value: string): boolean => /^https?:\/\//i.test(value); -export const makeShopifyApiClient = ( +export const makeShopifyApiClient = Effect.fnUntraced(function* ( config: ShopifyConfig, -): Effect.Effect => - Effect.fnUntraced(function* () { - const rawClient = yield* HttpClient.HttpClient; - const authAndJsonClient = rawClient.pipe( - HttpClient.mapRequest(HttpClientRequest.setHeader("X-Shopify-Access-Token", config.apiToken)), - HttpClient.mapRequest(HttpClientRequest.acceptJson), - ); - const relativePathClient = authAndJsonClient.pipe( - HttpClient.mapRequest(HttpClientRequest.prependUrl(config.apiBaseUrl)), - ); +): Effect.fn.Return { + const rawClient = yield* HttpClient.HttpClient; + const authAndJsonClient = rawClient.pipe( + HttpClient.mapRequest(HttpClientRequest.setHeader("X-Shopify-Access-Token", config.apiToken)), + 
HttpClient.mapRequest(HttpClientRequest.acceptJson), + ); + const relativePathClient = authAndJsonClient.pipe( + HttpClient.mapRequest(HttpClientRequest.prependUrl(config.apiBaseUrl)), + ); - const fetchJson = ( - schema: Schema.Decoder, - path: string, - params?: Record, - ): Effect.Effect => { - const request = params - ? HttpClientRequest.get(path).pipe(HttpClientRequest.setUrlParams(params)) - : HttpClientRequest.get(path); - return Effect.scoped( - relativePathClient.execute(request).pipe( - Effect.flatMap(HttpClientResponse.filterStatusOk), - Effect.flatMap((response) => response.json), - Effect.flatMap(Schema.decodeUnknownEffect(schema)), - Effect.mapError( - (error) => - new ConnectorError({ - message: "Shopify API request failed", - cause: error, - }), - ), + const fetchJson = ( + schema: Schema.Decoder, + path: string, + params?: Record, + ): Effect.Effect => { + const request = params + ? HttpClientRequest.get(path).pipe(HttpClientRequest.setUrlParams(params)) + : HttpClientRequest.get(path); + return Effect.scoped( + relativePathClient.execute(request).pipe( + Effect.flatMap(HttpClientResponse.filterStatusOk), + Effect.flatMap((response) => response.json), + Effect.flatMap(Schema.decodeUnknownEffect(schema)), + Effect.mapError( + (error) => + new ConnectorError({ + message: "Shopify API request failed", + cause: error, + }), ), - ); - }; + ), + ); + }; - const fetchList = ( - schema: Schema.Decoder, - path: string, - options: { - readonly limit: number; - readonly nextUrl?: string; - }, - ): Effect.Effect, ConnectorError, R> => { - const useAbsolute = typeof options.nextUrl === "string" && isAbsoluteUrl(options.nextUrl); - const client = useAbsolute ? authAndJsonClient : relativePathClient; - const request = options.nextUrl - ? 
HttpClientRequest.get(options.nextUrl) - : HttpClientRequest.get(`${path}?limit=${options.limit}`); - const arraySchema = Schema.Array(schema) as unknown as Schema.Decoder, R>; - const listField = inferListField(path); + const fetchList = ( + schema: Schema.Decoder, + path: string, + options: { + readonly limit: number; + readonly nextUrl?: string; + }, + ): Effect.Effect, ConnectorError, R> => { + const useAbsolute = typeof options.nextUrl === "string" && isAbsoluteUrl(options.nextUrl); + const client = useAbsolute ? authAndJsonClient : relativePathClient; + const request = options.nextUrl + ? HttpClientRequest.get(options.nextUrl) + : HttpClientRequest.get(`${path}?limit=${options.limit}`); + const arraySchema = Schema.Array(schema) as unknown as Schema.Decoder, R>; + const listField = inferListField(path); - return Effect.scoped( - client.execute(request).pipe( - Effect.flatMap(HttpClientResponse.filterStatusOk), - Effect.flatMap((response) => - Effect.all({ - body: response.json, - linkHeader: Effect.succeed(response.headers["link"]), - }), - ), - Effect.flatMap(({ body, linkHeader }) => { - const unknownEnvelope = body as Record; - const unknownItems = unknownEnvelope[listField]; - return Schema.decodeUnknownEffect(arraySchema)(unknownItems).pipe( - Effect.map((items) => { - const nextUrl = extractNextUrl(linkHeader); - return { - items, - nextUrl, - hasMore: nextUrl !== null, - }; - }), - ); + return Effect.scoped( + client.execute(request).pipe( + Effect.flatMap(HttpClientResponse.filterStatusOk), + Effect.flatMap((response) => + Effect.all({ + body: response.json, + linkHeader: Effect.succeed(response.headers["link"]), }), - Effect.mapError( - (error) => - new ConnectorError({ - message: "Shopify list request failed", - cause: error, - }), - ), ), - ); - }; + Effect.flatMap(({ body, linkHeader }) => { + const unknownEnvelope = body as Record; + const unknownItems = unknownEnvelope[listField]; + return 
Schema.decodeUnknownEffect(arraySchema)(unknownItems).pipe( + Effect.map((items) => { + const nextUrl = extractNextUrl(linkHeader); + return { + items, + nextUrl, + hasMore: nextUrl !== null, + }; + }), + ); + }), + Effect.mapError( + (error) => + new ConnectorError({ + message: "Shopify list request failed", + cause: error, + }), + ), + ), + ); + }; - return { fetchJson, fetchList }; - })(); + return { fetchJson, fetchList }; +}); export const layerApiClient = ( config: ShopifyConfig, diff --git a/connectors/producer-shopify/src/connector.ts b/connectors/producer-shopify/src/connector.ts index 58c3904..41f738c 100644 --- a/connectors/producer-shopify/src/connector.ts +++ b/connectors/producer-shopify/src/connector.ts @@ -100,38 +100,38 @@ const resolveWebhookDispatch = (options: { } }; -const makeShopifyConnector = ( +const makeShopifyConnector = Effect.fnUntraced(function* ( config: ShopifyConfig, -): Effect.Effect => - Effect.fnUntraced(function* () { - const api = yield* ShopifyApiClient; - const productStreams = yield* makeEntityStreams({ - api, - schema: ProductSchema, - path: "/products.json", - cursorField: "updated_at", - limit: 50, - }); - - const connector = defineConnector({ - name: "producer-shopify", - entities: [ - defineEntity({ - name: "products", - schema: ProductSchema, - primaryKey: "id", - live: productStreams.live, - backfill: productStreams.backfill, - }), - ], - events: [], - }); - - const webhookRoute = Webhook.route({ - path: "/webhooks/shopify", - schema: WebhookPayloadSchema, - handle: (payload, request, rawBody) => - Effect.fn("shopify/webhook/handle")(function* () { +): Effect.fn.Return { + const api = yield* ShopifyApiClient; + const productStreams = yield* makeEntityStreams({ + api, + schema: ProductSchema, + path: "/products.json", + cursorField: "updated_at", + limit: 50, + }); + + const connector = defineConnector({ + name: "producer-shopify", + entities: [ + defineEntity({ + name: "products", + schema: ProductSchema, + 
primaryKey: "id", + live: productStreams.live, + backfill: productStreams.backfill, + }), + ], + events: [], + }); + + const webhookRoute = Webhook.route({ + path: "/webhooks/shopify", + schema: WebhookPayloadSchema, + handle: (payload, request, rawBody) => + Effect.withSpan( + Effect.gen(function* () { const topic = request.headers["x-shopify-topic"] ?? ""; if (Option.isSome(config.webhookSecret)) { @@ -155,24 +155,29 @@ const makeShopifyConnector = ( topic, products: productStreams, }); - })(), - }); + }), + "shopify/webhook/handle", + ), + }); - if (Option.isNone(config.webhookSecret)) { - yield* Effect.logWarning( - "SHOPIFY_WEBHOOK_SECRET is not set. Incoming webhooks will not be signature-verified.", - ); - } + if (Option.isNone(config.webhookSecret)) { + yield* Effect.logWarning( + "SHOPIFY_WEBHOOK_SECRET is not set. Incoming webhooks will not be signature-verified.", + ); + } - return { connector, routes: [webhookRoute] }; - })().pipe(Effect.annotateLogs({ component: "producer-shopify" })); + return { connector, routes: [webhookRoute] }; +}); export const layerConfig: Layer.Layer = Layer.effect(ShopifyConnector)( - Effect.fnUntraced(function* () { + Effect.gen(function* () { const config = yield* ShopifyConfigConfig; - return yield* makeShopifyConnector(config).pipe(Effect.provide(layerApiClient(config))); - })().pipe( + return yield* makeShopifyConnector(config).pipe( + Effect.annotateLogs({ component: "producer-shopify" }), + Effect.provide(layerApiClient(config)), + ); + }).pipe( Effect.mapError((error) => error instanceof ConnectorError ? error diff --git a/connectors/producer-shopify/src/streams.ts b/connectors/producer-shopify/src/streams.ts index 03c786e..f4a4748 100644 --- a/connectors/producer-shopify/src/streams.ts +++ b/connectors/producer-shopify/src/streams.ts @@ -55,19 +55,20 @@ const setCutoff = (deferred: Deferred.Deferred, cursor: Cursor) = // Enqueue a single webhook row after recording its cursor as the backfill // cutoff. 
This is safe to call many times — Deferred.succeed is idempotent. -export const dispatchEntityWebhook = >(options: { +export const dispatchEntityWebhook = Effect.fnUntraced(function* < + T extends Record, +>(options: { readonly queue: Streams.WebhookStream; readonly cutoff: Deferred.Deferred; readonly row: T; readonly cursor: Cursor; -}): Effect.Effect => - Effect.fnUntraced(function* () { - yield* setCutoff(options.cutoff, options.cursor); - return yield* Queue.offer(options.queue.queue, { - cursor: options.cursor, - rows: [options.row], - }).pipe(Effect.asVoid); - })(); +}) { + yield* setCutoff(options.cutoff, options.cursor); + return yield* Queue.offer(options.queue.queue, { + cursor: options.cursor, + rows: [options.row], + }).pipe(Effect.asVoid); +}); // Backfill stream for a single entity. Waits for the cutoff deferred to // resolve (set by the first live webhook or by initialCutoff), then pages @@ -125,16 +126,17 @@ export type EntityStreams> = { // Convenience factory: creates the live webhook queue, the cutoff deferred, // and the backfill stream all at once. Callers destructure the result into a // defineEntity() call. 
-export const makeEntityStreams = >(options: { +export const makeEntityStreams = Effect.fnUntraced(function* < + T extends Record, +>(options: { readonly api: ShopifyApiClientService; readonly schema: Schema.Decoder; readonly path: string; readonly cursorField: keyof T & string; readonly limit?: number; -}): Effect.Effect, ConnectorError> => - Effect.fnUntraced(function* () { - const queue = yield* Streams.makeWebhookQueue({ capacity: 1024 }); - const cutoff = yield* Deferred.make(); - const backfill = makeBackfillStream({ ...options, cutoff }); - return { live: queue, cutoff, backfill }; - })(); +}) { + const queue = yield* Streams.makeWebhookQueue({ capacity: 1024 }); + const cutoff = yield* Deferred.make(); + const backfill = makeBackfillStream({ ...options, cutoff }); + return { live: queue, cutoff, backfill }; +}); diff --git a/packages/connector-kit/src/publisher/wings.ts b/packages/connector-kit/src/publisher/wings.ts index 2bb0d76..39ff60f 100644 --- a/packages/connector-kit/src/publisher/wings.ts +++ b/packages/connector-kit/src/publisher/wings.ts @@ -13,7 +13,7 @@ export type WingsPublisherConfig = { /** Map of entity/event name to Wings topic. */ readonly topics: Record; /** per-stream partition value (key is entity/event name). */ - readonly partitionValues?: Record; + readonly partitionValues?: Record; }; /** Publisher entry for a single entity/event. */ @@ -23,7 +23,7 @@ type PublisherEntry = { /** Partition field name (if any). */ readonly partitionField?: string; /** Partition value (if any). */ - readonly partitionValue?: Wings.Partition.PartitionValue; + readonly partitionValue?: Wings.PartitionValue.PartitionValue; }; /** Convert JSON rows into an Arrow RecordBatch for Wings. Returns a typed failure if rows are empty. 
*/ diff --git a/packages/effect-vcr/src/cassette-store.ts b/packages/effect-vcr/src/cassette-store.ts index cdb2944..0dd0746 100644 --- a/packages/effect-vcr/src/cassette-store.ts +++ b/packages/effect-vcr/src/cassette-store.ts @@ -32,17 +32,16 @@ export class CassetteStore extends Context.Service => - Effect.fnUntraced(function* () { - const now = yield* DateTime.now; - return { - meta: { - createdAt: DateTime.formatIso(now), - version: "1", - }, - entries: {}, - }; - })(); +export const createEmptyCassette = Effect.fnUntraced(function* (): Effect.fn.Return { + const now = yield* DateTime.now; + return { + meta: { + createdAt: DateTime.formatIso(now), + version: "1", + }, + entries: {}, + }; +}); export const createEmptyCassetteFile = (): Effect.Effect => Effect.map(createEmptyCassette(), (cassette) => ({ diff --git a/packages/effect-vcr/src/file-system-cassette-store.ts b/packages/effect-vcr/src/file-system-cassette-store.ts index b9c2555..65a43a9 100644 --- a/packages/effect-vcr/src/file-system-cassette-store.ts +++ b/packages/effect-vcr/src/file-system-cassette-store.ts @@ -16,7 +16,7 @@ export type FileSystemCassetteStoreConfig = { */ export const layer = (config: FileSystemCassetteStoreConfig = {}) => Layer.effect(CassetteStore.CassetteStore)( - Effect.fnUntraced(function* () { + Effect.gen(function* () { const fs = yield* FileSystem.FileSystem; const path = yield* Path.Path; @@ -86,5 +86,5 @@ export const layer = (config: FileSystemCassetteStoreConfig = {}) => save, loadOrInit, }); - })(), + }), ); diff --git a/packages/effect-vcr/src/vcr-http-client.ts b/packages/effect-vcr/src/vcr-http-client.ts index 621c2d1..da2770e 100644 --- a/packages/effect-vcr/src/vcr-http-client.ts +++ b/packages/effect-vcr/src/vcr-http-client.ts @@ -33,34 +33,33 @@ const toCassetteFileName = (name: string): string => * - .cassette file name * - export key = current test name (describe > test) */ -const resolveCassetteLocation = (config: VcrConfig) => - Effect.fnUntraced(function* 
() { - const path = yield* Path.Path; - if (config.cassetteName) { - return { - name: toCassetteFileName(config.cassetteName), - exportKey: "default", - }; - } +const resolveCassetteLocation = Effect.fnUntraced(function* (config: VcrConfig) { + const path = yield* Path.Path; + if (config.cassetteName) { + return { + name: toCassetteFileName(config.cassetteName), + exportKey: "default", + }; + } - const { testPath, currentTestName } = getVitestState(); - if (!testPath || !currentTestName) { - return yield* Effect.fail( - new VcrHttpClientError({ - message: - "VCR cassette path could not be inferred. Provide cassetteName when not running in Vitest.", - }), - ); - } + const { testPath, currentTestName } = getVitestState(); + if (!testPath || !currentTestName) { + return yield* Effect.fail( + new VcrHttpClientError({ + message: + "VCR cassette path could not be inferred. Provide cassetteName when not running in Vitest.", + }), + ); + } - const fileName = path.basename(testPath, ".ts"); - const cassetteName = `${fileName}.cassette`; + const fileName = path.basename(testPath, ".ts"); + const cassetteName = `${fileName}.cassette`; - return { - name: cassetteName, - exportKey: currentTestName, - }; - })(); + return { + name: cassetteName, + exportKey: currentTestName, + }; +}); /** * Apply defaults for common VCR behavior while preserving explicit overrides. @@ -293,7 +292,7 @@ const replay = ( /** * Record a live response into the cassette and return the original response. */ -const record = ( +const record = Effect.fnUntraced(function* ( store: CassetteStoreService, request: HttpClientRequest.HttpClientRequest, vcrRequest: VcrRequest, @@ -301,114 +300,112 @@ const record = ( config: VcrConfig, name: string, exportKey: string, -) => - Effect.fnUntraced(function* () { - const response = yield* effect; - const body = yield* response.text; - const vcrResponse = toVcrResponse(response, body); - - const sanitizedRequest = config.redact - ? 
redactRequest(vcrRequest, { - redactHeaders: config.redact.requestHeaders, - redactBodyKeys: config.redact.requestBodyKeys, - }) - : vcrRequest; - const sanitizedResponse = config.redact - ? redactResponse(vcrResponse, { - redactHeaders: config.redact.responseHeaders, - redactBodyKeys: config.redact.responseBodyKeys, - }) - : vcrResponse; - - const file = yield* loadOrInitCassetteFile(store, name, request); - const cassette = file.exports[exportKey] ?? (yield* createEmptyCassette()); - const key = yield* buildRequestKey(vcrRequest, { - ignoreHeaders: config.matchIgnore?.requestHeaders, - ignoreBodyKeys: config.matchIgnore?.requestBodyKeys, - }); - const next: Cassette = { - ...cassette, - entries: { - ...cassette.entries, - [key]: { - request: sanitizedRequest, - response: sanitizedResponse, - }, - }, - }; - const nextFile: CassetteFile = { - ...file, - exports: { - ...file.exports, - [exportKey]: next, +): Effect.fn.Return { + const response = yield* effect; + const body = yield* response.text; + const vcrResponse = toVcrResponse(response, body); + + const sanitizedRequest = config.redact + ? redactRequest(vcrRequest, { + redactHeaders: config.redact.requestHeaders, + redactBodyKeys: config.redact.requestBodyKeys, + }) + : vcrRequest; + const sanitizedResponse = config.redact + ? redactResponse(vcrResponse, { + redactHeaders: config.redact.responseHeaders, + redactBodyKeys: config.redact.responseBodyKeys, + }) + : vcrResponse; + + const file = yield* loadOrInitCassetteFile(store, name, request); + const cassette = file.exports[exportKey] ?? 
(yield* createEmptyCassette()); + const key = yield* buildRequestKey(vcrRequest, { + ignoreHeaders: config.matchIgnore?.requestHeaders, + ignoreBodyKeys: config.matchIgnore?.requestBodyKeys, + }); + const next: Cassette = { + ...cassette, + entries: { + ...cassette.entries, + [key]: { + request: sanitizedRequest, + response: sanitizedResponse, }, - }; - yield* saveCassetteFile(store, name, nextFile, request); - return response; - })(); + }, + }; + const nextFile: CassetteFile = { + ...file, + exports: { + ...file.exports, + [exportKey]: next, + }, + }; + yield* saveCassetteFile(store, name, nextFile, request); + return response; +}); /** * Build a VCR-aware HttpClient that replays or records per config. */ -const makeVcrHttpClient = (config: VcrConfig = {}) => - Effect.fnUntraced(function* () { - const live = yield* HttpClient.HttpClient; - const normalized = normalizeConfig(config); +const makeVcrHttpClient = Effect.fnUntraced(function* (config: VcrConfig = {}) { + const live = yield* HttpClient.HttpClient; + const normalized = normalizeConfig(config); - const isCi = yield* Config.boolean("CI").pipe(Config.withDefault(false)); + const isCi = yield* Config.boolean("CI").pipe(Config.withDefault(false)); - const disabledVcrs = yield* AckDisableVcrConfig; - if (shouldDisableVcr(normalized.vcrName, disabledVcrs)) { - return live; - } + const disabledVcrs = yield* AckDisableVcrConfig; + if (shouldDisableVcr(normalized.vcrName, disabledVcrs)) { + return live; + } - const store = yield* CassetteStore; + const store = yield* CassetteStore; - const { name, exportKey } = yield* resolveCassetteLocation(normalized); + const { name, exportKey } = yield* resolveCassetteLocation(normalized); - return live.pipe( - HttpClient.transform((effect, request) => - Effect.fnUntraced(function* () { - const vcrRequest = toVcrRequest(request); - if (normalized.mode === "replay") { - return yield* replay(store, request, vcrRequest, normalized, name, exportKey); - } + return live.pipe( + 
HttpClient.transform( + Effect.fnUntraced(function* (effect, request) { + const vcrRequest = toVcrRequest(request); + if (normalized.mode === "replay") { + return yield* replay(store, request, vcrRequest, normalized, name, exportKey); + } - if (normalized.mode === "record") { - return yield* record(store, request, vcrRequest, effect, normalized, name, exportKey); - } + if (normalized.mode === "record") { + return yield* record(store, request, vcrRequest, effect, normalized, name, exportKey); + } - const available = yield* store - .exists(name) - .pipe(Effect.mapError((error) => toRequestError(request, error))); - - if (!available) { - if (isCi) { - return yield* Effect.fail( - new HttpClientError.HttpClientError({ - reason: new HttpClientError.TransportError({ - request, - description: "VCR cassette missing in CI for auto mode", - }), - }), - ); - } + const available = yield* store + .exists(name) + .pipe(Effect.mapError((error) => toRequestError(request, error))); - return yield* record(store, request, vcrRequest, effect, normalized, name, exportKey); + if (!available) { + if (isCi) { + return yield* Effect.fail( + new HttpClientError.HttpClientError({ + reason: new HttpClientError.TransportError({ + request, + description: "VCR cassette missing in CI for auto mode", + }), + }), + ); } - const cassette = yield* readCassetteExport(store, name, exportKey, request); - const entry = yield* findEntry(vcrRequest, cassette, normalized); + return yield* record(store, request, vcrRequest, effect, normalized, name, exportKey); + } - if (entry) { - return replayResponse(request, entry); - } + const cassette = yield* readCassetteExport(store, name, exportKey, request); + const entry = yield* findEntry(vcrRequest, cassette, normalized); - return yield* record(store, request, vcrRequest, effect, normalized, name, exportKey); - })(), - ), - ); - })(); + if (entry) { + return replayResponse(request, entry); + } + + return yield* record(store, request, vcrRequest, effect, 
normalized, name, exportKey); + }), + ), + ); +}); /** * Layer that provides a VCR-wrapped HttpClient. diff --git a/packages/wings/src/data-plane/fetcher.ts b/packages/wings/src/data-plane/fetcher.ts index b37e011..603b27a 100644 --- a/packages/wings/src/data-plane/fetcher.ts +++ b/packages/wings/src/data-plane/fetcher.ts @@ -47,51 +47,50 @@ import { FetchTicket } from "../proto/utils"; * Stream.runDrain, * ) */ -export const fetch = ( +export const fetch = Effect.fnUntraced(function* ( client: ArrowFlightClientService, options: FetchOptions, -): Effect.Effect, never> => - Effect.fnUntraced(function* () { - const schema = arrowSchemaFromProto(ArrowTypeCodec.ArrowSchema.toProto(options.topic.schema)); - // let currentOffset = options.offset ?? 0n; - const currentOffsetRef = yield* Ref.make(options.offset ?? 0n); +): Effect.fn.Return, never> { + const schema = arrowSchemaFromProto(ArrowTypeCodec.ArrowSchema.toProto(options.topic.schema)); + // let currentOffset = options.offset ?? 0n; + const currentOffsetRef = yield* Ref.make(options.offset ?? 0n); - return Stream.fromEffectRepeat( - Effect.gen(function* () { - const currentOffset = yield* Ref.get(currentOffsetRef); + return Stream.fromEffectRepeat( + Effect.gen(function* () { + const currentOffset = yield* Ref.get(currentOffsetRef); - const ticket = createAny(FetchTicket, { - topicName: options.topic.name, - // @ts-expect-error - protobuf type incompatibility between different proto files - partitionValue: options.partitionValue, - offset: currentOffset, - minBatchSize: options.minBatchSize ?? 1, - maxBatchSize: options.maxBatchSize ?? 100, - }); + const ticket = createAny(FetchTicket, { + topicName: options.topic.name, + // @ts-expect-error - protobuf type incompatibility between different proto files + partitionValue: options.partitionValue, + offset: currentOffset, + minBatchSize: options.minBatchSize ?? 1, + maxBatchSize: options.maxBatchSize ?? 
100, + }); - const batches: RecordBatch[] = yield* client.doGet(createTicket(ticket), { schema }).pipe( - Stream.runCollect, - Effect.map((results) => Array.from(results, ({ batch }) => batch)), - Effect.mapError( - (error) => - new WingsError({ - message: "Failed to fetch data", - cause: error, - }), - ), - ); + const batches: RecordBatch[] = yield* client.doGet(createTicket(ticket), { schema }).pipe( + Stream.runCollect, + Effect.map((results) => Array.from(results, ({ batch }) => batch)), + Effect.mapError( + (error) => + new WingsError({ + message: "Failed to fetch data", + cause: error, + }), + ), + ); - // Update offset. - if (batches.length > 0) { - const lastBatch = batches[batches.length - 1]; - const offsetColumn = lastBatch.getChild("__offset__"); - if (offsetColumn && offsetColumn.length > 0) { - const lastOffset = offsetColumn.get(offsetColumn.length - 1); - yield* Ref.update(currentOffsetRef, (_offset) => lastOffset + 1n); - } + // Update offset. + if (batches.length > 0) { + const lastBatch = batches[batches.length - 1]; + const offsetColumn = lastBatch.getChild("__offset__"); + if (offsetColumn && offsetColumn.length > 0) { + const lastOffset = offsetColumn.get(offsetColumn.length - 1); + yield* Ref.update(currentOffsetRef, (_offset) => lastOffset + 1n); } + } - return batches; - }), - ).pipe(Stream.flatMap((batches) => Stream.fromIterable(batches))); - })(); + return batches; + }), + ).pipe(Stream.flatMap((batches) => Stream.fromIterable(batches))); +}); diff --git a/packages/wings/src/data-plane/publisher.ts b/packages/wings/src/data-plane/publisher.ts index c0254c0..0520139 100644 --- a/packages/wings/src/data-plane/publisher.ts +++ b/packages/wings/src/data-plane/publisher.ts @@ -33,186 +33,185 @@ export interface Publisher { * The publisher manages a background fiber that processes responses. * The fiber lifecycle is tied to the provided scope (typically the WingsClient layer). 
*/ -export const makePublisher = ( +export const makePublisher = Effect.fnUntraced(function* ( client: ArrowFlightClientService, options: { readonly topic: ClusterSchema.Topic.Topic; readonly partitionValue?: PartitionValue; }, -): Effect.Effect => - Effect.fnUntraced(function* (): Effect.fn.Return { - const channel = new Channel(); - const { topic, partitionValue: defaultPartitionValue } = options; - - // Find partition key index using field id directly from our ArrowSchema - const partitionKeyIndex = - topic.partitionKey !== undefined - ? topic.schema.fields.findIndex((field) => field.id === topic.partitionKey) - : undefined; - - if (topic.partitionKey !== undefined && partitionKeyIndex === -1) { - return yield* Effect.fail( - new WingsError({ - message: `Partition key field id ${topic.partitionKey.toString()} not found in schema`, - }), - ); - } - - const fullSchema = arrowSchemaFromProto(ArrowTypeCodec.ArrowSchema.toProto(topic.schema)); - - const batchSchema: Schema = - partitionKeyIndex !== undefined && partitionKeyIndex >= 0 - ? new Schema( - fullSchema.fields.filter((_, idx) => idx !== partitionKeyIndex), - fullSchema.metadata, - ) - : fullSchema; - - // Send initial schema message - const path: Readonly = [topic.name]; - channel.push( - FlightDataEncoder.encodeSchema(batchSchema, { - flightDescriptor: FlightDescriptor.create({ - type: FlightDescriptor_DescriptorType.PATH, - path, - }), +): Effect.fn.Return { + const channel = new Channel(); + const { topic, partitionValue: defaultPartitionValue } = options; + + // Find partition key index using field id directly from our ArrowSchema + const partitionKeyIndex = + topic.partitionKey !== undefined + ? 
topic.schema.fields.findIndex((field) => field.id === topic.partitionKey) + : undefined; + + if (topic.partitionKey !== undefined && partitionKeyIndex === -1) { + return yield* Effect.fail( + new WingsError({ + message: `Partition key field id ${topic.partitionKey.toString()} not found in schema`, }), ); + } + + const fullSchema = arrowSchemaFromProto(ArrowTypeCodec.ArrowSchema.toProto(topic.schema)); + + const batchSchema: Schema = + partitionKeyIndex !== undefined && partitionKeyIndex >= 0 + ? new Schema( + fullSchema.fields.filter((_, idx) => idx !== partitionKeyIndex), + fullSchema.metadata, + ) + : fullSchema; + + // Send initial schema message + const path: Readonly = [topic.name]; + channel.push( + FlightDataEncoder.encodeSchema(batchSchema, { + flightDescriptor: FlightDescriptor.create({ + type: FlightDescriptor_DescriptorType.PATH, + path, + }), + }), + ); - const responseIterator = client.doPut(channel)[Symbol.asyncIterator](); - - const initialResult = yield* Effect.tryPromise({ - try: () => responseIterator.next(), - catch: (error) => - new WingsError({ - message: "Failed to start push stream", - cause: error, - }), - }); - - if (initialResult.done) { - return yield* Effect.fail(new WingsError({ message: "Failed to create publisher" })); - } - - const meta = IngestionResponseMetadata.decode(initialResult.value.appMetadata); - if (meta.requestId !== 0n) { - return yield* Effect.fail(new WingsError({ message: "Invalid initial response id" })); - } - - const requestIdRef = yield* Ref.make(1n); - const pendingRef = yield* Ref.make( - new Map>(), - ); + const responseIterator = client.doPut(channel)[Symbol.asyncIterator](); - // Background fiber that processes responses - const processResponses = Effect.gen(function* () { - while (true) { - const result = yield* Effect.tryPromise({ - try: () => responseIterator.next(), - catch: (error) => + const initialResult = yield* Effect.tryPromise({ + try: () => responseIterator.next(), + catch: (error) => + new 
WingsError({ + message: "Failed to start push stream", + cause: error, + }), + }); + + if (initialResult.done) { + return yield* Effect.fail(new WingsError({ message: "Failed to create publisher" })); + } + + const meta = IngestionResponseMetadata.decode(initialResult.value.appMetadata); + if (meta.requestId !== 0n) { + return yield* Effect.fail(new WingsError({ message: "Invalid initial response id" })); + } + + const requestIdRef = yield* Ref.make(1n); + const pendingRef = yield* Ref.make( + new Map>(), + ); + + // Background fiber that processes responses + const processResponses = Effect.gen(function* () { + while (true) { + const result = yield* Effect.tryPromise({ + try: () => responseIterator.next(), + catch: (error) => + new WingsError({ + message: "Response stream error", + cause: error, + }), + }); + + if (result.done) { + const pending = yield* Ref.get(pendingRef); + for (const [requestId, deferred] of pending.entries()) { + yield* Deferred.fail( + deferred, new WingsError({ - message: "Response stream error", - cause: error, + message: `Stream closed waiting for response: ${requestId}`, }), - }); + ); + } + yield* Ref.set(pendingRef, new Map()); + break; + } - if (result.done) { - const pending = yield* Ref.get(pendingRef); - for (const [requestId, deferred] of pending.entries()) { - yield* Deferred.fail( - deferred, - new WingsError({ - message: `Stream closed waiting for response: ${requestId}`, - }), - ); - } - yield* Ref.set(pendingRef, new Map()); - break; + const response = IngestionResponseMetadata.decode(result.value.appMetadata); + if (response.result === undefined) { + // Invalid response - fail all pending + const pending = yield* Ref.get(pendingRef); + const error = new WingsError({ message: "Invalid push response" }); + for (const deferred of pending.values()) { + yield* Deferred.fail(deferred, error); } + yield* Ref.set(pendingRef, new Map()); + return yield* Effect.fail(error); + } - const response = 
IngestionResponseMetadata.decode(result.value.appMetadata); - if (response.result === undefined) { - // Invalid response - fail all pending - const pending = yield* Ref.get(pendingRef); - const error = new WingsError({ message: "Invalid push response" }); - for (const deferred of pending.values()) { - yield* Deferred.fail(deferred, error); - } - yield* Ref.set(pendingRef, new Map()); - return yield* Effect.fail(error); + // Do match + remove in one step, so concurrent push updates don't get overwritten. + const deferred = yield* Ref.modify(pendingRef, (pending) => { + const matched = pending.get(response.requestId); + if (matched === undefined) { + return [undefined, pending] as const; } - // Do match + remove in one step, so concurrent push updates don't get overwritten. - const deferred = yield* Ref.modify(pendingRef, (pending) => { - const matched = pending.get(response.requestId); - if (matched === undefined) { - return [undefined, pending] as const; - } + const updated = new Map(pending); + updated.delete(response.requestId); + return [matched, updated] as const; + }); - const updated = new Map(pending); - updated.delete(response.requestId); - return [matched, updated] as const; - }); + if (deferred) { + yield* Deferred.succeed(deferred, response.result); + } + } + }); - if (deferred) { - yield* Deferred.succeed(deferred, response.result); - } + const responseFiber = yield* Effect.forkScoped(processResponses); + + yield* Effect.addFinalizer(() => + Effect.gen(function* () { + yield* Fiber.interrupt(responseFiber); + + // Fail all pending requests + const pending = yield* Ref.get(pendingRef); + const closeError = new WingsError({ message: "Publisher closed" }); + for (const deferred of pending.values()) { + yield* Deferred.fail(deferred, closeError); } - }); - const responseFiber = yield* Effect.forkScoped(processResponses); + channel.close(); + }).pipe(Effect.catchCause(() => Effect.void)), + ); - yield* Effect.addFinalizer(() => + const publisher: Publisher = 
{ + push: (options) => Effect.gen(function* () { - yield* Fiber.interrupt(responseFiber); + const requestId = yield* Ref.getAndUpdate(requestIdRef, (id) => id + 1n); + // Create deferred for response + const deferred = yield* Deferred.make(); - // Fail all pending requests - const pending = yield* Ref.get(pendingRef); - const closeError = new WingsError({ message: "Publisher closed" }); - for (const deferred of pending.values()) { - yield* Deferred.fail(deferred, closeError); + yield* Ref.update(pendingRef, (pending) => { + const updated = new Map(pending); + updated.set(requestId, deferred); + return updated; + }); + + const effectivePartitionValue = options.partitionValue ?? defaultPartitionValue; + + // Encode and send batch + const messages = FlightDataEncoder.encodeBatch(options.batch, { + appMetadata({ length }) { + assert(length === 1, "Unexpected metadata length"); + const meta = IngestionRequestMetadata.create({ + requestId, + partitionValue: effectivePartitionValue, + }); + return IngestionRequestMetadata.encode(meta).finish(); + }, + }); + + assert(messages.length === 1, "Dictionary messages not supported"); + + for (const message of messages) { + channel.push(message); } - channel.close(); - }).pipe(Effect.catchCause(() => Effect.void)), - ); + return yield* Deferred.await(deferred); + }), + }; - const publisher: Publisher = { - push: (options) => - Effect.gen(function* () { - const requestId = yield* Ref.getAndUpdate(requestIdRef, (id) => id + 1n); - // Create deferred for response - const deferred = yield* Deferred.make(); - - yield* Ref.update(pendingRef, (pending) => { - const updated = new Map(pending); - updated.set(requestId, deferred); - return updated; - }); - - const effectivePartitionValue = options.partitionValue ?? 
defaultPartitionValue; - - // Encode and send batch - const messages = FlightDataEncoder.encodeBatch(options.batch, { - appMetadata({ length }) { - assert(length === 1, "Unexpected metadata length"); - const meta = IngestionRequestMetadata.create({ - requestId, - partitionValue: effectivePartitionValue, - }); - return IngestionRequestMetadata.encode(meta).finish(); - }, - }); - - assert(messages.length === 1, "Dictionary messages not supported"); - - for (const message of messages) { - channel.push(message); - } - - return yield* Deferred.await(deferred); - }), - }; - - return publisher; - })(); + return publisher; +}); diff --git a/packages/wings/src/index.ts b/packages/wings/src/index.ts index 0d230dd..2d028a3 100644 --- a/packages/wings/src/index.ts +++ b/packages/wings/src/index.ts @@ -2,7 +2,7 @@ export * as Cluster from "./cluster"; export * as ClusterClient from "./cluster-client"; export * as WingsClient from "./data-plane"; export * as Arrow from "./arrow"; -export * as Partition from "./partition-value"; +export * as PartitionValue from "./partition-value"; export * as Schema from "./schema"; export * as Topic from "./topic"; export * from "./errors"; diff --git a/packages/wings/src/partition-value.ts b/packages/wings/src/partition-value.ts index 6ed1081..15a787c 100644 --- a/packages/wings/src/partition-value.ts +++ b/packages/wings/src/partition-value.ts @@ -8,103 +8,102 @@ export { PartitionValue } from "./proto/wings/v1/log_metadata"; * * @example * ```ts - * const partition = PV.int32(42) + * const partition = PartitionValue.int32(42) * ``` */ -export const PV = { - null(): PartitionValue { - return PartitionValue.create({ - value: { - $case: "nullValue", - }, - }); - }, - int8(value: number): PartitionValue { - return PartitionValue.create({ - value: { - $case: "int8Value", - int8Value: value, - }, - }); - }, - int16(value: number): PartitionValue { - return PartitionValue.create({ - value: { - $case: "int16Value", - int16Value: value, - }, - }); - 
}, - int32(value: number): PartitionValue { - return PartitionValue.create({ - value: { - $case: "int32Value", - int32Value: value, - }, - }); - }, - int64(value: bigint): PartitionValue { - return PartitionValue.create({ - value: { - $case: "int64Value", - int64Value: value, - }, - }); - }, - uint8(value: number): PartitionValue { - return PartitionValue.create({ - value: { - $case: "uint8Value", - uint8Value: value, - }, - }); - }, - uint16(value: number): PartitionValue { - return PartitionValue.create({ - value: { - $case: "uint16Value", - uint16Value: value, - }, - }); - }, - uint32(value: number): PartitionValue { - return PartitionValue.create({ - value: { - $case: "uint32Value", - uint32Value: value, - }, - }); - }, - uint64(value: bigint): PartitionValue { - return PartitionValue.create({ - value: { - $case: "uint64Value", - uint64Value: value, - }, - }); - }, - stringValue(value: string): PartitionValue { - return PartitionValue.create({ - value: { - $case: "stringValue", - stringValue: value, - }, - }); - }, - bytesValue(value: Uint8Array): PartitionValue { - return PartitionValue.create({ - value: { - $case: "bytesValue", - bytesValue: value, - }, - }); - }, - boolValue(value: boolean): PartitionValue { - return PartitionValue.create({ - value: { - $case: "boolValue", - boolValue: value, - }, - }); - }, -}; +const nullPartitionValue = (): PartitionValue => + PartitionValue.create({ + value: { + $case: "nullValue", + }, + }); + +export { nullPartitionValue as null }; + +export const int8 = (value: number): PartitionValue => + PartitionValue.create({ + value: { + $case: "int8Value", + int8Value: value, + }, + }); + +export const int16 = (value: number): PartitionValue => + PartitionValue.create({ + value: { + $case: "int16Value", + int16Value: value, + }, + }); + +export const int32 = (value: number): PartitionValue => + PartitionValue.create({ + value: { + $case: "int32Value", + int32Value: value, + }, + }); + +export const int64 = (value: bigint): 
PartitionValue => + PartitionValue.create({ + value: { + $case: "int64Value", + int64Value: value, + }, + }); + +export const uint8 = (value: number): PartitionValue => + PartitionValue.create({ + value: { + $case: "uint8Value", + uint8Value: value, + }, + }); + +export const uint16 = (value: number): PartitionValue => + PartitionValue.create({ + value: { + $case: "uint16Value", + uint16Value: value, + }, + }); + +export const uint32 = (value: number): PartitionValue => + PartitionValue.create({ + value: { + $case: "uint32Value", + uint32Value: value, + }, + }); + +export const uint64 = (value: bigint): PartitionValue => + PartitionValue.create({ + value: { + $case: "uint64Value", + uint64Value: value, + }, + }); + +export const stringValue = (value: string): PartitionValue => + PartitionValue.create({ + value: { + $case: "stringValue", + stringValue: value, + }, + }); + +export const bytesValue = (value: Uint8Array): PartitionValue => + PartitionValue.create({ + value: { + $case: "bytesValue", + bytesValue: value, + }, + }); + +export const boolValue = (value: boolean): PartitionValue => + PartitionValue.create({ + value: { + $case: "boolValue", + boolValue: value, + }, + }); diff --git a/packages/wings/src/schema/types.ts b/packages/wings/src/schema/types.ts index 964c911..6e47458 100644 --- a/packages/wings/src/schema/types.ts +++ b/packages/wings/src/schema/types.ts @@ -16,7 +16,7 @@ const annotateWingsType = ( /** * Wraps a schema to accept null values and marks the Wings Arrow field nullable. */ -export const WingsNullOr = (schema: Schema.Schema): Schema.Schema => { +export const NullOr = (schema: Schema.Schema): Schema.Schema => { const existingAnnotations = (SchemaAST.resolve(schema.ast) ?? {}) as Record; const nullOr = Schema.NullOr(schema); const nextAnnotations: Record = { @@ -35,103 +35,103 @@ function _readWingsTypeAnnotation(schema: Schema.Top): WingsTypeAnnotation | und } /** Arrow UTF-8 string schema. 
*/ -export const WingsString = annotateWingsType(Schema.String, { +export const String = annotateWingsType(Schema.String, { _tag: "primitive", type: "utf8", }); /** Arrow boolean schema. */ -export const WingsBool = annotateWingsType(Schema.Boolean, { +export const Bool = annotateWingsType(Schema.Boolean, { _tag: "primitive", type: "bool", }); /** Arrow binary schema. */ -export const WingsBinary = annotateWingsType(Schema.Uint8Array, { +export const Binary = annotateWingsType(Schema.Uint8Array, { _tag: "primitive", type: "binary", }); /** Arrow uint8 schema. */ -export const WingsUInt8 = annotateWingsType(Schema.Number, { +export const UInt8 = annotateWingsType(Schema.Number, { _tag: "primitive", type: "uint8", }); /** Arrow int8 schema. */ -export const WingsInt8 = annotateWingsType(Schema.Number, { +export const Int8 = annotateWingsType(Schema.Number, { _tag: "primitive", type: "int8", }); /** Arrow uint16 schema. */ -export const WingsUInt16 = annotateWingsType(Schema.Number, { +export const UInt16 = annotateWingsType(Schema.Number, { _tag: "primitive", type: "uint16", }); /** Arrow int16 schema. */ -export const WingsInt16 = annotateWingsType(Schema.Number, { +export const Int16 = annotateWingsType(Schema.Number, { _tag: "primitive", type: "int16", }); /** Arrow uint32 schema. */ -export const WingsUInt32 = annotateWingsType(Schema.Number, { +export const UInt32 = annotateWingsType(Schema.Number, { _tag: "primitive", type: "uint32", }); /** Arrow int32 schema. */ -export const WingsInt32 = annotateWingsType(Schema.Number, { +export const Int32 = annotateWingsType(Schema.Number, { _tag: "primitive", type: "int32", }); /** Arrow uint64 schema. */ -export const WingsUInt64 = annotateWingsType(Schema.BigInt, { +export const UInt64 = annotateWingsType(Schema.BigInt, { _tag: "primitive", type: "uint64", }); /** Arrow int64 schema. 
*/ -export const WingsInt64 = annotateWingsType(Schema.BigInt, { +export const Int64 = annotateWingsType(Schema.BigInt, { _tag: "primitive", type: "int64", }); /** Arrow float16 schema. */ -export const WingsFloat16 = annotateWingsType(Schema.Number, { +export const Float16 = annotateWingsType(Schema.Number, { _tag: "primitive", type: "float16", }); /** Arrow float32 schema. */ -export const WingsFloat32 = annotateWingsType(Schema.Number, { +export const Float32 = annotateWingsType(Schema.Number, { _tag: "primitive", type: "float32", }); /** Arrow float64 schema. */ -export const WingsFloat64 = annotateWingsType(Schema.Number, { +export const Float64 = annotateWingsType(Schema.Number, { _tag: "primitive", type: "float64", }); /** Arrow date32 schema. */ -export const WingsDate32 = annotateWingsType(Schema.Date, { +export const Date32 = annotateWingsType(Schema.Date, { _tag: "primitive", type: "date32", }); /** Arrow date64 schema. */ -export const WingsDate64 = annotateWingsType(Schema.Date, { +export const Date64 = annotateWingsType(Schema.Date, { _tag: "primitive", type: "date64", }); /** Arrow timestamp schema with time unit and timezone. */ -export const WingsTimestamp = (timeUnit: TimeUnit, timezone?: string) => +export const Timestamp = (timeUnit: TimeUnit, timezone?: string) => annotateWingsType(Schema.Date, { _tag: "timestamp", timeUnit, @@ -139,7 +139,7 @@ export const WingsTimestamp = (timeUnit: TimeUnit, timezone?: string) => }); /** Arrow duration schema with time unit. */ -export const WingsDuration = (timeUnit: TimeUnit) => +export const Duration = (timeUnit: TimeUnit) => annotateWingsType(Schema.Number, { _tag: "duration", timeUnit, @@ -149,7 +149,7 @@ export const WingsDuration = (timeUnit: TimeUnit) => * Arrow list schema with a single item field definition. * The item schema must include a FieldId annotation. 
*/ -export const WingsList = (item: Item) => +export const List = (item: Item) => annotateWingsType(Schema.Array(item), { _tag: "list", item, @@ -158,28 +158,28 @@ export const WingsList = (item: Item) => /** * Convenience alias for defining nested Wings structs. */ -export const WingsStruct = Schema.Struct; +export const Struct = Schema.Struct; export const Types = { - Binary: WingsBinary, - Bool: WingsBool, - Date32: WingsDate32, - Date64: WingsDate64, - Duration: WingsDuration, - Float16: WingsFloat16, - Float32: WingsFloat32, - Float64: WingsFloat64, - Int8: WingsInt8, - Int16: WingsInt16, - Int32: WingsInt32, - Int64: WingsInt64, - List: WingsList, - NullOr: WingsNullOr, - String: WingsString, - Struct: WingsStruct, - Timestamp: WingsTimestamp, - UInt8: WingsUInt8, - UInt16: WingsUInt16, - UInt32: WingsUInt32, - UInt64: WingsUInt64, + Binary, + Bool, + Date32, + Date64, + Duration, + Float16, + Float32, + Float64, + Int8, + Int16, + Int32, + Int64, + List, + NullOr, + String, + Struct, + Timestamp, + UInt8, + UInt16, + UInt32, + UInt64, } as const; diff --git a/packages/wings/src/schema/wings-annotations.ts b/packages/wings/src/schema/wings-annotations.ts deleted file mode 100644 index fd9e33a..0000000 --- a/packages/wings/src/schema/wings-annotations.ts +++ /dev/null @@ -1,86 +0,0 @@ -import type * as Schema from "effect/Schema"; - -import type { TimeUnit } from "../cluster/arrow-type"; - -/** - * Field id annotation used to populate Arrow field ids. - */ -export const FieldId = Symbol.for("wings/fieldId"); -/** - * @internal - * Arrow type annotation used by Wings schema helpers. - */ -export const WingsType = Symbol.for("wings/arrowType"); -/** - * Field-level metadata annotation for Arrow fields. - */ -export const FieldMetadata = Symbol.for("wings/fieldMetadata"); -/** - * Schema-level metadata annotation for Arrow schemas. 
- */ -export const SchemaMetadata = Symbol.for("wings/schemaMetadata"); -/** - * @internal - * Nullable flag annotation used by the converter. - */ -export const WingsNullable = Symbol.for("wings/nullable"); - -/** - * @internal - * Arrow primitive tags supported by the Wings schema mapper. - */ -export type PrimitiveArrowTypeTag = - | "bool" - | "uint8" - | "int8" - | "uint16" - | "int16" - | "uint32" - | "int32" - | "uint64" - | "int64" - | "float16" - | "float32" - | "float64" - | "utf8" - | "binary" - | "date32" - | "date64"; - -/** - * @internal - * Internal annotation that encodes Arrow type information. - */ -export type WingsTypeAnnotation = - | { - readonly _tag: "primitive"; - readonly type: PrimitiveArrowTypeTag; - } - | { - readonly _tag: "timestamp"; - readonly timeUnit: TimeUnit; - readonly timezone?: string; - } - | { - readonly _tag: "duration"; - readonly timeUnit: TimeUnit; - } - | { - readonly _tag: "list"; - readonly item: Schema.Top; - }; - -/** - * Declares the Wings schema annotations types on the Effect Schema namespace. - */ -declare module "effect/Schema" { - namespace Annotations { - interface Schema<_A> { - [FieldId]?: number | bigint; - [WingsType]?: WingsTypeAnnotation; - [FieldMetadata]?: Readonly>; - [SchemaMetadata]?: Readonly>; - [WingsNullable]?: boolean; - } - } -} diff --git a/packages/wings/src/schema/wings-converter.ts b/packages/wings/src/schema/wings-converter.ts deleted file mode 100644 index 684912b..0000000 --- a/packages/wings/src/schema/wings-converter.ts +++ /dev/null @@ -1,217 +0,0 @@ -import type * as Schema from "effect/Schema"; - -import * as SchemaAST from "effect/SchemaAST"; - -import type { ArrowSchema, ArrowType, Field } from "../cluster/arrow-type"; - -import { - FieldId, - FieldMetadata, - type PrimitiveArrowTypeTag, - SchemaMetadata, - WingsNullable, - WingsType, - type WingsTypeAnnotation, -} from "./wings-annotations"; - -/** - * Converts a Wings Struct schema into a Wings ArrowSchema. 
- */ -export function schemaConverter( - structSchema: Schema.Struct, -): ArrowSchema { - return { - fields: convertStructFields(structSchema.fields, "root"), - metadata: readSchemaMetadata(structSchema), - }; -} - -/** - * Converts a map of struct fields into Wings Arrow fields. - */ -function convertStructFields(fields: Schema.Struct.Fields, path: string): Field[] { - return Reflect.ownKeys(fields).map((key) => { - const schema = fields[key]; - return convertField(String(key), schema, path); - }); -} - -/** - * Converts a single Wings schema into an Wings Arrow field. - */ -function convertField(name: string, schema: Schema.Top, path: string): Field { - const id = readFieldId(schema, `${path}.${name}`); - const arrowType = mapEffectTypeToArrow(schema, `${path}.${name}`); - return { - name, - id, - arrowType, - nullable: readNullable(schema), - metadata: readFieldMetadata(schema), - }; -} - -/** - * Maps a Wings schema to the corresponding Wings Arrow type. - */ -function mapEffectTypeToArrow(schema: Schema.Top, path: string): ArrowType { - const annotation = readWingsTypeAnnotation(schema); - if (annotation) { - switch (annotation._tag) { - case "primitive": - return primitiveArrowType(annotation.type); - case "timestamp": - return { - _tag: "timestamp", - timestamp: { - timeUnit: annotation.timeUnit, - timezone: annotation.timezone ?? "", - }, - }; - case "duration": - return { _tag: "duration", duration: annotation.timeUnit }; - case "list": - return { - _tag: "list", - list: { - fieldType: convertListItem(annotation.item, `${path}.item`), - }, - }; - } - } - - if (isStructSchema(schema)) { - return { - _tag: "struct", - struct: { - subFieldTypes: convertStructFields(schema.fields, path), - }, - }; - } - - throw new Error(`Unsupported schema for "${path}". Use Wings types or Schema.Struct.`); -} - -/** - * Converts the list item schema into the Wings Arrow list field definition. 
- */ -function convertListItem(itemSchema: Schema.Top, path: string): Field { - return { - name: "item", - id: readFieldId(itemSchema, path), - arrowType: mapEffectTypeToArrow(itemSchema, path), - nullable: readNullable(itemSchema), - metadata: readFieldMetadata(itemSchema), - }; -} - -/** - * Reads the FieldId annotation and normalizes it to bigint. - */ -function readFieldId(schema: Schema.Top, path: string): bigint { - const annotations = getAnnotations(schema); - const value = annotations[FieldId]; - if (value === undefined) { - throw new Error(`Missing FieldId annotation for "${path}".`); - } - if (typeof value === "bigint") { - return value; - } - if (typeof value === "number" && Number.isInteger(value)) { - return BigInt(value); - } - throw new Error(`Invalid FieldId annotation for "${path}".`); -} - -/** - * Reads the internal Arrow type annotation from a schema. - */ -function readWingsTypeAnnotation(schema: Schema.Top): WingsTypeAnnotation | undefined { - const annotations = getAnnotations(schema); - return annotations[WingsType] as WingsTypeAnnotation | undefined; -} - -/** - * Reads field-level metadata annotations. - */ -function readFieldMetadata(schema: Schema.Top): Readonly> { - const annotations = getAnnotations(schema); - const metadata = annotations[FieldMetadata] as Readonly> | undefined; - return metadata ?? {}; -} - -/** - * Reads schema-level metadata annotations. - */ -function readSchemaMetadata(schema: Schema.Top): Readonly> { - const annotations = getAnnotations(schema); - const metadata = annotations[SchemaMetadata] as Readonly> | undefined; - return metadata ?? {}; -} - -/** - * Reads whether a schema should be marked nullable for Wings Arrow. - */ -function readNullable(schema: Schema.Top): boolean { - const annotations = getAnnotations(schema); - return annotations[WingsNullable] === true; -} - -/** - * Returns the annotation map from a schema AST. 
- */ -function getAnnotations(schema: Schema.Top): Record { - return (SchemaAST.resolve(schema.ast) ?? {}) as Record; -} - -/** - * Runtime check for struct schemas that expose a fields map. - */ -function isStructSchema(schema: Schema.Top): schema is Schema.Struct { - return ( - (typeof schema === "object" || typeof schema === "function") && - schema !== null && - "fields" in schema && - typeof schema.fields === "object" - ); -} - -/** - * Maps a primitive annotation to its Wings Arrow type tag. - */ -function primitiveArrowType(type: PrimitiveArrowTypeTag): ArrowType { - switch (type) { - case "bool": - return { _tag: "bool" }; - case "uint8": - return { _tag: "uint8" }; - case "int8": - return { _tag: "int8" }; - case "uint16": - return { _tag: "uint16" }; - case "int16": - return { _tag: "int16" }; - case "uint32": - return { _tag: "uint32" }; - case "int32": - return { _tag: "int32" }; - case "uint64": - return { _tag: "uint64" }; - case "int64": - return { _tag: "int64" }; - case "float16": - return { _tag: "float16" }; - case "float32": - return { _tag: "float32" }; - case "float64": - return { _tag: "float64" }; - case "utf8": - return { _tag: "utf8" }; - case "binary": - return { _tag: "binary" }; - case "date32": - return { _tag: "date32" }; - case "date64": - return { _tag: "date64" }; - } -} diff --git a/packages/wings/src/schema/wings-types.ts b/packages/wings/src/schema/wings-types.ts deleted file mode 100644 index a851310..0000000 --- a/packages/wings/src/schema/wings-types.ts +++ /dev/null @@ -1,161 +0,0 @@ -import * as Schema from "effect/Schema"; -import * as SchemaAST from "effect/SchemaAST"; - -import type { TimeUnit } from "../cluster/arrow-type"; - -import { WingsNullable, WingsType, type WingsTypeAnnotation } from "./wings-annotations"; - -/** - * Attaches the internal Wings Arrow type annotation to a schema. 
- */ -const annotateWingsType = ( - schema: Schema.Schema, - annotation: WingsTypeAnnotation, -): Schema.Schema => schema.annotate({ [WingsType]: annotation }); - -/** - * Wraps a schema to accept null values and marks the Wings Arrow field nullable. - */ -export const WingsNullOr = (schema: Schema.Schema): Schema.Schema => { - const existingAnnotations = (SchemaAST.resolve(schema.ast) ?? {}) as Record; - const nullOr = Schema.NullOr(schema); - const nextAnnotations: Record = { - ...existingAnnotations, - [WingsNullable]: true, - }; - return nullOr.annotate(nextAnnotations); -}; - -/** - * Reads the Wings Arrow type annotation from a schema, if present. - */ -function _readWingsTypeAnnotation(schema: Schema.Top): WingsTypeAnnotation | undefined { - const annotations = (SchemaAST.resolve(schema.ast) ?? {}) as Record; - return annotations[WingsType] as WingsTypeAnnotation | undefined; -} - -/** Arrow UTF-8 string schema. */ -export const WingsString = annotateWingsType(Schema.String, { - _tag: "primitive", - type: "utf8", -}); - -/** Arrow boolean schema. */ -export const WingsBool = annotateWingsType(Schema.Boolean, { - _tag: "primitive", - type: "bool", -}); - -/** Arrow binary schema. */ -export const WingsBinary = annotateWingsType(Schema.Uint8Array, { - _tag: "primitive", - type: "binary", -}); - -/** Arrow uint8 schema. */ -export const WingsUInt8 = annotateWingsType(Schema.Number, { - _tag: "primitive", - type: "uint8", -}); - -/** Arrow int8 schema. */ -export const WingsInt8 = annotateWingsType(Schema.Number, { - _tag: "primitive", - type: "int8", -}); - -/** Arrow uint16 schema. */ -export const WingsUInt16 = annotateWingsType(Schema.Number, { - _tag: "primitive", - type: "uint16", -}); - -/** Arrow int16 schema. */ -export const WingsInt16 = annotateWingsType(Schema.Number, { - _tag: "primitive", - type: "int16", -}); - -/** Arrow uint32 schema. 
*/ -export const WingsUInt32 = annotateWingsType(Schema.Number, { - _tag: "primitive", - type: "uint32", -}); - -/** Arrow int32 schema. */ -export const WingsInt32 = annotateWingsType(Schema.Number, { - _tag: "primitive", - type: "int32", -}); - -/** Arrow uint64 schema. */ -export const WingsUInt64 = annotateWingsType(Schema.BigInt, { - _tag: "primitive", - type: "uint64", -}); - -/** Arrow int64 schema. */ -export const WingsInt64 = annotateWingsType(Schema.BigInt, { - _tag: "primitive", - type: "int64", -}); - -/** Arrow float16 schema. */ -export const WingsFloat16 = annotateWingsType(Schema.Number, { - _tag: "primitive", - type: "float16", -}); - -/** Arrow float32 schema. */ -export const WingsFloat32 = annotateWingsType(Schema.Number, { - _tag: "primitive", - type: "float32", -}); - -/** Arrow float64 schema. */ -export const WingsFloat64 = annotateWingsType(Schema.Number, { - _tag: "primitive", - type: "float64", -}); - -/** Arrow date32 schema. */ -export const WingsDate32 = annotateWingsType(Schema.Date, { - _tag: "primitive", - type: "date32", -}); - -/** Arrow date64 schema. */ -export const WingsDate64 = annotateWingsType(Schema.Date, { - _tag: "primitive", - type: "date64", -}); - -/** Arrow timestamp schema with time unit and timezone. */ -export const WingsTimestamp = (timeUnit: TimeUnit, timezone?: string) => - annotateWingsType(Schema.Date, { - _tag: "timestamp", - timeUnit, - timezone, - }); - -/** Arrow duration schema with time unit. */ -export const WingsDuration = (timeUnit: TimeUnit) => - annotateWingsType(Schema.Number, { - _tag: "duration", - timeUnit, - }); - -/** - * Arrow list schema with a single item field definition. - * The item schema must include a FieldId annotation. - */ -export const WingsList = (item: Item) => - annotateWingsType(Schema.Array(item), { - _tag: "list", - item, - }); - -/** - * Convenience alias for defining nested Wings structs. 
- */ -export const WingsStruct = Schema.Struct; diff --git a/packages/wings/test/fetcher.test.ts b/packages/wings/test/fetcher.test.ts index 4af2de2..8171e0c 100644 --- a/packages/wings/test/fetcher.test.ts +++ b/packages/wings/test/fetcher.test.ts @@ -3,7 +3,7 @@ import { TestWings } from "@useairfoil/wings-testing"; import { Effect, Layer, Stream } from "effect"; import { customAlphabet } from "nanoid"; -import { Arrow, Partition, WingsClient } from "../src"; +import { Arrow, PartitionValue, WingsClient } from "../src"; import { makeTestBatch } from "./helpers"; const makeTopicId = customAlphabet("abcdefghijklmnopqrstuvwxyz", 12); @@ -108,16 +108,16 @@ layer(testLayer, { timeout: "30 seconds" })("Fetcher", (it) => { yield* publisher.push({ batch: makeTestBatch({ partitionValue: 1000 }), - partitionValue: Partition.PV.int32(1000), + partitionValue: PartitionValue.int32(1000), }); yield* publisher.push({ batch: makeTestBatch({ partitionValue: 2000 }), - partitionValue: Partition.PV.int32(2000), + partitionValue: PartitionValue.int32(2000), }); const streamP1 = yield* WingsClient.fetch({ topic, - partitionValue: Partition.PV.int32(1000), + partitionValue: PartitionValue.int32(1000), offset: 0n, }); @@ -135,7 +135,7 @@ layer(testLayer, { timeout: "30 seconds" })("Fetcher", (it) => { const streamP2 = yield* WingsClient.fetch({ topic, - partitionValue: Partition.PV.int32(2000), + partitionValue: PartitionValue.int32(2000), offset: 0n, }); diff --git a/packages/wings/test/publisher.test.ts b/packages/wings/test/publisher.test.ts index c866448..55a9a89 100644 --- a/packages/wings/test/publisher.test.ts +++ b/packages/wings/test/publisher.test.ts @@ -3,7 +3,7 @@ import { TestWings } from "@useairfoil/wings-testing"; import { Effect, Layer } from "effect"; import { customAlphabet } from "nanoid"; -import { Partition, WingsClient } from "../src"; +import { PartitionValue, WingsClient } from "../src"; import { makeTestBatch } from "./helpers"; const makeTopicId = 
customAlphabet("abcdefghijklmnopqrstuvwxyz", 12); @@ -120,15 +120,15 @@ layer(testLayer, { timeout: "30 seconds" })("Publisher", (it) => { const b0 = publisher.push({ batch: makeTestBatch({ partitionValue: 1000 }), - partitionValue: Partition.PV.int32(1000), + partitionValue: PartitionValue.int32(1000), }); const b1 = publisher.push({ batch: makeTestBatch({ partitionValue: 2000 }), - partitionValue: Partition.PV.int32(2000), + partitionValue: PartitionValue.int32(2000), }); const b2 = publisher.push({ batch: makeTestBatch({ partitionValue: 3000 }), - partitionValue: Partition.PV.int32(3000), + partitionValue: PartitionValue.int32(3000), }); return yield* Effect.all([b0, b1, b2], { @@ -202,7 +202,7 @@ layer(testLayer, { timeout: "30 seconds" })("Publisher", (it) => { const publisher = yield* WingsClient.publisher({ topic, - partitionValue: Partition.PV.int32(5000), + partitionValue: PartitionValue.int32(5000), }); const b0 = publisher.push({ @@ -211,7 +211,7 @@ layer(testLayer, { timeout: "30 seconds" })("Publisher", (it) => { const b1 = publisher.push({ batch: makeTestBatch({ partitionValue: 6000 }), - partitionValue: Partition.PV.int32(6000), + partitionValue: PartitionValue.int32(6000), }); return yield* Effect.all([b0, b1], { concurrency: "unbounded" }); diff --git a/templates/producer-template/src/api.ts b/templates/producer-template/src/api.ts index 6c7357d..b62d9ba 100644 --- a/templates/producer-template/src/api.ts +++ b/templates/producer-template/src/api.ts @@ -37,63 +37,62 @@ export class TemplateApiClient extends Context.Service< // returns a small typed API surface. The auth header is Bearer by default; // swap it out for `setHeader("X-Api-Key", ...)`, Basic auth, or OAuth2 as // required by your upstream API. 
-export const makeTemplateApiClient = ( +export const makeTemplateApiClient = Effect.fnUntraced(function* ( config: TemplateConfig, -): Effect.Effect => - Effect.fnUntraced(function* () { - const client = (yield* HttpClient.HttpClient).pipe( - HttpClient.mapRequest(HttpClientRequest.prependUrl(config.apiBaseUrl)), - HttpClient.mapRequest(HttpClientRequest.bearerToken(config.apiToken)), - HttpClient.mapRequest(HttpClientRequest.acceptJson), - ); +): Effect.fn.Return { + const client = (yield* HttpClient.HttpClient).pipe( + HttpClient.mapRequest(HttpClientRequest.prependUrl(config.apiBaseUrl)), + HttpClient.mapRequest(HttpClientRequest.bearerToken(config.apiToken)), + HttpClient.mapRequest(HttpClientRequest.acceptJson), + ); - const fetchJson = ( - schema: Schema.Decoder, - path: string, - params?: Record, - ): Effect.Effect => { - const request = params - ? HttpClientRequest.get(path).pipe(HttpClientRequest.setUrlParams(params)) - : HttpClientRequest.get(path); - return Effect.scoped( - client.execute(request).pipe( - Effect.flatMap(HttpClientResponse.filterStatusOk), - Effect.flatMap((response) => response.json), - Effect.flatMap(Schema.decodeUnknownEffect(schema)), - Effect.mapError( - (error) => - new ConnectorError({ - message: "Template API request failed", - cause: error, - }), - ), + const fetchJson = ( + schema: Schema.Decoder, + path: string, + params?: Record, + ): Effect.Effect => { + const request = params + ? 
HttpClientRequest.get(path).pipe(HttpClientRequest.setUrlParams(params)) + : HttpClientRequest.get(path); + return Effect.scoped( + client.execute(request).pipe( + Effect.flatMap(HttpClientResponse.filterStatusOk), + Effect.flatMap((response) => response.json), + Effect.flatMap(Schema.decodeUnknownEffect(schema)), + Effect.mapError( + (error) => + new ConnectorError({ + message: "Template API request failed", + cause: error, + }), ), - ); - }; + ), + ); + }; - const fetchList = ( - schema: Schema.Decoder, - path: string, - options: { - readonly page: number; - readonly limit: number; - }, - ): Effect.Effect, ConnectorError, R> => { - const params: Record = { - _page: String(options.page), - _limit: String(options.limit), - }; - const arraySchema = Schema.Array(schema) as unknown as Schema.Decoder, R>; - return fetchJson(arraySchema, path, params).pipe( - Effect.map((items) => ({ - items, - hasMore: items.length === options.limit, - })), - ); + const fetchList = ( + schema: Schema.Decoder, + path: string, + options: { + readonly page: number; + readonly limit: number; + }, + ): Effect.Effect, ConnectorError, R> => { + const params: Record = { + _page: String(options.page), + _limit: String(options.limit), }; + const arraySchema = Schema.Array(schema) as unknown as Schema.Decoder, R>; + return fetchJson(arraySchema, path, params).pipe( + Effect.map((items) => ({ + items, + hasMore: items.length === options.limit, + })), + ); + }; - return { fetchJson, fetchList }; - })(); + return { fetchJson, fetchList }; +}); export const layerApiClient = ( config: TemplateConfig, diff --git a/templates/producer-template/src/connector.ts b/templates/producer-template/src/connector.ts index 83d1ce2..ea71018 100644 --- a/templates/producer-template/src/connector.ts +++ b/templates/producer-template/src/connector.ts @@ -93,38 +93,38 @@ const resolveWebhookDispatch = (options: { } }; -const makeTemplateConnector = ( +const makeTemplateConnector = Effect.fnUntraced(function* ( config: 
TemplateConfig, -): Effect.Effect => - Effect.fnUntraced(function* () { - const api = yield* TemplateApiClient; - const postStreams = yield* makeEntityStreams({ - api, - schema: PostSchema, - path: "/posts", - cursorField: "id", - limit: 10, - }); +): Effect.fn.Return { + const api = yield* TemplateApiClient; + const postStreams = yield* makeEntityStreams({ + api, + schema: PostSchema, + path: "/posts", + cursorField: "id", + limit: 10, + }); - const connector = defineConnector({ - name: "producer-template", - entities: [ - defineEntity({ - name: "posts", - schema: PostSchema, - primaryKey: "id", - live: postStreams.live, - backfill: postStreams.backfill, - }), - ], - events: [], - }); + const connector = defineConnector({ + name: "producer-template", + entities: [ + defineEntity({ + name: "posts", + schema: PostSchema, + primaryKey: "id", + live: postStreams.live, + backfill: postStreams.backfill, + }), + ], + events: [], + }); - const webhookRoute = Webhook.route({ - path: "/webhooks/template", - schema: WebhookPayloadSchema, - handle: (payload, request, rawBody) => - Effect.fn("template/webhook/handle")(function* () { + const webhookRoute = Webhook.route({ + path: "/webhooks/template", + schema: WebhookPayloadSchema, + handle: (payload, request, rawBody) => + Effect.withSpan( + Effect.gen(function* () { if (Option.isSome(config.webhookSecret) && rawBody) { yield* verifyWebhookSignature({ rawBody, @@ -137,24 +137,29 @@ const makeTemplateConnector = ( payload, posts: postStreams, }); - })(), - }); + }), + "template/webhook/handle", + ), + }); - if (Option.isNone(config.webhookSecret)) { - yield* Effect.logWarning( - "TEMPLATE_WEBHOOK_SECRET is not set. Incoming webhooks will not be signature-verified.", - ); - } + if (Option.isNone(config.webhookSecret)) { + yield* Effect.logWarning( + "TEMPLATE_WEBHOOK_SECRET is not set. 
Incoming webhooks will not be signature-verified.", + ); + } - return { connector, routes: [webhookRoute] }; - })().pipe(Effect.annotateLogs({ component: "producer-template" })); + return { connector, routes: [webhookRoute] }; +}); export const layerConfig: Layer.Layer = Layer.effect(TemplateConnector)( - Effect.fnUntraced(function* () { + Effect.gen(function* () { const config = yield* TemplateConfigConfig; - return yield* makeTemplateConnector(config).pipe(Effect.provide(layerApiClient(config))); - })().pipe( + return yield* makeTemplateConnector(config).pipe( + Effect.annotateLogs({ component: "producer-template" }), + Effect.provide(layerApiClient(config)), + ); + }).pipe( Effect.mapError((error) => error instanceof ConnectorError ? error diff --git a/templates/producer-template/src/streams.ts b/templates/producer-template/src/streams.ts index 32e9f8a..339c04b 100644 --- a/templates/producer-template/src/streams.ts +++ b/templates/producer-template/src/streams.ts @@ -28,19 +28,20 @@ const setCutoff = (deferred: Deferred.Deferred, cursor: Cursor) = // Enqueue a single webhook row after recording its cursor as the backfill // cutoff. This is safe to call many times — Deferred.succeed is idempotent. -export const dispatchEntityWebhook = >(options: { +export const dispatchEntityWebhook = Effect.fnUntraced(function* < + T extends Record, +>(options: { readonly queue: Streams.WebhookStream; readonly cutoff: Deferred.Deferred; readonly row: T; readonly cursor: Cursor; -}): Effect.Effect => - Effect.fnUntraced(function* () { - yield* setCutoff(options.cutoff, options.cursor); - return yield* Queue.offer(options.queue.queue, { - cursor: options.cursor, - rows: [options.row], - }).pipe(Effect.asVoid); - })(); +}) { + yield* setCutoff(options.cutoff, options.cursor); + return yield* Queue.offer(options.queue.queue, { + cursor: options.cursor, + rows: [options.row], + }).pipe(Effect.asVoid); +}); // Backfill stream for a single entity. 
Waits for the cutoff deferred to // resolve (set by the first live webhook or by initialCutoff), then pages @@ -94,16 +95,17 @@ export type EntityStreams> = { // Convenience factory: creates the live webhook queue, the cutoff deferred, // and the backfill stream all at once. Callers destructure the result into a // defineEntity() call. -export const makeEntityStreams = >(options: { +export const makeEntityStreams = Effect.fnUntraced(function* < + T extends Record, +>(options: { readonly api: TemplateApiClientService; readonly schema: Schema.Decoder; readonly path: string; readonly cursorField: keyof T & string; readonly limit?: number; -}): Effect.Effect, ConnectorError> => - Effect.fnUntraced(function* () { - const queue = yield* Streams.makeWebhookQueue({ capacity: 1024 }); - const cutoff = yield* Deferred.make(); - const backfill = makeBackfillStream({ ...options, cutoff }); - return { live: queue, cutoff, backfill }; - })(); +}) { + const queue = yield* Streams.makeWebhookQueue({ capacity: 1024 }); + const cutoff = yield* Deferred.make(); + const backfill = makeBackfillStream({ ...options, cutoff }); + return { live: queue, cutoff, backfill }; +}); From 4647220c745dbae424f936bbce0c19dbb1b58949 Mon Sep 17 00:00:00 2001 From: jaipaljadeja Date: Tue, 5 May 2026 18:44:03 +0530 Subject: [PATCH 11/12] chore: keep the image version static --- docker-compose.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yaml b/docker-compose.yaml index 27ee84e..13324d1 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -1,6 +1,6 @@ services: wings: - image: docker.useairfoil.com/airfoil/wings:latest + image: docker.useairfoil.com/airfoil/wings:0.1.0 command: - dev - --http.address=0.0.0.0:7780 From 2a206fea78285b473e4196c5293a438d95c37b16 Mon Sep 17 00:00:00 2001 From: jaipaljadeja Date: Tue, 5 May 2026 18:45:46 +0530 Subject: [PATCH 12/12] refactor: rename factory and layer identifiers MIME-Version: 1.0 Content-Type: text/plain; 
charset=UTF-8 Content-Transfer-Encoding: 8bit also enforces effect’s DateTime instead of js Date. --- .agents/skills/airfoil-kit/SKILL.md | 3 +- .../airfoil-kit/assets/rename-checklist.md | 6 +- .../references/effect-v4-essentials.md | 60 +++++++---- .../airfoil-kit/references/example-auth.md | 27 ++--- .../references/example-producer-polar.md | 13 ++- .../skills/airfoil-kit/references/patterns.md | 66 +++++++----- .../references/template-walkthrough.md | 18 ++-- connectors/producer-polar/README.md | 20 ++-- connectors/producer-polar/src/api.ts | 13 ++- connectors/producer-polar/src/connector.ts | 46 ++++---- connectors/producer-polar/src/index.ts | 10 +- connectors/producer-polar/src/sandbox.ts | 10 +- connectors/producer-polar/src/streams.ts | 7 +- .../producer-polar/test/api.vcr.test.ts | 62 +++++------ .../producer-polar/test/webhook.test.ts | 51 +++++---- connectors/producer-shopify/README.md | 18 ++-- connectors/producer-shopify/src/api.ts | 13 ++- connectors/producer-shopify/src/connector.ts | 46 ++++---- connectors/producer-shopify/src/index.ts | 10 +- connectors/producer-shopify/src/sandbox.ts | 14 ++- connectors/producer-shopify/src/streams.ts | 8 +- .../producer-shopify/test/api.vcr.test.ts | 80 ++++++-------- .../producer-shopify/test/webhook.test.ts | 101 ++++++++++-------- packages/connector-kit/src/core/types.ts | 4 +- .../connector-kit/src/ingestion/engine.ts | 4 +- packages/connector-kit/test/engine.test.ts | 14 +-- templates/producer-template/README.md | 18 ++-- templates/producer-template/src/api.ts | 13 ++- templates/producer-template/src/connector.ts | 46 ++++---- templates/producer-template/src/index.ts | 10 +- templates/producer-template/src/sandbox.ts | 10 +- templates/producer-template/src/streams.ts | 7 +- .../producer-template/test/api.vcr.test.ts | 62 +++++------ .../producer-template/test/webhook.test.ts | 51 +++++---- 34 files changed, 491 insertions(+), 450 deletions(-) diff --git a/.agents/skills/airfoil-kit/SKILL.md 
b/.agents/skills/airfoil-kit/SKILL.md index 29a0fee..65acbf4 100644 --- a/.agents/skills/airfoil-kit/SKILL.md +++ b/.agents/skills/airfoil-kit/SKILL.md @@ -74,7 +74,8 @@ skill. item in [`references/definition-of-done.md`](./references/definition-of-done.md) passes (lint, typecheck, build, test:ci, and mode-appropriate deterministic replay: VCR for REST/GraphQL, fixtures or mock servers for gRPC). -14. **Use current names.** Prefer `layerApiClient(config)`, `layerConfig`, +14. **Use current names.** Prefer `make`, `layer(config)`, + `layerConfig(Config.Wrap<...>)`, namespace entrypoint exports, `Ingestion.runConnector(...)`, `Ingestion.layerMemory`, `Publisher.Publisher`, and `Webhook.route(...)`. 15. **Use correct layer semantics.** `Layer.mergeAll(...)` is for independent diff --git a/.agents/skills/airfoil-kit/assets/rename-checklist.md b/.agents/skills/airfoil-kit/assets/rename-checklist.md index d757c9e..c8c9437 100644 --- a/.agents/skills/airfoil-kit/assets/rename-checklist.md +++ b/.agents/skills/airfoil-kit/assets/rename-checklist.md @@ -30,15 +30,15 @@ rg -l "template" connectors/producer- --glob '!**/__cassettes__' --glob | `@useairfoil/producer-template` | `@useairfoil/producer-` | | `TEMPLATE_` (env prefix) | `_` | | `TemplateApiClient` | `ApiClient` | -| `layerApiClient` | `layerApiClient` | +| API raw-config layer | `layer` | | `TemplateApiClientService` | `ApiClientService` | | `TemplateListPage` | `ListPage` | | `TemplateConfig` (type) | `Config` | | `TemplateConfigConfig` (Config value) | `ConfigConfig` | | `TemplateConnector` (service tag) | `Connector` | -| `layerConfig` | `layerConfig` | +| Config-decoded layers | `layerConfig(config)` | | `TemplateConnectorRuntime` | `ConnectorRuntime` | -| `makeTemplateConnector` | `makeConnector` | +| Connector constructor | `make` | | `Template` (any other identifier prefix) | `` | | `template` (lowercase in strings / URNs) | `` | | `@useairfoil/producer-template/TemplateApiClient` | 
`@useairfoil/producer-/ApiClient` | diff --git a/.agents/skills/airfoil-kit/references/effect-v4-essentials.md b/.agents/skills/airfoil-kit/references/effect-v4-essentials.md index fb67627..b9a70c1 100644 --- a/.agents/skills/airfoil-kit/references/effect-v4-essentials.md +++ b/.agents/skills/airfoil-kit/references/effect-v4-essentials.md @@ -68,22 +68,26 @@ Never use `process.env` in connector code or tests. ## 4. API client layer ```ts -export const makeMyApiClient = ( +export const make = Effect.fnUntraced(function* ( config: MyConfig, -): Effect.Effect => - Effect.fnUntraced(function* () { - const client = (yield* HttpClient.HttpClient).pipe( - HttpClient.mapRequest(HttpClientRequest.prependUrl(config.apiBaseUrl)), - HttpClient.mapRequest(HttpClientRequest.acceptJson), - ); +): Effect.fn.Return { + const client = (yield* HttpClient.HttpClient).pipe( + HttpClient.mapRequest(HttpClientRequest.prependUrl(config.apiBaseUrl)), + HttpClient.mapRequest(HttpClientRequest.acceptJson), + ); - return { fetchJson, fetchList }; - })(); + return { fetchJson, fetchList }; +}); -export const layerApiClient = ( +export const layer = ( config: MyConfig, ): Layer.Layer => - Layer.effect(MyApiClient)(makeMyApiClient(config)); + Layer.effect(MyApiClient)(make(config)); + +export const layerConfig = ( + config: Config.Wrap, +): Layer.Layer => + Layer.effect(MyApiClient)(Config.unwrap(config).asEffect().pipe(Effect.flatMap(make))); ``` Keep transport policy here: @@ -98,25 +102,35 @@ Keep transport policy here: ## 5. Connector layer ```ts -export const layerConfig: Layer.Layer = +export const make = Effect.fnUntraced(function* ( + config: MyConfig, +): Effect.fn.Return { + // ... 
+}); + +export const layer = ( + config: MyConfig, +): Layer.Layer => + Layer.effect(MyConnector)(make(config).pipe(Effect.provide(MyApiClient.layer(config)))); + +export const layerConfig = ( + config: Config.Wrap, +): Layer.Layer => Layer.effect(MyConnector)( - Effect.fnUntraced(function* () { - const config = yield* MyConfigConfig; - return yield* makeMyConnector(config).pipe(Effect.provide(layerApiClient(config))); - })().pipe( - Effect.mapError((error) => - error instanceof ConnectorError - ? error - : new ConnectorError({ message: "My config failed", cause: error }), + Config.unwrap(config) + .asEffect() + .pipe( + Effect.flatMap((config) => make(config).pipe(Effect.provide(MyApiClient.layer(config)))), ), - ), ); ``` Current repo naming is: -- `layerApiClient(config)` -- `layerConfig` +- API and connector modules export `make`, `layer(config)`, and + `layerConfig(Config.Wrap<...>)` +- package entrypoints export namespaces, for example + `export * as MyApiClient from "./api"` Avoid stale names like `XApiClientConfig` and `XConnectorConfig()`. diff --git a/.agents/skills/airfoil-kit/references/example-auth.md b/.agents/skills/airfoil-kit/references/example-auth.md index f699d86..d47044f 100644 --- a/.agents/skills/airfoil-kit/references/example-auth.md +++ b/.agents/skills/airfoil-kit/references/example-auth.md @@ -1,8 +1,8 @@ # example-auth Auth patterns expressed as Effect `Config` + `HttpClient.mapRequest`. -All patterns plug into the current `layerApiClient(config)` factory layer from -`api.ts`. Nothing here requires changes to the connector kit. +All patterns plug into the current `make(config)` / `layer(config)` API client +shape from `api.ts`. Nothing here requires changes to the connector kit. These are illustrative implementation patterns, not a protocol contract. 
Always implement authentication according to official platform docs for the @@ -25,18 +25,19 @@ export const XConfigConfig = Config.all({ import { HttpClient, HttpClientRequest } from "effect/unstable/http"; import { Redacted } from "effect"; -export const layerApiClient = (config: XConfig) => - Layer.effect(XApiClient)( - Effect.fnUntraced(function* () { - const httpClient = yield* HttpClient.HttpClient; - const client = httpClient.pipe( - HttpClient.mapRequest(HttpClientRequest.prependUrl(config.apiBaseUrl)), - HttpClient.mapRequest(HttpClientRequest.bearerToken(Redacted.make(config.accessToken))), - HttpClient.mapRequest(HttpClientRequest.acceptJson), - ); - // ... fetchJson, fetchList built from client - })(), +export const make = Effect.fnUntraced(function* ( + config: XConfig, +): Effect.fn.Return { + const httpClient = yield* HttpClient.HttpClient; + const client = httpClient.pipe( + HttpClient.mapRequest(HttpClientRequest.prependUrl(config.apiBaseUrl)), + HttpClient.mapRequest(HttpClientRequest.bearerToken(Redacted.make(config.accessToken))), + HttpClient.mapRequest(HttpClientRequest.acceptJson), ); + // ... 
fetchJson, fetchList built from client +}); + +export const layer = (config: XConfig) => Layer.effect(XApiClient)(make(config)); ``` Notes: diff --git a/.agents/skills/airfoil-kit/references/example-producer-polar.md b/.agents/skills/airfoil-kit/references/example-producer-polar.md index 62d4057..4d01453 100644 --- a/.agents/skills/airfoil-kit/references/example-producer-polar.md +++ b/.agents/skills/airfoil-kit/references/example-producer-polar.md @@ -7,7 +7,7 @@ Use it as the example of: - multiple entities - real webhook verification -- current `layerApiClient` / `layerConfig` naming +- current `make` / `layer` / `layerConfig(config)` naming - current sandbox layer composition - current VCR test runtime wiring @@ -34,8 +34,9 @@ connectors/producer-polar/ Current public surface: - `PolarApiClient` -- `makePolarApiClient(config)` -- `layerApiClient(config)` +- `make(config)` +- `layer(config)` +- `layerConfig(config)` Pattern: @@ -51,13 +52,15 @@ Current public surface: - `PolarConfig` - `PolarConfigConfig` - `PolarConnector` +- `make(config)` +- `layer(config)` - `layerConfig` - `PolarConnectorRuntime` Important patterns: - `PolarConnector` is a `Context.Service` -- `layerConfig` decodes config and provides `layerApiClient(config)` +- `layerConfig(config)` decodes config and provides `PolarApiClient.layer(config)` - routes are authored with `Webhook.route({...})` - the route handler uses `Effect.fn("polar/webhook/handle")(... )` - signature verification uses the official Polar SDK helper @@ -80,7 +83,7 @@ This file is the current runtime reference for connectors. 
Key points: - `EnvLayer = Layer.mergeAll(FetchHttpClient.layer, ConfigProvider.fromEnv())` -- `ConnectorLayer = layerConfig.pipe(Layer.provide(EnvLayer))` +- `ConnectorLayer = PolarConnector.layerConfig(PolarConnector.PolarConfigConfig).pipe(Layer.provide(EnvLayer))` - `TelemetryLayer` is pre-provided with `EnvLayer` - `RuntimeLayer` merges only already-satisfied layers - entrypoint is `Effect.runPromise(Effect.scoped(program).pipe(Effect.provide(RuntimeLayer)))` diff --git a/.agents/skills/airfoil-kit/references/patterns.md b/.agents/skills/airfoil-kit/references/patterns.md index 1b94eac..6f5eb5a 100644 --- a/.agents/skills/airfoil-kit/references/patterns.md +++ b/.agents/skills/airfoil-kit/references/patterns.md @@ -56,8 +56,11 @@ Do not collapse unrelated responsibilities into one service tag. Use the current repo names. -- raw-config API client layer: `layerApiClient(config)` -- config-decoded connector layer: `layerConfig` +- raw-config layers: `layer(config)` +- config-decoded layers: `layerConfig(Config.Wrap<...>)` +- constructors: `make(config)` +- entrypoints: `export * as XApiClient from "./api"` and + `export * as XConnector from "./connector"` - connector runtime: `{ connector, routes }` - webhook routes: `Webhook.route({...})` - connector runner: `Ingestion.runConnector(...)` @@ -93,22 +96,26 @@ export class XApiClient extends Context.Service() "@useairfoil/producer-x/XApiClient", ) {} -export const makeXApiClient = ( +export const make = Effect.fnUntraced(function* ( config: XConfig, -): Effect.Effect => - Effect.fnUntraced(function* () { - const client = (yield* HttpClient.HttpClient).pipe( - HttpClient.mapRequest(HttpClientRequest.prependUrl(config.apiBaseUrl)), - HttpClient.mapRequest(HttpClientRequest.acceptJson), - ); +): Effect.fn.Return { + const client = (yield* HttpClient.HttpClient).pipe( + HttpClient.mapRequest(HttpClientRequest.prependUrl(config.apiBaseUrl)), + HttpClient.mapRequest(HttpClientRequest.acceptJson), + ); - return { 
fetchJson, fetchList }; - })(); + return { fetchJson, fetchList }; +}); -export const layerApiClient = ( +export const layer = ( config: XConfig, ): Layer.Layer => - Layer.effect(XApiClient)(makeXApiClient(config)); + Layer.effect(XApiClient)(make(config)); + +export const layerConfig = ( + config: Config.Wrap, +): Layer.Layer => + Layer.effect(XApiClient)(Config.unwrap(config).asEffect().pipe(Effect.flatMap(make))); ``` Keep transport policy here: @@ -121,24 +128,29 @@ Keep transport policy here: ## 5. Connector layer shape -Use `layerConfig` to decode config and build the connector service. +Use `layerConfig(config)` to decode config and build the connector service. ```ts -export const layerConfig: Layer.Layer = +export const make = Effect.fnUntraced(function* ( + config: XConfig, +): Effect.fn.Return { + // ... +}); + +export const layer = ( + config: XConfig, +): Layer.Layer => + Layer.effect(XConnector)(make(config).pipe(Effect.provide(XApiClient.layer(config)))); + +export const layerConfig = ( + config: Config.Wrap, +): Layer.Layer => Layer.effect(XConnector)( - Effect.fnUntraced(function* () { - const config = yield* XConfigConfig; - return yield* makeXConnector(config).pipe(Effect.provide(layerApiClient(config))); - })().pipe( - Effect.mapError((error) => - error instanceof ConnectorError - ? 
error - : new ConnectorError({ - message: "X config failed", - cause: error, - }), + Config.unwrap(config) + .asEffect() + .pipe( + Effect.flatMap((config) => make(config).pipe(Effect.provide(XApiClient.layer(config)))), ), - ), ); ``` diff --git a/.agents/skills/airfoil-kit/references/template-walkthrough.md b/.agents/skills/airfoil-kit/references/template-walkthrough.md index 661dd84..528a5aa 100644 --- a/.agents/skills/airfoil-kit/references/template-walkthrough.md +++ b/.agents/skills/airfoil-kit/references/template-walkthrough.md @@ -24,8 +24,9 @@ Current shape: - `TemplateApiClientService` - `TemplateApiClient` -- `makeTemplateApiClient(config)` -- `layerApiClient(config)` +- `make(config)` +- `layer(config)` +- `layerConfig(config)` Porting rules: @@ -57,8 +58,9 @@ Current shape: - `TemplateConfig` - `TemplateConfigConfig` - `TemplateConnector` -- `makeTemplateConnector(config)` -- `layerConfig` +- `make(config)` +- `layer(config)` +- `layerConfig(config)` Current webhook authoring pattern: @@ -69,7 +71,7 @@ Current webhook authoring pattern: Porting rules: - rename all template identifiers -- keep `layerConfig` +- keep `layerConfig(config)` - keep the connector runtime shape `{ connector, routes }` - keep exhaustive dispatch over payload types @@ -83,7 +85,9 @@ const EnvLayer = Layer.mergeAll( Layer.succeed(ConfigProvider.ConfigProvider, ConfigProvider.fromEnv()), ) -const ConnectorLayer = layerConfig.pipe(Layer.provide(EnvLayer)) +const ConnectorLayer = TemplateConnector.layerConfig(TemplateConnector.TemplateConfigConfig).pipe( + Layer.provide(EnvLayer), +) const TelemetryLayer = Layer.unwrap(...).pipe(Layer.provide(EnvLayer)) @@ -107,9 +111,7 @@ Porting rules: Current public surface: - `TemplateApiClient` -- `layerApiClient` - `TemplateConnector` -- `layerConfig` - `TemplateConfig` - `TemplateConfigConfig` - `TemplateConnectorRuntime` diff --git a/connectors/producer-polar/README.md b/connectors/producer-polar/README.md index fd04ccd..7a41217 100644 --- 
a/connectors/producer-polar/README.md +++ b/connectors/producer-polar/README.md @@ -11,9 +11,7 @@ Current scope: ## Public Exports - `PolarApiClient` -- `layerApiClient(config)` - `PolarConnector` -- `layerConfig` - `PolarConfig` - `PolarConfigConfig` - `PolarConnectorRuntime` @@ -29,7 +27,7 @@ type PolarConnectorRuntime = { }; ``` -Use `layerConfig` to build that service from Effect Config. +Use `PolarConnector.layerConfig(PolarConnector.PolarConfigConfig)` to build that service from Effect Config. ## Configuration @@ -60,7 +58,7 @@ import { ConfigProvider, Effect, Layer } from "effect"; import { FetchHttpClient } from "effect/unstable/http"; import { createServer } from "node:http"; -import { layerConfig, PolarConnector } from "@useairfoil/producer-polar"; +import { PolarConnector } from "@useairfoil/producer-polar"; const ConsolePublisher = Layer.succeed(Publisher.Publisher)({ publish: ({ name, source, batch }) => Effect.succeed({ success: true }), @@ -71,10 +69,12 @@ const envLayer = Layer.mergeAll( Layer.succeed(ConfigProvider.ConfigProvider, ConfigProvider.fromEnv()), ); -const connectorLayer = layerConfig.pipe(Layer.provide(envLayer)); +const connectorLayer = PolarConnector.layerConfig(PolarConnector.PolarConfigConfig).pipe( + Layer.provide(envLayer), +); const program = Effect.gen(function* () { - const { connector, routes } = yield* PolarConnector; + const { connector, routes } = yield* PolarConnector.PolarConnector; const serverLayer = NodeHttpServer.layer(createServer, { port: 8080 }); return yield* Ingestion.runConnector(connector, { @@ -103,7 +103,7 @@ Effect.runPromise(runnable); ## API Client Layer -`layerApiClient(config)` builds `PolarApiClient` from a raw `PolarConfig` value. +`PolarApiClient.layer(config)` builds `PolarApiClient.PolarApiClient` from a raw `PolarConfig` value. This is useful for focused API tests or custom runtimes that do not need the full connector service. 
@@ -111,16 +111,16 @@ This is useful for focused API tests or custom runtimes that do not need the ful import { Effect, Layer, Option, Schema } from "effect"; import { FetchHttpClient } from "effect/unstable/http"; -import { layerApiClient, PolarApiClient } from "@useairfoil/producer-polar"; +import { PolarApiClient } from "@useairfoil/producer-polar"; -const apiLayer = layerApiClient({ +const apiLayer = PolarApiClient.layer({ accessToken: "test", apiBaseUrl: "https://sandbox-api.polar.sh/v1/", organizationId: Option.none(), webhookSecret: Option.none(), }).pipe(Layer.provide(FetchHttpClient.layer)); -const program = PolarApiClient.use((api) => +const program = PolarApiClient.PolarApiClient.use((api) => api.fetchList(Schema.Any, "customers/", { page: 1, limit: 100, diff --git a/connectors/producer-polar/src/api.ts b/connectors/producer-polar/src/api.ts index 9daf4db..5b1d657 100644 --- a/connectors/producer-polar/src/api.ts +++ b/connectors/producer-polar/src/api.ts @@ -1,5 +1,5 @@ import { ConnectorError } from "@useairfoil/connector-kit"; -import { Effect, Layer, Option, Context, Schema } from "effect"; +import { Config, Context, Effect, Layer, Option, Schema } from "effect"; import { HttpClient, HttpClientRequest, HttpClientResponse } from "effect/unstable/http"; import type { PolarConfig } from "./connector"; @@ -27,7 +27,7 @@ export class PolarApiClient extends Context.Service { const client = (yield* HttpClient.HttpClient).pipe( @@ -85,7 +85,12 @@ export const makePolarApiClient = Effect.fnUntraced(function* ( return { fetchJson, fetchList }; }); -export const layerApiClient = ( +export const layer = ( config: PolarConfig, ): Layer.Layer => - Layer.effect(PolarApiClient)(makePolarApiClient(config)); + Layer.effect(PolarApiClient)(make(config)); + +export const layerConfig = ( + config: Config.Wrap, +): Layer.Layer => + Layer.effect(PolarApiClient)(Config.unwrap(config).asEffect().pipe(Effect.flatMap(make))); diff --git 
a/connectors/producer-polar/src/connector.ts b/connectors/producer-polar/src/connector.ts index 5c13822..5ab131c 100644 --- a/connectors/producer-polar/src/connector.ts +++ b/connectors/producer-polar/src/connector.ts @@ -10,7 +10,7 @@ import { } from "@useairfoil/connector-kit"; import { Config, Context, Effect, Layer, Option } from "effect"; -import { layerApiClient, PolarApiClient } from "./api"; +import * as PolarApiClient from "./api"; import { type Checkout, CheckoutSchema, @@ -211,10 +211,10 @@ const resolveWebhookDispatch = (options: { }; // Connector factory -const makePolarConnector = Effect.fnUntraced(function* ( +export const make = Effect.fnUntraced(function* ( config: PolarConfig, -): Effect.fn.Return { - const api = yield* PolarApiClient; +): Effect.fn.Return { + const api = yield* PolarApiClient.PolarApiClient; const customerStreams = yield* makeEntityStreams({ api, schema: CustomerSchema, @@ -313,22 +313,28 @@ const makePolarConnector = Effect.fnUntraced(function* ( return { connector, routes: [webhookRoute] }; }); -export const layerConfig: Layer.Layer = +export const layer = ( + config: PolarConfig, +): Layer.Layer => Layer.effect(PolarConnector)( - Effect.gen(function* () { - const config = yield* PolarConfigConfig; - return yield* makePolarConnector(config).pipe( - Effect.annotateLogs({ component: "polar" }), - Effect.provide(layerApiClient(config)), - ); - }).pipe( - Effect.mapError((error) => - error instanceof ConnectorError - ? 
error - : new ConnectorError({ - message: "Polar config failed", - cause: error, - }), - ), + make(config).pipe( + Effect.annotateLogs({ component: "polar" }), + Effect.provide(PolarApiClient.layer(config)), ), ); + +export const layerConfig = ( + config: Config.Wrap, +): Layer.Layer => + Layer.effect(PolarConnector)( + Config.unwrap(config) + .asEffect() + .pipe( + Effect.flatMap((config) => + make(config).pipe( + Effect.annotateLogs({ component: "polar" }), + Effect.provide(PolarApiClient.layer(config)), + ), + ), + ), + ); diff --git a/connectors/producer-polar/src/index.ts b/connectors/producer-polar/src/index.ts index 63fb9cb..cedb803 100644 --- a/connectors/producer-polar/src/index.ts +++ b/connectors/producer-polar/src/index.ts @@ -1,8 +1,2 @@ -export { layerApiClient, PolarApiClient } from "./api"; -export { - type PolarConfig, - PolarConfigConfig, - PolarConnector, - layerConfig, - type PolarConnectorRuntime, -} from "./connector"; +export * as PolarApiClient from "./api"; +export * as PolarConnector from "./connector"; diff --git a/connectors/producer-polar/src/sandbox.ts b/connectors/producer-polar/src/sandbox.ts index 00bf9e1..fe0814f 100644 --- a/connectors/producer-polar/src/sandbox.ts +++ b/connectors/producer-polar/src/sandbox.ts @@ -5,7 +5,7 @@ import { FetchHttpClient } from "effect/unstable/http"; import * as Observability from "effect/unstable/observability"; import { createServer } from "node:http"; -import { layerConfig, PolarConnector } from "./index"; +import { PolarConnector } from "./index"; const SandboxConfig = Config.all({ port: Config.port("POLAR_WEBHOOK_PORT").pipe(Config.withDefault(8080)), @@ -35,7 +35,7 @@ const ConsolePublisherLayer = Layer.succeed(Publisher.Publisher)({ const program = Effect.gen(function* () { const config = yield* SandboxConfig; - const { connector, routes } = yield* PolarConnector; + const { connector, routes } = yield* PolarConnector.PolarConnector; const routePaths = routes.map((route) => route.path); const 
serverLayer = NodeHttpServer.layer(createServer, { port: config.port }); @@ -46,7 +46,7 @@ const program = Effect.gen(function* () { const now = yield* DateTime.now; return yield* Ingestion.runConnector(connector, { - initialCutoff: DateTime.toDate(now), + initialCutoff: now, webhook: { routes, healthPath: "/health", @@ -60,7 +60,9 @@ const EnvLayer = Layer.mergeAll( Layer.succeed(ConfigProvider.ConfigProvider, ConfigProvider.fromEnv()), ); -const ConnectorLayer = layerConfig.pipe(Layer.provide(EnvLayer)); +const ConnectorLayer = PolarConnector.layerConfig(PolarConnector.PolarConfigConfig).pipe( + Layer.provide(EnvLayer), +); const TelemetryLayer = Layer.unwrap( Effect.gen(function* () { diff --git a/connectors/producer-polar/src/streams.ts b/connectors/producer-polar/src/streams.ts index 8235358..f5a0b4c 100644 --- a/connectors/producer-polar/src/streams.ts +++ b/connectors/producer-polar/src/streams.ts @@ -6,7 +6,10 @@ import { DateTime, Deferred, Effect, Queue, Stream } from "effect"; import type { PolarApiClientService } from "./api"; // Cursor helpers -const toDate = (cursor: Cursor) => (cursor instanceof Date ? 
cursor : new Date(String(cursor))); +const toEpochMillis = (cursor: Cursor): number => { + if (DateTime.isDateTime(cursor)) return DateTime.toEpochMillis(cursor); + return Date.parse(String(cursor)); +}; export const resolveCursor = Effect.fnUntraced(function* >( row: T, @@ -20,7 +23,7 @@ export const resolveCursor = Effect.fnUntraced(function* { if (typeof value !== "string") return false; - return new Date(value).getTime() <= toDate(cutoff).getTime(); + return Date.parse(value) <= toEpochMillis(cutoff); }; // Stream helpers diff --git a/connectors/producer-polar/test/api.vcr.test.ts b/connectors/producer-polar/test/api.vcr.test.ts index a613572..3f46699 100644 --- a/connectors/producer-polar/test/api.vcr.test.ts +++ b/connectors/producer-polar/test/api.vcr.test.ts @@ -4,16 +4,15 @@ import { FileSystemCassetteStore, VcrHttpClient } from "@useairfoil/effect-vcr"; import { ConfigProvider, Effect, Layer, Schema } from "effect"; import { FetchHttpClient } from "effect/unstable/http"; -import { makePolarApiClient, PolarApiClient } from "../src/api"; -import { PolarConfigConfig } from "../src/index"; +import { PolarApiClient, PolarConnector } from "../src/index"; // Tests the PolarApiClient directly using a recorded cassette so no webhook // is needed to trigger a backfill cutoff. The connector-level backfill flow // is covered by webhook.test.ts. 
describe("producer-polar api (vcr)", () => { - it.effect("replays customers list page with VCR", () => { - const program = Effect.gen(function* () { - const api = yield* PolarApiClient; + it.effect("replays customers list page with VCR", () => + Effect.gen(function* () { + const api = yield* PolarApiClient.PolarApiClient; const result = yield* api.fetchList(Schema.Any, "customers/", { page: 1, limit: 100, @@ -22,39 +21,26 @@ describe("producer-polar api (vcr)", () => { expect(result.items.length).toBeGreaterThan(0); expect(result.pagination.total_count).toBeGreaterThan(0); - }).pipe(Effect.scoped); - - const apiLayer = Layer.effect(PolarApiClient)( - Effect.gen(function* () { - const config = yield* PolarConfigConfig; - return yield* makePolarApiClient(config); - }), - ); - - const cassetteStoreLayer = FileSystemCassetteStore.layer().pipe( - Layer.provide(NodeServices.layer), - ); - const vcrRuntimeLayer = Layer.mergeAll( - FetchHttpClient.layer, - NodeServices.layer, - cassetteStoreLayer, - ); - const vcrWithDeps = VcrHttpClient.layer({ vcrName: "producer-polar" }).pipe( - Layer.provide(vcrRuntimeLayer), - ); - - const testLayer = apiLayer.pipe( - Layer.provide(vcrWithDeps), - Layer.provide( - ConfigProvider.layer( - ConfigProvider.fromUnknown({ - POLAR_ACCESS_TOKEN: "test", - POLAR_API_BASE_URL: "https://sandbox-api.polar.sh/v1/", - }), + }).pipe( + Effect.provide( + PolarApiClient.layerConfig(PolarConnector.PolarConfigConfig).pipe( + Layer.provide( + VcrHttpClient.layer({ vcrName: "producer-polar" }).pipe( + Layer.provide(FileSystemCassetteStore.layer()), + Layer.provide(Layer.merge(NodeServices.layer, FetchHttpClient.layer)), + ), + ), + Layer.provide( + ConfigProvider.layer( + ConfigProvider.fromUnknown({ + POLAR_ACCESS_TOKEN: "test", + POLAR_API_BASE_URL: "https://sandbox-api.polar.sh/v1/", + }), + ), + ), ), ), - ); - - return program.pipe(Effect.provide(testLayer), Effect.scoped); - }); + Effect.scoped, + ), + ); }); diff --git 
a/connectors/producer-polar/test/webhook.test.ts b/connectors/producer-polar/test/webhook.test.ts index e4678e9..e15d1d1 100644 --- a/connectors/producer-polar/test/webhook.test.ts +++ b/connectors/producer-polar/test/webhook.test.ts @@ -1,11 +1,12 @@ import { NodeHttpServer } from "@effect/platform-node"; import { describe, expect, it } from "@effect/vitest"; import { ConnectorError, Ingestion } from "@useairfoil/connector-kit"; -import { ConfigProvider, Deferred, Effect, Layer, Ref } from "effect"; +import { Config, ConfigProvider, DateTime, Deferred, Effect, Layer, Ref } from "effect"; import { HttpClient, HttpClientRequest } from "effect/unstable/http"; -import { PolarApiClient, type PolarApiClientService } from "../src/api"; -import { layerConfig, PolarConnector } from "../src/index"; +import type { PolarApiClientService } from "../src/api"; + +import { PolarApiClient, PolarConnector } from "../src/index"; import { makeTestPublisher } from "./helpers"; const customerWebhookPayload = { @@ -38,25 +39,16 @@ const makeApiStub = (): PolarApiClientService => ({ }); describe("producer-polar webhook", () => { - it.effect("publishes live webhook batches", () => { - const runtimeLayer = NodeHttpServer.layerTest; - const apiLayer = Layer.succeed(PolarApiClient)(makeApiStub()); - - const connectorLayer = layerConfig.pipe(Layer.provide(apiLayer)); - const configProvider = ConfigProvider.fromUnknown({ - POLAR_ACCESS_TOKEN: "test", - POLAR_API_BASE_URL: "https://sandbox-api.polar.sh/v1/", - }); - - return Effect.gen(function* () { + it.effect("publishes live webhook batches", () => + Effect.gen(function* () { const { publishedRef, done, layer } = yield* makeTestPublisher(1); - const { connector, routes } = yield* PolarConnector; - const runLayer = Layer.mergeAll(Ingestion.layerMemory, layer, runtimeLayer); + const { connector, routes } = yield* PolarConnector.PolarConnector; + const now = yield* DateTime.now; yield* Effect.gen(function* () { yield* Effect.forkScoped( 
Ingestion.runConnector(connector, { - initialCutoff: new Date(), + initialCutoff: now, webhook: { routes, }, @@ -75,15 +67,28 @@ describe("producer-polar webhook", () => { const published = yield* Ref.get(publishedRef); expect(published.length).toBe(1); expect(published[0]?.name).toBe("customers"); - }).pipe(Effect.provide(runLayer)); + }).pipe( + Effect.provide(Layer.mergeAll(Ingestion.layerMemory, layer, NodeHttpServer.layerTest)), + ); }).pipe( Effect.provide( - connectorLayer.pipe( - Layer.provide(runtimeLayer), - Layer.provide(ConfigProvider.layer(configProvider)), + Layer.effect(PolarConnector.PolarConnector)( + Config.unwrap(PolarConnector.PolarConfigConfig) + .asEffect() + .pipe(Effect.flatMap(PolarConnector.make)), + ).pipe( + Layer.provide(Layer.succeed(PolarApiClient.PolarApiClient)(makeApiStub())), + Layer.provide( + ConfigProvider.layer( + ConfigProvider.fromUnknown({ + POLAR_ACCESS_TOKEN: "test", + POLAR_API_BASE_URL: "https://sandbox-api.polar.sh/v1/", + }), + ), + ), ), ), Effect.scoped, - ); - }); + ), + ); }); diff --git a/connectors/producer-shopify/README.md b/connectors/producer-shopify/README.md index fc1328b..b98b110 100644 --- a/connectors/producer-shopify/README.md +++ b/connectors/producer-shopify/README.md @@ -11,9 +11,7 @@ Current scope: ## Public Exports - `ShopifyApiClient` -- `layerApiClient(config)` - `ShopifyConnector` -- `layerConfig` - `ShopifyConfig` - `ShopifyConfigConfig` - `ShopifyConnectorRuntime` @@ -52,7 +50,7 @@ import { ConfigProvider, Effect, Layer } from "effect"; import { FetchHttpClient } from "effect/unstable/http"; import { createServer } from "node:http"; -import { layerConfig, ShopifyConnector } from "@useairfoil/producer-shopify"; +import { ShopifyConnector } from "@useairfoil/producer-shopify"; const ConsolePublisher = Layer.succeed(Publisher.Publisher)({ publish: () => Effect.succeed({ success: true }), @@ -63,10 +61,12 @@ const envLayer = Layer.mergeAll( Layer.succeed(ConfigProvider.ConfigProvider, 
ConfigProvider.fromEnv()), ); -const connectorLayer = layerConfig.pipe(Layer.provide(envLayer)); +const connectorLayer = ShopifyConnector.layerConfig(ShopifyConnector.ShopifyConfigConfig).pipe( + Layer.provide(envLayer), +); const program = Effect.gen(function* () { - const { connector, routes } = yield* ShopifyConnector; + const { connector, routes } = yield* ShopifyConnector.ShopifyConnector; const serverLayer = NodeHttpServer.layer(createServer, { port: 8080 }); return yield* Ingestion.runConnector(connector, { @@ -95,7 +95,7 @@ Effect.runPromise(runnable); ## API Client Layer -`layerApiClient(config)` builds `ShopifyApiClient` from a raw `ShopifyConfig` value. +`ShopifyApiClient.layer(config)` builds `ShopifyApiClient.ShopifyApiClient` from a raw `ShopifyConfig` value. The client: @@ -107,15 +107,15 @@ The client: import { Effect, Layer, Option } from "effect"; import { FetchHttpClient } from "effect/unstable/http"; -import { layerApiClient, ProductSchema, ShopifyApiClient } from "@useairfoil/producer-shopify"; +import { ProductSchema, ShopifyApiClient } from "@useairfoil/producer-shopify"; -const apiLayer = layerApiClient({ +const apiLayer = ShopifyApiClient.layer({ apiBaseUrl: "https://your-store.myshopify.com/admin/api/2026-01", apiToken: "test-token", webhookSecret: Option.none(), }).pipe(Layer.provide(FetchHttpClient.layer)); -const program = ShopifyApiClient.use((api) => +const program = ShopifyApiClient.ShopifyApiClient.use((api) => api.fetchList(ProductSchema, "/products.json", { limit: 50, }), diff --git a/connectors/producer-shopify/src/api.ts b/connectors/producer-shopify/src/api.ts index 1509807..f85d49f 100644 --- a/connectors/producer-shopify/src/api.ts +++ b/connectors/producer-shopify/src/api.ts @@ -1,5 +1,5 @@ import { ConnectorError } from "@useairfoil/connector-kit"; -import { Context, Effect, Layer, Schema } from "effect"; +import { Config, Context, Effect, Layer, Schema } from "effect"; import { HttpClient, HttpClientRequest, 
HttpClientResponse } from "effect/unstable/http"; import type { ShopifyConfig } from "./connector"; @@ -47,7 +47,7 @@ const inferListField = (path: string): string => { const isAbsoluteUrl = (value: string): boolean => /^https?:\/\//i.test(value); -export const makeShopifyApiClient = Effect.fnUntraced(function* ( +export const make = Effect.fnUntraced(function* ( config: ShopifyConfig, ): Effect.fn.Return { const rawClient = yield* HttpClient.HttpClient; @@ -136,7 +136,12 @@ export const makeShopifyApiClient = Effect.fnUntraced(function* ( return { fetchJson, fetchList }; }); -export const layerApiClient = ( +export const layer = ( config: ShopifyConfig, ): Layer.Layer => - Layer.effect(ShopifyApiClient)(makeShopifyApiClient(config)); + Layer.effect(ShopifyApiClient)(make(config)); + +export const layerConfig = ( + config: Config.Wrap, +): Layer.Layer => + Layer.effect(ShopifyApiClient)(Config.unwrap(config).asEffect().pipe(Effect.flatMap(make))); diff --git a/connectors/producer-shopify/src/connector.ts b/connectors/producer-shopify/src/connector.ts index 41f738c..fac84b8 100644 --- a/connectors/producer-shopify/src/connector.ts +++ b/connectors/producer-shopify/src/connector.ts @@ -10,7 +10,7 @@ import { import { Config, Context, Effect, Layer, Option } from "effect"; import { createHmac, timingSafeEqual } from "node:crypto"; -import { layerApiClient, ShopifyApiClient } from "./api"; +import * as ShopifyApiClient from "./api"; import { type Product, ProductSchema, type WebhookPayload, WebhookPayloadSchema } from "./schemas"; import { dispatchEntityWebhook, @@ -100,10 +100,10 @@ const resolveWebhookDispatch = (options: { } }; -const makeShopifyConnector = Effect.fnUntraced(function* ( +export const make = Effect.fnUntraced(function* ( config: ShopifyConfig, -): Effect.fn.Return { - const api = yield* ShopifyApiClient; +): Effect.fn.Return { + const api = yield* ShopifyApiClient.ShopifyApiClient; const productStreams = yield* makeEntityStreams({ api, schema: 
ProductSchema, @@ -169,22 +169,28 @@ const makeShopifyConnector = Effect.fnUntraced(function* ( return { connector, routes: [webhookRoute] }; }); -export const layerConfig: Layer.Layer = +export const layer = ( + config: ShopifyConfig, +): Layer.Layer => Layer.effect(ShopifyConnector)( - Effect.gen(function* () { - const config = yield* ShopifyConfigConfig; - return yield* makeShopifyConnector(config).pipe( - Effect.annotateLogs({ component: "producer-shopify" }), - Effect.provide(layerApiClient(config)), - ); - }).pipe( - Effect.mapError((error) => - error instanceof ConnectorError - ? error - : new ConnectorError({ - message: "Shopify config failed", - cause: error, - }), - ), + make(config).pipe( + Effect.annotateLogs({ component: "producer-shopify" }), + Effect.provide(ShopifyApiClient.layer(config)), ), ); + +export const layerConfig = ( + config: Config.Wrap, +): Layer.Layer => + Layer.effect(ShopifyConnector)( + Config.unwrap(config) + .asEffect() + .pipe( + Effect.flatMap((config) => + make(config).pipe( + Effect.annotateLogs({ component: "producer-shopify" }), + Effect.provide(ShopifyApiClient.layer(config)), + ), + ), + ), + ); diff --git a/connectors/producer-shopify/src/index.ts b/connectors/producer-shopify/src/index.ts index 6cc64b6..a5c9bd3 100644 --- a/connectors/producer-shopify/src/index.ts +++ b/connectors/producer-shopify/src/index.ts @@ -1,10 +1,4 @@ -export { layerApiClient, ShopifyApiClient } from "./api"; -export { - type ShopifyConfig, - ShopifyConfigConfig, - ShopifyConnector, - layerConfig, - type ShopifyConnectorRuntime, -} from "./connector"; +export * as ShopifyApiClient from "./api"; +export * as ShopifyConnector from "./connector"; export type { Product, WebhookPayload } from "./schemas"; export { ProductSchema, WebhookPayloadSchema } from "./schemas"; diff --git a/connectors/producer-shopify/src/sandbox.ts b/connectors/producer-shopify/src/sandbox.ts index 74368cd..deaf698 100644 --- a/connectors/producer-shopify/src/sandbox.ts +++ 
b/connectors/producer-shopify/src/sandbox.ts @@ -7,7 +7,7 @@ import { FetchHttpClient } from "effect/unstable/http"; import * as Observability from "effect/unstable/observability"; import { createServer } from "node:http"; -import { layerConfig, ShopifyConnector } from "./index"; +import { ShopifyConnector } from "./index"; const SandboxConfig = Config.all({ port: Config.port("SHOPIFY_WEBHOOK_PORT").pipe(Config.withDefault(8080)), @@ -39,7 +39,7 @@ const ConsolePublisherLayer = Layer.succeed(Publisher.Publisher)({ const program = Effect.gen(function* () { const config = yield* SandboxConfig; - const { connector, routes } = yield* ShopifyConnector; + const { connector, routes } = yield* ShopifyConnector.ShopifyConnector; const routePaths = routes.map((route) => route.path); const serverLayer = NodeHttpServer.layer(createServer, { port: config.port }); @@ -50,7 +50,7 @@ const program = Effect.gen(function* () { const now = yield* DateTime.now; return yield* Ingestion.runConnector(connector, { - initialCutoff: DateTime.toDate(now), + initialCutoff: now, webhook: { routes, healthPath: "/health", @@ -64,8 +64,12 @@ const EnvLayer = Layer.mergeAll( Layer.succeed(ConfigProvider.ConfigProvider, ConfigProvider.fromEnv()), ); -const ConnectorLayer: Layer.Layer = - layerConfig.pipe(Layer.provide(EnvLayer)); +const ConnectorLayer: Layer.Layer< + ShopifyConnector.ShopifyConnector, + Config.ConfigError | ConnectorError +> = ShopifyConnector.layerConfig(ShopifyConnector.ShopifyConfigConfig).pipe( + Layer.provide(EnvLayer), +); const TelemetryLayer = Layer.unwrap( Effect.gen(function* () { diff --git a/connectors/producer-shopify/src/streams.ts b/connectors/producer-shopify/src/streams.ts index f4a4748..af88521 100644 --- a/connectors/producer-shopify/src/streams.ts +++ b/connectors/producer-shopify/src/streams.ts @@ -1,14 +1,12 @@ import type * as Schema from "effect/Schema"; import { type Batch, ConnectorError, type Cursor, Streams } from "@useairfoil/connector-kit"; -import { 
Deferred, Effect, Queue, Stream } from "effect"; +import { DateTime, Deferred, Effect, Queue, Stream } from "effect"; import type { ShopifyApiClientService } from "./api"; const toEpochMillis = (value: unknown): number | undefined => { - if (value instanceof Date) { - return value.getTime(); - } + if (DateTime.isDateTime(value)) return DateTime.toEpochMillis(value); if (typeof value === "number") { return Number.isFinite(value) ? value : undefined; } @@ -39,7 +37,7 @@ export const resolveCursor = >( return value; } if (value instanceof Date) { - return value; + return DateTime.fromDateUnsafe(value); } throw new Error(`Unsupported cursor value for field '${cursorField}'`); }, diff --git a/connectors/producer-shopify/test/api.vcr.test.ts b/connectors/producer-shopify/test/api.vcr.test.ts index b1e06e8..ee6c223 100644 --- a/connectors/producer-shopify/test/api.vcr.test.ts +++ b/connectors/producer-shopify/test/api.vcr.test.ts @@ -6,8 +6,7 @@ import { FileSystemCassetteStore, VcrHttpClient } from "@useairfoil/effect-vcr"; import { ConfigProvider, Effect, Layer } from "effect"; import { FetchHttpClient } from "effect/unstable/http"; -import { makeShopifyApiClient, ShopifyApiClient } from "../src/api"; -import { ProductSchema, ShopifyConfigConfig } from "../src/index"; +import { ProductSchema, ShopifyApiClient, ShopifyConnector } from "../src/index"; const normalizeRequestPath = (value: string): string => { const url = new URL(value); @@ -22,56 +21,45 @@ const matchByPathAndMethod = (request: VcrRequest, entry: VcrEntry): boolean => normalizeRequestPath(request.url) === normalizeRequestPath(entry.request.url); describe("producer-shopify api (vcr)", () => { - it.effect("replays products list page with VCR", () => { - const program = Effect.gen(function* () { - const api = yield* ShopifyApiClient; + it.effect("replays products list page with VCR", () => + Effect.gen(function* () { + const api = yield* ShopifyApiClient.ShopifyApiClient; const result = yield* 
api.fetchList(ProductSchema, "/products.json", { limit: 50, }); expect(result.items.length).toBeGreaterThan(0); expect(typeof result.hasMore).toBe("boolean"); - }).pipe(Effect.scoped); - - const apiLayer = Layer.effect(ShopifyApiClient)( - Effect.gen(function* () { - const config = yield* ShopifyConfigConfig; - return yield* makeShopifyApiClient(config); - }), - ); - - const cassetteStoreLayer = FileSystemCassetteStore.layer().pipe( - Layer.provide(NodeServices.layer), - ); - const vcrRuntimeLayer = Layer.mergeAll( - FetchHttpClient.layer, - NodeServices.layer, - cassetteStoreLayer, - ); - const vcrWithDeps = VcrHttpClient.layer({ - vcrName: "producer-shopify", - mode: "auto", - match: matchByPathAndMethod, - redact: { - requestHeaders: ["x-shopify-access-token", "authorization"], - }, - matchIgnore: { - requestHeaders: ["x-shopify-access-token", "authorization"], - }, - }).pipe(Layer.provide(vcrRuntimeLayer)); - - const testLayer = apiLayer.pipe( - Layer.provide(vcrWithDeps), - Layer.provide( - ConfigProvider.layer( - ConfigProvider.fromUnknown({ - SHOPIFY_API_BASE_URL: "https://nothing-12348377.myshopify.com/admin/api/2026-01", - SHOPIFY_API_TOKEN: "test-token", - }), + }).pipe( + Effect.provide( + ShopifyApiClient.layerConfig(ShopifyConnector.ShopifyConfigConfig).pipe( + Layer.provide( + VcrHttpClient.layer({ + vcrName: "producer-shopify", + mode: "auto", + match: matchByPathAndMethod, + redact: { + requestHeaders: ["x-shopify-access-token", "authorization"], + }, + matchIgnore: { + requestHeaders: ["x-shopify-access-token", "authorization"], + }, + }).pipe( + Layer.provide(FileSystemCassetteStore.layer()), + Layer.provide(Layer.merge(NodeServices.layer, FetchHttpClient.layer)), + ), + ), + Layer.provide( + ConfigProvider.layer( + ConfigProvider.fromUnknown({ + SHOPIFY_API_BASE_URL: "https://nothing-12348377.myshopify.com/admin/api/2026-01", + SHOPIFY_API_TOKEN: "test-token", + }), + ), + ), ), ), - ); - - return program.pipe(Effect.provide(testLayer), 
Effect.scoped); - }); + Effect.scoped, + ), + ); }); diff --git a/connectors/producer-shopify/test/webhook.test.ts b/connectors/producer-shopify/test/webhook.test.ts index e1fd4f3..2de831a 100644 --- a/connectors/producer-shopify/test/webhook.test.ts +++ b/connectors/producer-shopify/test/webhook.test.ts @@ -1,12 +1,13 @@ import { NodeHttpServer } from "@effect/platform-node"; import { describe, expect, it } from "@effect/vitest"; import { ConnectorError, Ingestion } from "@useairfoil/connector-kit"; -import { ConfigProvider, Deferred, Effect, Layer, Ref } from "effect"; +import { Config, ConfigProvider, DateTime, Deferred, Effect, Layer, Ref } from "effect"; import { HttpClient, HttpClientRequest } from "effect/unstable/http"; import { createHmac } from "node:crypto"; -import { ShopifyApiClient, type ShopifyApiClientService } from "../src/api"; -import { layerConfig, ShopifyConnector } from "../src/index"; +import type { ShopifyApiClientService } from "../src/api"; + +import { ShopifyApiClient, ShopifyConnector } from "../src/index"; import { makeTestPublisher } from "./helpers"; const webhookSecret = "test-shopify-webhook-secret"; @@ -41,26 +42,16 @@ const signPayload = (rawBody: string): string => createHmac("sha256", webhookSecret).update(rawBody).digest("base64"); describe("producer-shopify webhook", () => { - it.effect("publishes live product webhook batches", () => { - const runtimeLayer = NodeHttpServer.layerTest; - const apiLayer = Layer.succeed(ShopifyApiClient)(makeApiStub()); - - const connectorLayer = layerConfig.pipe(Layer.provide(apiLayer)); - const configProvider = ConfigProvider.fromUnknown({ - SHOPIFY_API_BASE_URL: "https://your-development-store.myshopify.com/admin/api/2026-01", - SHOPIFY_API_TOKEN: "test-token", - SHOPIFY_WEBHOOK_SECRET: webhookSecret, - }); - - return Effect.gen(function* () { + it.effect("publishes live product webhook batches", () => + Effect.gen(function* () { const { publishedRef, done, layer } = yield* 
makeTestPublisher(1); - const { connector, routes } = yield* ShopifyConnector; - const runLayer = Layer.mergeAll(Ingestion.layerMemory, layer, runtimeLayer); + const { connector, routes } = yield* ShopifyConnector.ShopifyConnector; + const now = yield* DateTime.now; yield* Effect.gen(function* () { yield* Effect.forkScoped( Ingestion.runConnector(connector, { - initialCutoff: new Date(), + initialCutoff: now, webhook: { routes, }, @@ -84,38 +75,43 @@ describe("producer-shopify webhook", () => { const published = yield* Ref.get(publishedRef); expect(published.length).toBe(1); expect(published[0]?.name).toBe("products"); - }).pipe(Effect.provide(runLayer)); + }).pipe( + Effect.provide(Layer.mergeAll(Ingestion.layerMemory, layer, NodeHttpServer.layerTest)), + ); }).pipe( Effect.provide( - connectorLayer.pipe( - Layer.provide(runtimeLayer), - Layer.provide(ConfigProvider.layer(configProvider)), + Layer.effect(ShopifyConnector.ShopifyConnector)( + Config.unwrap(ShopifyConnector.ShopifyConfigConfig) + .asEffect() + .pipe(Effect.flatMap(ShopifyConnector.make)), + ).pipe( + Layer.provide(Layer.succeed(ShopifyApiClient.ShopifyApiClient)(makeApiStub())), + Layer.provide( + ConfigProvider.layer( + ConfigProvider.fromUnknown({ + SHOPIFY_API_BASE_URL: + "https://your-development-store.myshopify.com/admin/api/2026-01", + SHOPIFY_API_TOKEN: "test-token", + SHOPIFY_WEBHOOK_SECRET: webhookSecret, + }), + ), + ), ), ), Effect.scoped, - ); - }); - - it.effect("rejects invalid webhook signatures", () => { - const runtimeLayer = NodeHttpServer.layerTest; - const apiLayer = Layer.succeed(ShopifyApiClient)(makeApiStub()); - - const connectorLayer = layerConfig.pipe(Layer.provide(apiLayer)); - const configProvider = ConfigProvider.fromUnknown({ - SHOPIFY_API_BASE_URL: "https://your-development-store.myshopify.com/admin/api/2026-01", - SHOPIFY_API_TOKEN: "test-token", - SHOPIFY_WEBHOOK_SECRET: webhookSecret, - }); + ), + ); - return Effect.gen(function* () { + it.effect("rejects invalid 
webhook signatures", () => + Effect.gen(function* () { const { publishedRef, layer } = yield* makeTestPublisher(1); - const { connector, routes } = yield* ShopifyConnector; - const runLayer = Layer.mergeAll(Ingestion.layerMemory, layer, runtimeLayer); + const { connector, routes } = yield* ShopifyConnector.ShopifyConnector; + const now = yield* DateTime.now; yield* Effect.gen(function* () { yield* Effect.forkScoped( Ingestion.runConnector(connector, { - initialCutoff: new Date(), + initialCutoff: now, webhook: { routes, }, @@ -136,15 +132,30 @@ describe("producer-shopify webhook", () => { expect(response.status).toBe(500); const published = yield* Ref.get(publishedRef); expect(published.length).toBe(0); - }).pipe(Effect.provide(runLayer)); + }).pipe( + Effect.provide(Layer.mergeAll(Ingestion.layerMemory, layer, NodeHttpServer.layerTest)), + ); }).pipe( Effect.provide( - connectorLayer.pipe( - Layer.provide(runtimeLayer), - Layer.provide(ConfigProvider.layer(configProvider)), + Layer.effect(ShopifyConnector.ShopifyConnector)( + Config.unwrap(ShopifyConnector.ShopifyConfigConfig) + .asEffect() + .pipe(Effect.flatMap(ShopifyConnector.make)), + ).pipe( + Layer.provide(Layer.succeed(ShopifyApiClient.ShopifyApiClient)(makeApiStub())), + Layer.provide( + ConfigProvider.layer( + ConfigProvider.fromUnknown({ + SHOPIFY_API_BASE_URL: + "https://your-development-store.myshopify.com/admin/api/2026-01", + SHOPIFY_API_TOKEN: "test-token", + SHOPIFY_WEBHOOK_SECRET: webhookSecret, + }), + ), + ), ), ), Effect.scoped, - ); - }); + ), + ); }); diff --git a/packages/connector-kit/src/core/types.ts b/packages/connector-kit/src/core/types.ts index 92470fe..202810e 100644 --- a/packages/connector-kit/src/core/types.ts +++ b/packages/connector-kit/src/core/types.ts @@ -1,8 +1,8 @@ -import type { Effect, Queue, Schema, Stream } from "effect"; +import type { DateTime, Effect, Queue, Schema, Stream } from "effect"; import type { ConnectorError } from "../errors"; -export type Cursor = string 
| number | bigint | Date; +export type Cursor = string | number | bigint | DateTime.DateTime; export type Batch = { readonly cursor: Cursor; diff --git a/packages/connector-kit/src/ingestion/engine.ts b/packages/connector-kit/src/ingestion/engine.ts index 54bd267..587e123 100644 --- a/packages/connector-kit/src/ingestion/engine.ts +++ b/packages/connector-kit/src/ingestion/engine.ts @@ -1,4 +1,4 @@ -import { Effect, Layer, Metric, Queue, Ref, Stream } from "effect"; +import { DateTime, Effect, Layer, Metric, Queue, Ref, Stream } from "effect"; import { HttpRouter, type HttpServer, HttpServerResponse } from "effect/unstable/http"; import type { @@ -78,7 +78,7 @@ export function runConnector(connector: ConnectorDefinition, options?: RunConnec return Effect.withSpan( Effect.gen(function* () { - const initialCutoff = options?.initialCutoff ?? new Date(); + const initialCutoff = options?.initialCutoff ?? (yield* DateTime.now); return yield* runIngestion(connector, initialCutoff); }).pipe(Effect.provide(runtimeLayer)), "connector.run", diff --git a/packages/connector-kit/test/engine.test.ts b/packages/connector-kit/test/engine.test.ts index 3543d42..c18a47e 100644 --- a/packages/connector-kit/test/engine.test.ts +++ b/packages/connector-kit/test/engine.test.ts @@ -1,5 +1,5 @@ import { describe, expect, it } from "@effect/vitest"; -import { Deferred, Effect, Layer, Queue, Ref, Schema, Stream } from "effect"; +import { DateTime, Deferred, Effect, Layer, Queue, Ref, Schema, Stream } from "effect"; import type { Cursor, IngestionState } from "../src/core/types"; import type { ConnectorError } from "../src/errors"; @@ -80,7 +80,7 @@ describe("engine merging logic", () => { const publisherLayer = makeTestPublisher(publishedRef, done, 2); yield* Effect.forkScoped( - runConnector(connector, { initialCutoff: new Date() }).pipe( + runConnector(connector, { initialCutoff: yield* DateTime.now }).pipe( Effect.provide(Layer.mergeAll(layerMemory, publisherLayer)), ), ); @@ -143,7 
+143,7 @@ describe("engine merging logic", () => { const publisherLayer = makeTestPublisher(publishedRef, done, 3); yield* Effect.forkScoped( - runConnector(connector, { initialCutoff: new Date() }).pipe( + runConnector(connector, { initialCutoff: yield* DateTime.now }).pipe( Effect.provide(Layer.mergeAll(layerMemory, publisherLayer)), ), ); @@ -203,7 +203,7 @@ describe("engine merging logic", () => { const publisherLayer = makeTestPublisher(publishedRef, done, 1); yield* Effect.forkScoped( - runConnector(connector, { initialCutoff: new Date() }).pipe( + runConnector(connector, { initialCutoff: yield* DateTime.now }).pipe( Effect.provide(Layer.mergeAll(layerMemory, publisherLayer)), ), ); @@ -259,9 +259,9 @@ describe("engine merging logic", () => { }); const result = yield* Effect.result( - runConnector(connector, { initialCutoff: new Date("2024-01-02T00:00:00Z") }).pipe( - Effect.provide(Layer.mergeAll(stateStoreLayer, rejectingPublisherLayer)), - ), + runConnector(connector, { + initialCutoff: DateTime.makeUnsafe("2024-01-02T00:00:00Z"), + }).pipe(Effect.provide(Layer.mergeAll(stateStoreLayer, rejectingPublisherLayer))), ); expect(result._tag).toBe("Failure"); diff --git a/templates/producer-template/README.md b/templates/producer-template/README.md index 7099773..6031457 100644 --- a/templates/producer-template/README.md +++ b/templates/producer-template/README.md @@ -7,9 +7,7 @@ It uses JSONPlaceholder so the package stays runnable, typecheckable, and testab ## Public Exports - `TemplateApiClient` -- `layerApiClient(config)` - `TemplateConnector` -- `layerConfig` - `TemplateConfig` - `TemplateConfigConfig` - `TemplateConnectorRuntime` @@ -50,7 +48,7 @@ import { ConfigProvider, Effect, Layer } from "effect"; import { FetchHttpClient } from "effect/unstable/http"; import { createServer } from "node:http"; -import { layerConfig, TemplateConnector } from "@useairfoil/producer-template"; +import { TemplateConnector } from "@useairfoil/producer-template"; const 
ConsolePublisher = Layer.succeed(Publisher.Publisher)({ publish: () => Effect.succeed({ success: true }), @@ -61,10 +59,12 @@ const envLayer = Layer.mergeAll( Layer.succeed(ConfigProvider.ConfigProvider, ConfigProvider.fromEnv()), ); -const connectorLayer = layerConfig.pipe(Layer.provide(envLayer)); +const connectorLayer = TemplateConnector.layerConfig(TemplateConnector.TemplateConfigConfig).pipe( + Layer.provide(envLayer), +); const program = Effect.gen(function* () { - const { connector, routes } = yield* TemplateConnector; + const { connector, routes } = yield* TemplateConnector.TemplateConnector; const serverLayer = NodeHttpServer.layer(createServer, { port: 8080 }); return yield* Ingestion.runConnector(connector, { @@ -86,7 +86,7 @@ Effect.runPromise(runnable); ## API Client Layer -`layerApiClient(config)` builds `TemplateApiClient` from a raw `TemplateConfig` value. +`TemplateApiClient.layer(config)` builds `TemplateApiClient.TemplateApiClient` from a raw `TemplateConfig` value. The default implementation uses bearer-token style auth and JSONPlaceholder pagination via `_page` and `_limit`. 
@@ -94,15 +94,15 @@ The default implementation uses bearer-token style auth and JSONPlaceholder pagi import { Effect, Layer, Option } from "effect"; import { FetchHttpClient } from "effect/unstable/http"; -import { layerApiClient, PostSchema, TemplateApiClient } from "@useairfoil/producer-template"; +import { PostSchema, TemplateApiClient } from "@useairfoil/producer-template"; -const apiLayer = layerApiClient({ +const apiLayer = TemplateApiClient.layer({ apiBaseUrl: "https://jsonplaceholder.typicode.com", apiToken: "anonymous", webhookSecret: Option.none(), }).pipe(Layer.provide(FetchHttpClient.layer)); -const program = TemplateApiClient.use((api) => +const program = TemplateApiClient.TemplateApiClient.use((api) => api.fetchList(PostSchema, "/posts", { page: 1, limit: 10, diff --git a/templates/producer-template/src/api.ts b/templates/producer-template/src/api.ts index b62d9ba..c546295 100644 --- a/templates/producer-template/src/api.ts +++ b/templates/producer-template/src/api.ts @@ -1,5 +1,5 @@ import { ConnectorError } from "@useairfoil/connector-kit"; -import { Context, Effect, Layer, Schema } from "effect"; +import { Config, Context, Effect, Layer, Schema } from "effect"; import { HttpClient, HttpClientRequest, HttpClientResponse } from "effect/unstable/http"; import type { TemplateConfig } from "./connector"; @@ -37,7 +37,7 @@ export class TemplateApiClient extends Context.Service< // returns a small typed API surface. The auth header is Bearer by default; // swap it out for `setHeader("X-Api-Key", ...)`, Basic auth, or OAuth2 as // required by your upstream API. 
-export const makeTemplateApiClient = Effect.fnUntraced(function* ( +export const make = Effect.fnUntraced(function* ( config: TemplateConfig, ): Effect.fn.Return { const client = (yield* HttpClient.HttpClient).pipe( @@ -94,7 +94,12 @@ export const makeTemplateApiClient = Effect.fnUntraced(function* ( return { fetchJson, fetchList }; }); -export const layerApiClient = ( +export const layer = ( config: TemplateConfig, ): Layer.Layer => - Layer.effect(TemplateApiClient)(makeTemplateApiClient(config)); + Layer.effect(TemplateApiClient)(make(config)); + +export const layerConfig = ( + config: Config.Wrap, +): Layer.Layer => + Layer.effect(TemplateApiClient)(Config.unwrap(config).asEffect().pipe(Effect.flatMap(make))); diff --git a/templates/producer-template/src/connector.ts b/templates/producer-template/src/connector.ts index ea71018..bfa8078 100644 --- a/templates/producer-template/src/connector.ts +++ b/templates/producer-template/src/connector.ts @@ -9,7 +9,7 @@ import { } from "@useairfoil/connector-kit"; import { Config, Context, Effect, Layer, Option } from "effect"; -import { layerApiClient, TemplateApiClient } from "./api"; +import * as TemplateApiClient from "./api"; import { type Post, PostSchema, type WebhookPayload, WebhookPayloadSchema } from "./schemas"; import { dispatchEntityWebhook, @@ -93,10 +93,10 @@ const resolveWebhookDispatch = (options: { } }; -const makeTemplateConnector = Effect.fnUntraced(function* ( +export const make = Effect.fnUntraced(function* ( config: TemplateConfig, -): Effect.fn.Return { - const api = yield* TemplateApiClient; +): Effect.fn.Return { + const api = yield* TemplateApiClient.TemplateApiClient; const postStreams = yield* makeEntityStreams({ api, schema: PostSchema, @@ -151,22 +151,28 @@ const makeTemplateConnector = Effect.fnUntraced(function* ( return { connector, routes: [webhookRoute] }; }); -export const layerConfig: Layer.Layer = +export const layer = ( + config: TemplateConfig, +): Layer.Layer => 
Layer.effect(TemplateConnector)( - Effect.gen(function* () { - const config = yield* TemplateConfigConfig; - return yield* makeTemplateConnector(config).pipe( - Effect.annotateLogs({ component: "producer-template" }), - Effect.provide(layerApiClient(config)), - ); - }).pipe( - Effect.mapError((error) => - error instanceof ConnectorError - ? error - : new ConnectorError({ - message: "Template config failed", - cause: error, - }), - ), + make(config).pipe( + Effect.annotateLogs({ component: "producer-template" }), + Effect.provide(TemplateApiClient.layer(config)), ), ); + +export const layerConfig = ( + config: Config.Wrap, +): Layer.Layer => + Layer.effect(TemplateConnector)( + Config.unwrap(config) + .asEffect() + .pipe( + Effect.flatMap((config) => + make(config).pipe( + Effect.annotateLogs({ component: "producer-template" }), + Effect.provide(TemplateApiClient.layer(config)), + ), + ), + ), + ); diff --git a/templates/producer-template/src/index.ts b/templates/producer-template/src/index.ts index 6104615..eab6066 100644 --- a/templates/producer-template/src/index.ts +++ b/templates/producer-template/src/index.ts @@ -1,10 +1,4 @@ -export { layerApiClient, TemplateApiClient } from "./api"; -export { - type TemplateConfig, - TemplateConfigConfig, - TemplateConnector, - layerConfig, - type TemplateConnectorRuntime, -} from "./connector"; +export * as TemplateApiClient from "./api"; +export * as TemplateConnector from "./connector"; export type { Post, WebhookPayload } from "./schemas"; export { PostSchema, WebhookPayloadSchema } from "./schemas"; diff --git a/templates/producer-template/src/sandbox.ts b/templates/producer-template/src/sandbox.ts index 020880f..f457014 100644 --- a/templates/producer-template/src/sandbox.ts +++ b/templates/producer-template/src/sandbox.ts @@ -5,7 +5,7 @@ import { FetchHttpClient } from "effect/unstable/http"; import * as Observability from "effect/unstable/observability"; import { createServer } from "node:http"; -import { 
layerConfig, TemplateConnector } from "./index"; +import { TemplateConnector } from "./index"; const SandboxConfig = Config.all({ port: Config.port("TEMPLATE_WEBHOOK_PORT").pipe(Config.withDefault(8080)), @@ -37,7 +37,7 @@ const ConsolePublisherLayer = Layer.succeed(Publisher.Publisher)({ const program = Effect.gen(function* () { const config = yield* SandboxConfig; - const { connector, routes } = yield* TemplateConnector; + const { connector, routes } = yield* TemplateConnector.TemplateConnector; const routePaths = routes.map((route) => route.path); const serverLayer = NodeHttpServer.layer(createServer, { port: config.port }); @@ -48,7 +48,7 @@ const program = Effect.gen(function* () { const now = yield* DateTime.now; return yield* Ingestion.runConnector(connector, { - initialCutoff: DateTime.toDate(now), + initialCutoff: now, webhook: { routes, healthPath: "/health", @@ -62,7 +62,9 @@ const EnvLayer = Layer.mergeAll( Layer.succeed(ConfigProvider.ConfigProvider, ConfigProvider.fromEnv()), ); -const ConnectorLayer = layerConfig.pipe(Layer.provide(EnvLayer)); +const ConnectorLayer = TemplateConnector.layerConfig(TemplateConnector.TemplateConfigConfig).pipe( + Layer.provide(EnvLayer), +); const TelemetryLayer = Layer.unwrap( Effect.gen(function* () { diff --git a/templates/producer-template/src/streams.ts b/templates/producer-template/src/streams.ts index 339c04b..b05497d 100644 --- a/templates/producer-template/src/streams.ts +++ b/templates/producer-template/src/streams.ts @@ -1,13 +1,16 @@ import type * as Schema from "effect/Schema"; import { type Batch, type ConnectorError, type Cursor, Streams } from "@useairfoil/connector-kit"; -import { Deferred, Effect, Queue, Stream } from "effect"; +import { DateTime, Deferred, Effect, Queue, Stream } from "effect"; import type { TemplateApiClientService } from "./api"; // JSONPlaceholder has no timestamps, so we cursor on the numeric `id` field. 
// For a real API prefer a monotonically increasing, server-emitted timestamp. -const toNumber = (cursor: Cursor): number => (typeof cursor === "number" ? cursor : Number(cursor)); +const toNumber = (cursor: Cursor): number => { + if (DateTime.isDateTime(cursor)) return DateTime.toEpochMillis(cursor); + return typeof cursor === "number" ? cursor : Number(cursor); +}; const isOnOrBeforeCutoff = (value: unknown, cutoff: Cursor): boolean => { if (typeof value !== "number") return false; diff --git a/templates/producer-template/test/api.vcr.test.ts b/templates/producer-template/test/api.vcr.test.ts index c55d4db..37d9b53 100644 --- a/templates/producer-template/test/api.vcr.test.ts +++ b/templates/producer-template/test/api.vcr.test.ts @@ -4,16 +4,15 @@ import { FileSystemCassetteStore, VcrHttpClient } from "@useairfoil/effect-vcr"; import { ConfigProvider, Effect, Layer } from "effect"; import { FetchHttpClient } from "effect/unstable/http"; -import { makeTemplateApiClient, TemplateApiClient } from "../src/api"; -import { PostSchema, TemplateConfigConfig } from "../src/index"; +import { PostSchema, TemplateApiClient, TemplateConnector } from "../src/index"; // Replays a single page of JSONPlaceholder /posts from a recorded cassette. // This mirrors the producer-polar VCR setup: the connector-level flow is // covered by webhook.test.ts, and this test exercises only the API surface. 
describe("producer-template api (vcr)", () => { - it.effect("replays posts list page with VCR", () => { - const program = Effect.gen(function* () { - const api = yield* TemplateApiClient; + it.effect("replays posts list page with VCR", () => + Effect.gen(function* () { + const api = yield* TemplateApiClient.TemplateApiClient; const result = yield* api.fetchList(PostSchema, "/posts", { page: 1, limit: 10, @@ -21,39 +20,26 @@ describe("producer-template api (vcr)", () => { expect(result.items.length).toBeGreaterThan(0); expect(result.hasMore).toBe(true); - }).pipe(Effect.scoped); - - const apiLayer = Layer.effect(TemplateApiClient)( - Effect.gen(function* () { - const config = yield* TemplateConfigConfig; - return yield* makeTemplateApiClient(config); - }), - ); - - const cassetteStoreLayer = FileSystemCassetteStore.layer().pipe( - Layer.provide(NodeServices.layer), - ); - const vcrRuntimeLayer = Layer.mergeAll( - FetchHttpClient.layer, - NodeServices.layer, - cassetteStoreLayer, - ); - const vcrWithDeps = VcrHttpClient.layer({ vcrName: "producer-template", mode: "replay" }).pipe( - Layer.provide(vcrRuntimeLayer), - ); - - const testLayer = apiLayer.pipe( - Layer.provide(vcrWithDeps), - Layer.provide( - ConfigProvider.layer( - ConfigProvider.fromUnknown({ - TEMPLATE_API_BASE_URL: "https://jsonplaceholder.typicode.com", - TEMPLATE_API_TOKEN: "test", - }), + }).pipe( + Effect.provide( + TemplateApiClient.layerConfig(TemplateConnector.TemplateConfigConfig).pipe( + Layer.provide( + VcrHttpClient.layer({ vcrName: "producer-template", mode: "replay" }).pipe( + Layer.provide(FileSystemCassetteStore.layer()), + Layer.provide(Layer.merge(NodeServices.layer, FetchHttpClient.layer)), + ), + ), + Layer.provide( + ConfigProvider.layer( + ConfigProvider.fromUnknown({ + TEMPLATE_API_BASE_URL: "https://jsonplaceholder.typicode.com", + TEMPLATE_API_TOKEN: "test", + }), + ), + ), ), ), - ); - - return program.pipe(Effect.provide(testLayer), Effect.scoped); - }); + Effect.scoped, + ), 
+ ); }); diff --git a/templates/producer-template/test/webhook.test.ts b/templates/producer-template/test/webhook.test.ts index a47b40b..fe9adb6 100644 --- a/templates/producer-template/test/webhook.test.ts +++ b/templates/producer-template/test/webhook.test.ts @@ -1,11 +1,12 @@ import { NodeHttpServer } from "@effect/platform-node"; import { describe, expect, it } from "@effect/vitest"; import { ConnectorError, Ingestion } from "@useairfoil/connector-kit"; -import { ConfigProvider, Deferred, Effect, Layer, Ref } from "effect"; +import { Config, ConfigProvider, DateTime, Deferred, Effect, Layer, Ref } from "effect"; import { HttpClient, HttpClientRequest } from "effect/unstable/http"; -import { TemplateApiClient, type TemplateApiClientService } from "../src/api"; -import { layerConfig, TemplateConnector } from "../src/index"; +import type { TemplateApiClientService } from "../src/api"; + +import { TemplateApiClient, TemplateConnector } from "../src/index"; import { makeTestPublisher } from "./helpers"; const postWebhookPayload = { @@ -27,25 +28,16 @@ const makeApiStub = (): TemplateApiClientService => ({ }); describe("producer-template webhook", () => { - it.effect("publishes live webhook batches", () => { - const runtimeLayer = NodeHttpServer.layerTest; - const apiLayer = Layer.succeed(TemplateApiClient)(makeApiStub()); - - const connectorLayer = layerConfig.pipe(Layer.provide(apiLayer)); - const configProvider = ConfigProvider.fromUnknown({ - TEMPLATE_API_BASE_URL: "https://jsonplaceholder.typicode.com", - TEMPLATE_API_TOKEN: "test", - }); - - return Effect.gen(function* () { + it.effect("publishes live webhook batches", () => + Effect.gen(function* () { const { publishedRef, done, layer } = yield* makeTestPublisher(1); - const { connector, routes } = yield* TemplateConnector; - const runLayer = Layer.mergeAll(Ingestion.layerMemory, layer, runtimeLayer); + const { connector, routes } = yield* TemplateConnector.TemplateConnector; + const now = yield* DateTime.now; 
yield* Effect.gen(function* () { yield* Effect.forkScoped( Ingestion.runConnector(connector, { - initialCutoff: new Date(), + initialCutoff: now, webhook: { routes, }, @@ -64,15 +56,28 @@ describe("producer-template webhook", () => { const published = yield* Ref.get(publishedRef); expect(published.length).toBe(1); expect(published[0]?.name).toBe("posts"); - }).pipe(Effect.provide(runLayer)); + }).pipe( + Effect.provide(Layer.mergeAll(Ingestion.layerMemory, layer, NodeHttpServer.layerTest)), + ); }).pipe( Effect.provide( - connectorLayer.pipe( - Layer.provide(runtimeLayer), - Layer.provide(ConfigProvider.layer(configProvider)), + Layer.effect(TemplateConnector.TemplateConnector)( + Config.unwrap(TemplateConnector.TemplateConfigConfig) + .asEffect() + .pipe(Effect.flatMap(TemplateConnector.make)), + ).pipe( + Layer.provide(Layer.succeed(TemplateApiClient.TemplateApiClient)(makeApiStub())), + Layer.provide( + ConfigProvider.layer( + ConfigProvider.fromUnknown({ + TEMPLATE_API_BASE_URL: "https://jsonplaceholder.typicode.com", + TEMPLATE_API_TOKEN: "test", + }), + ), + ), ), ), Effect.scoped, - ); - }); + ), + ); });