diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 000000000..745c5861f --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,143 @@ +# AGENTS.md + +You are working in the iii monorepo — a backend unification engine with three primitives: **Function**, **Trigger**, **Worker**. The engine is Rust. SDKs exist for TypeScript, Python, and Rust. All communicate over WebSocket. + +## Commands + +```bash +# Setup +pnpm install # JS/TS dependencies +cargo build --release # Rust workspace + +# Build +pnpm build # all JS/TS packages (Turborepo) +cargo build --release # engine + Rust SDK + console + +# Test +pnpm test # all JS/TS tests +cargo test # all Rust tests +cargo test -p iii-engine # engine only +cargo test -p iii-sdk # Rust SDK only +cd sdk/packages/python/iii && uv sync --extra dev && uv run pytest # Python SDK + +# Lint & Format +pnpm fmt # format JS/TS (Biome) +pnpm fmt:check # check without changes +pnpm lint # lint JS/TS +cargo fmt --all # format Rust +cargo clippy --workspace # lint Rust + +# Run +cargo run --release # start engine (reads engine/config.yaml) +pnpm dev:console # console frontend dev server +pnpm dev:docs # docs dev server (Mintlify) +pnpm dev:website # website dev server +``` + +## Project Map + +``` +engine/ Rust engine — runtime, modules, protocol, CLI +sdk/packages/node/iii/ TypeScript SDK (npm: iii-sdk) +sdk/packages/python/iii/ Python SDK (PyPI: iii-sdk) +sdk/packages/rust/iii/ Rust SDK (crates.io: iii-sdk) +console/ Developer dashboard (React + Rust) +frameworks/motia/ Higher-level framework on iii-sdk +skills/ 24 agent skills (auto-discovered by SkillKit) +docs/ Documentation site (Mintlify/MDX) +website/ iii.dev website +scripts/ Build and CI scripts +``` + +**Workspaces:** `Cargo.toml` (Rust), `pnpm-workspace.yaml` (JS/TS), `turbo.json` (build orchestration). 
+ +## Boundaries + +### Always + +- Use `pnpm` (never `npm`) for JS/TS packages +- Use `cargo fmt --all` before committing Rust changes +- Use `pnpm fmt` before committing JS/TS changes +- Use leading slashes for HTTP `api_path` values: `/orders`, `/users/:id` +- Use `expression` (not `cron`) for cron trigger config fields +- Use `::` separator for function IDs: `orders::validate`, `reports::daily-summary` +- Use `workspace:*` for internal pnpm package references +- Include `## When to Use` and `## Boundaries` sections in every SKILL.md +- Match SKILL.md `name` field to its directory name exactly + +### Ask First + +- Changes to public SDK APIs (npm/PyPI/crates.io surface) +- Changes to engine config schema (`engine/config.yaml`) +- Changes to CI/CD workflows (`.github/`) +- Adding new engine modules +- Modifying the WebSocket protocol between SDK and engine + +### Never + +- Commit secrets, API keys, or credentials +- Use `npm` instead of `pnpm` +- Push directly to `main` +- Change engine licensing (ELv2) or SDK licensing (Apache-2.0) +- Remove "When to Use" / "Boundaries" from SKILL.md files (SkillKit validates these) +- Use `cron` as a config key — the engine uses `expression` +- Omit leading slashes on `api_path` — the engine standard is `/path` + +## Code Style + +**Rust (engine + SDK):** +```rust +// Function IDs use :: separator +iii.register_function( + RegisterFunction::new("orders::validate", validate_order) + .description("Validate an incoming order"), +); + +// HTTP triggers use leading slash +iii.register_trigger( + IIITrigger::Http(HttpTriggerConfig::new("/orders/validate").method(HttpMethod::Post)) + .for_function("orders::validate"), +); + +// Cron triggers use `expression` field (7-field: sec min hour dom month dow year) +iii.register_trigger( + IIITrigger::Cron(CronTriggerConfig::new("0 0 9 * * * *")) + .for_function("reports::daily-summary"), +); +``` + +**TypeScript (SDK):** +```typescript +// HTTP trigger with leading slash 
+iii.registerTrigger({ + type: 'http', + function_id: 'orders::validate', + config: { api_path: '/orders/validate', http_method: 'POST' }, +}); + +// Cron trigger with `expression` (not `cron`) +iii.registerTrigger({ + type: 'cron', + function_id: 'reports::daily-summary', + config: { expression: '0 0 9 * * * *' }, +}); +``` + +**Python (SDK):** +```python +# Same patterns — leading slash, expression field +iii.register_trigger({ + "type": "http", + "function_id": "orders::validate", + "config": {"api_path": "/orders/validate", "http_method": "POST"}, +}) +``` + +## Skills + +The `skills/` directory contains 24 agent skills (iii-prefixed) auto-discovered by `npx skills add iii-hq/iii` and `npx skillkit install iii-hq/iii`. Reference implementations live in `skills/references/` with TypeScript, Python, and Rust variants. + +## Licensing + +- `engine/` — Elastic License v2 (ELv2) +- Everything else — Apache-2.0 diff --git a/skills/.gitignore b/skills/.gitignore new file mode 100644 index 000000000..7a57a64fe --- /dev/null +++ b/skills/.gitignore @@ -0,0 +1,2 @@ +.claude +.cursor diff --git a/skills/LICENSE b/skills/LICENSE new file mode 100644 index 000000000..4b1c9dcc9 --- /dev/null +++ b/skills/LICENSE @@ -0,0 +1,190 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to the Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by the Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding any notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + Copyright 2026 iii-hq + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/skills/README.md b/skills/README.md new file mode 100644 index 000000000..f2eb0afb9 --- /dev/null +++ b/skills/README.md @@ -0,0 +1,141 @@ +# iii Skills + +[Agent Skills](https://agentskills.io) for building with the [iii engine](https://github.com/iii-hq/iii) — functions, triggers, workers, state, streams, and more. + +Works with Claude Code, Cursor, Gemini CLI, OpenCode, Amp, Goose, Roo Code, GitHub Copilot, VS Code, OpenAI Codex, and [30+ other agents](https://agentskills.io). + +## Install + +### One command + +```bash +npx skills add iii-hq/iii +``` + +### SkillKit + +```bash +# Install all iii skills +npx skillkit install iii-hq/iii + +# Install a single skill +npx skillkit install iii-hq/iii --skills=iii-http-endpoints + +# Sync skills across all your agents +npx skillkit sync +``` + +### Git clone + +```bash +# Claude Code +git clone https://github.com/iii-hq/iii.git /tmp/iii && cp -r /tmp/iii/skills/iii-* ~/.claude/skills/ + +# Cursor +git clone https://github.com/iii-hq/iii.git /tmp/iii && cp -r /tmp/iii/skills/iii-* ~/.cursor/skills/ + +# Gemini CLI +git clone https://github.com/iii-hq/iii.git /tmp/iii && cp -r /tmp/iii/skills/iii-* ~/.gemini/skills/ +``` + +### Multi-agent sync + +If you use multiple agents, SkillKit keeps skills in sync across all of them: + +```bash +# Install once, sync to Claude Code + Cursor + Gemini CLI +npx skillkit install iii-hq/iii +npx skillkit sync --agent claude-code +npx skillkit sync --agent cursor +npx skillkit sync --agent gemini-cli +``` + +Supports 32+ agents including Claude Code, Cursor, Codex, Gemini CLI, OpenCode, Amp, Goose, Roo Code, GitHub Copilot, and more. + +## Skills + +### Getting Started + +| Skill | What it does | +| --- | --- | +| [iii-getting-started](./iii-getting-started) | Install iii, create a project, write your first worker | + +### HOWTO Skills + +Direct mappings to [iii documentation](https://iii.dev/docs) HOWTOs. Each teaches one primitive or capability. 
Reference implementations are available in TypeScript, Python, and Rust. + +| Skill | What it does | +| --- | --- | +| [iii-functions-and-triggers](./iii-functions-and-triggers) | Register functions and bind triggers across TypeScript, Python, and Rust | +| [iii-http-endpoints](./iii-http-endpoints) | Expose functions as REST API endpoints | +| [iii-cron-scheduling](./iii-cron-scheduling) | Schedule recurring tasks with cron expressions | +| [iii-queue-processing](./iii-queue-processing) | Async job processing with retries, concurrency, and ordering | +| [iii-state-management](./iii-state-management) | Distributed key-value state across functions | +| [iii-state-reactions](./iii-state-reactions) | Auto-trigger functions on state changes | +| [iii-realtime-streams](./iii-realtime-streams) | Push live updates to WebSocket clients | +| [iii-custom-triggers](./iii-custom-triggers) | Build custom trigger types for external events | +| [iii-trigger-actions](./iii-trigger-actions) | Synchronous, fire-and-forget, and enqueue invocation modes | +| [iii-trigger-conditions](./iii-trigger-conditions) | Gate trigger execution with condition functions | +| [iii-dead-letter-queues](./iii-dead-letter-queues) | Inspect and redrive failed queue jobs | +| [iii-engine-config](./iii-engine-config) | Configure the iii engine via iii-config.yaml | +| [iii-observability](./iii-observability) | OpenTelemetry tracing, metrics, and logging | +| [iii-channels](./iii-channels) | Binary streaming between workers | + +### Architecture Pattern Skills + +Compose multiple iii primitives into common backend architectures. Each includes a full working reference implementation. 
+ +| Skill | What it does | +| --- | --- | +| [iii-agentic-backend](./iii-agentic-backend) | Multi-agent pipelines with queue handoffs and shared state | +| [iii-reactive-backend](./iii-reactive-backend) | Real-time backends with state triggers and stream updates | +| [iii-workflow-orchestration](./iii-workflow-orchestration) | Durable multi-step pipelines with retries and DLQ | +| [iii-http-invoked-functions](./iii-http-invoked-functions) | Register external HTTP endpoints as iii functions | +| [iii-effect-system](./iii-effect-system) | Composable, traceable function pipelines | +| [iii-event-driven-cqrs](./iii-event-driven-cqrs) | CQRS with event sourcing and independent projections | +| [iii-low-code-automation](./iii-low-code-automation) | Trigger-transform-action automation chains | + +### SDK Reference Skills + +| Skill | What it does | +| --- | --- | +| [iii-node-sdk](./iii-node-sdk) | Node.js/TypeScript SDK reference | +| [iii-python-sdk](./iii-python-sdk) | Python SDK reference | +| [iii-rust-sdk](./iii-rust-sdk) | Rust SDK reference | + +### Shared References + +| File | What it contains | +| --- | --- | +| [references/iii-config.yaml](./references/iii-config.yaml) | Full annotated engine configuration reference | + +## Format + +Each skill follows the [Agent Skills specification](https://agentskills.io/specification): + +```text +skills/ +├── iii-http-endpoints/ +│ └── SKILL.md # YAML frontmatter (name + description) + markdown instructions +├── iii-channels/ +│ └── SKILL.md +├── references/ +│ ├── http-endpoints.js # TypeScript reference implementation +│ ├── http-endpoints.py # Python reference implementation +│ ├── http-endpoints.rs # Rust reference implementation +│ ├── iii-config.yaml # Shared engine config reference +│ └── ... +└── README.md +``` + +Skills are activated automatically when the agent detects a matching task based on the description field. Code references live in the `references/` directory, named after their skill. 
+ +## Contributing + +1. Fork this repo +2. Add or edit a skill in `skills/` +3. Submit a PR + +## License + +Apache-2.0 diff --git a/skills/SKILLS.md b/skills/SKILLS.md new file mode 100644 index 000000000..5b9a721de --- /dev/null +++ b/skills/SKILLS.md @@ -0,0 +1,50 @@ +# iii Skills + +Skills for building on the [iii engine](https://iii.dev) — a backend unification and orchestration system. + +## Getting Started + +- [getting-started](iii-getting-started/SKILL.md) — Install iii, create a project, write your first worker + +## HOWTO Skills + +Direct mappings to iii documentation HOWTOs. Each teaches one primitive or capability. + +- [functions-and-triggers](iii-functions-and-triggers/SKILL.md) — Register functions and triggers across TypeScript, Python, and Rust +- [http-endpoints](iii-http-endpoints/SKILL.md) — Expose functions as REST API endpoints +- [cron-scheduling](iii-cron-scheduling/SKILL.md) — Schedule recurring tasks with cron expressions +- [queue-processing](iii-queue-processing/SKILL.md) — Async job processing with retries, concurrency, and ordering +- [state-management](iii-state-management/SKILL.md) — Distributed key-value state across functions +- [state-reactions](iii-state-reactions/SKILL.md) — Auto-trigger functions on state changes +- [realtime-streams](iii-realtime-streams/SKILL.md) — Push live updates to WebSocket clients +- [custom-triggers](iii-custom-triggers/SKILL.md) — Build custom trigger types for external events +- [trigger-actions](iii-trigger-actions/SKILL.md) — Synchronous, fire-and-forget, and enqueue invocation modes +- [trigger-conditions](iii-trigger-conditions/SKILL.md) — Gate trigger execution with condition functions +- [dead-letter-queues](iii-dead-letter-queues/SKILL.md) — Inspect and redrive failed queue jobs +- [engine-config](iii-engine-config/SKILL.md) — Configure the iii engine via iii-config.yaml +- [observability](iii-observability/SKILL.md) — OpenTelemetry tracing, metrics, and logging +- 
[channels](iii-channels/SKILL.md) — Binary streaming between workers + +## Architecture Pattern Skills + +Compose multiple iii primitives into common backend architectures. Each includes a full working reference implementation in `references/`, named after the skill (e.g. `references/agentic-backend.js`). + +- [agentic-backend](iii-agentic-backend/SKILL.md) — Multi-agent pipelines with queue handoffs and shared state +- [reactive-backend](iii-reactive-backend/SKILL.md) — Real-time backends with state triggers and stream updates +- [workflow-orchestration](iii-workflow-orchestration/SKILL.md) — Durable multi-step pipelines with retries and DLQ +- [http-invoked-functions](iii-http-invoked-functions/SKILL.md) — Register external HTTP endpoints as iii functions +- [effect-system](iii-effect-system/SKILL.md) — Composable, traceable function pipelines +- [event-driven-cqrs](iii-event-driven-cqrs/SKILL.md) — CQRS with event sourcing and independent projections +- [low-code-automation](iii-low-code-automation/SKILL.md) — Trigger-transform-action automation chains + +## SDK Reference Skills + +Minimal skills pointing to official SDK documentation. + +- [node-sdk](iii-node-sdk/SKILL.md) — Node.js/TypeScript SDK +- [python-sdk](iii-python-sdk/SKILL.md) — Python SDK +- [rust-sdk](iii-rust-sdk/SKILL.md) — Rust SDK + +## Shared References + +- [references/iii-config.yaml](references/iii-config.yaml) — Full annotated engine configuration reference (auto-synced from docs) diff --git a/skills/iii-agentic-backend/SKILL.md b/skills/iii-agentic-backend/SKILL.md new file mode 100644 index 000000000..5fdfcb0dd --- /dev/null +++ b/skills/iii-agentic-backend/SKILL.md @@ -0,0 +1,93 @@ +--- +name: iii-agentic-backend +description: >- + Creates and orchestrates multi-agent pipelines on the iii engine. Use when + building AI agent collaboration, agent orchestration, research/review/synthesis + chains, or any system where specialized agents hand off work through queues + and shared state.
+--- + +# Agentic Backend + +Comparable to: LangGraph, CrewAI, AutoGen, Letta + +## Key Concepts + +Use the concepts below when they fit the task. Not every agentic workflow needs all of them. + +- Each agent is a registered function with a single responsibility +- Agents communicate via **named queues** (ordered handoffs) and **shared state** (accumulated context) +- **Approval gates** are explicit checks in the producing agent before enqueuing the next step +- An HTTP trigger provides the entry point; agents chain from there +- **Pubsub** broadcasts completion events for downstream listeners + +## Architecture + +```text +HTTP request + → Enqueue(agent-tasks) → Agent 1 (researcher) → writes state + → Enqueue(agent-tasks) → Agent 2 (critic) → reads/updates state + → explicit approval check (is-approved?) + → Enqueue(agent-tasks) → Agent 3 (synthesizer) → final state update + → publish(research.complete) +``` + +## iii Primitives Used + +| Primitive | Purpose | +| ---------------------------------------------------------------------------- | -------------------------------------------- | +| `registerWorker` | Initialize the worker and connect to iii | +| `registerFunction` | Define each agent | +| trigger `state::set`, `state::get`, `state::update` | Shared context between agents | +| `trigger({ ..., action: TriggerAction.Enqueue({ queue }) })` | Async handoff between agents via named queue | +| `trigger({ function_id, payload })` | Explicit condition check before enqueuing | +| `trigger({ function_id: 'publish', payload, action: TriggerAction.Void() })` | Broadcast completion to any listeners | +| `registerTrigger({ type: 'http' })` | Entry point | + +## Reference Implementation + +See [../references/agentic-backend.js](../references/agentic-backend.js) for the full working example — a multi-agent research pipeline +where a researcher gathers findings, a critic reviews them, and a synthesizer produces a final report. 
+ +## Common Patterns + +Code using this pattern commonly includes, when relevant: + +- `registerWorker(url, { workerName })` — worker initialization +- `trigger({ function_id, payload, action: TriggerAction.Enqueue({ queue }) })` — async handoff between agents +- trigger `state::set`, `state::get`, `state::update` — shared context between agents +- Explicit condition check via `await iii.trigger({ function_id: 'condition-fn', payload })` before enqueuing next agent +- `trigger({ function_id: 'publish', payload: { topic, data }, action: TriggerAction.Void() })` — completion broadcast +- Each agent as its own `registerFunction` with `agents::` prefix IDs +- `const logger = new Logger()` — structured logging per agent + +## Adapting This Pattern + +Use the adaptations below when they apply to the task. + +- Replace simulated logic in each agent with real work (API calls, LLM inference, etc.) +- Add more agents by registering functions and enqueuing to them with `TriggerAction.Enqueue({ queue })` +- For approval gates, call a condition function explicitly before enqueuing the next agent +- Define queue configs (retries, concurrency) in `iii-config.yaml` under `queue_configs` +- State scope should be named for your domain (e.g. `research-tasks`, `support-tickets`) +- `functionId` segments should reflect your agent hierarchy (e.g. `agents::researcher`, `agents::critic`) + +## Engine Configuration + +Named queues for agent handoffs are declared in iii-config.yaml under `queue_configs`. See [../references/iii-config.yaml](../references/iii-config.yaml) for the full annotated config reference. + +## Pattern Boundaries + +- If a request is about adapting existing HTTP endpoints into `registerFunction` (including prompts asking for `{ path, id }` endpoint maps + loops), prefer `iii-http-invoked-functions`. +- Stay with `iii-agentic-backend` when the primary problem is multi-agent orchestration, queue handoffs, approval gates, and shared context. 
+ +## When to Use + +- Use this skill when the task is primarily about `iii-agentic-backend` in the iii engine. +- Triggers when the request directly asks for this pattern or an equivalent implementation. + +## Boundaries + +- Never use this skill as a generic fallback for unrelated tasks. +- You must not apply this skill when a more specific iii skill is a better fit. +- Always verify environment and safety constraints before applying examples from this skill. diff --git a/skills/iii-channels/SKILL.md b/skills/iii-channels/SKILL.md new file mode 100644 index 000000000..bdadceffe --- /dev/null +++ b/skills/iii-channels/SKILL.md @@ -0,0 +1,88 @@ +--- +name: iii-channels +description: >- + Binary streaming between workers via channels. Use when building data + pipelines, file transfers, streaming responses, or any pattern requiring + binary data transfer between functions. +--- + +# Channels + +Comparable to: Unix pipes, gRPC streaming, WebSocket data streams + +## Key Concepts + +Use the concepts below when they fit the task. Not every worker needs channels. + +- A **Channel** is a WebSocket-backed binary stream between two endpoints (writer and reader) +- `createChannel()` returns a writer/reader pair plus serializable refs that can be passed to other workers +- **StreamChannelRef** is a serializable reference (channel_id, access_key, direction) that can be included in function payloads +- Writers send binary data (chunked into 64KB frames) and text messages +- Readers consume binary chunks via `readAll()` or receive text messages via callbacks +- Consumers must construct a reader from a serializable `StreamChannelRef` (e.g., `ChannelReader::new(...)`) rather than using the producer-side reader object returned by `createChannel()` +- Channels work cross-worker and cross-language — a Python writer can stream to a Rust reader + +## Architecture + +A function creates a channel via `createChannel()`, receiving a writer and reader pair. 
The writer ref or reader ref is passed to another function (potentially in a different worker/language) via a trigger payload. The engine brokers the WebSocket connection between the two endpoints. Binary data flows directly between workers through the engine's channel endpoint. + +## iii Primitives Used + +| Primitive | Purpose | +| -------------------------------------- | ------------------------------------------------ | +| `createChannel(bufferSize?)` | Create a channel, returns writer + reader pair | +| `ChannelWriter.write(data)` | Send binary data (chunked into 64KB frames) | +| `ChannelWriter.sendMessage(msg)` | Send a text message through the channel | +| `ChannelWriter.close()` | Close the writer end | +| `ChannelReader.readAll()` | Read entire stream into a single buffer | +| `ChannelReader.onMessage(callback)` | Register callback for text messages | +| `StreamChannelRef` | Serializable reference to pass between workers | + +## Reference Implementation + +- **TypeScript**: [../references/channels.js](../references/channels.js) +- **Python**: [../references/channels.py](../references/channels.py) +- **Rust**: [../references/channels.rs](../references/channels.rs) + +Each reference shows the same patterns (channel creation, binary streaming, text messages, cross-function handoff) in its respective language. + +## Common Patterns + +Code using this pattern commonly includes, when relevant: + +- `const channel = await iii.createChannel()` — create a channel pair (producer access) +- `channel.writer.stream.write(buffer)` / `channel.writer.write(data)` — send binary data +- `channel.writer.sendMessage(JSON.stringify({ type: 'metadata', ... 
}))` — send text metadata +- `channel.writer.close()` — signal end of stream +- Pass `channel.readerRef` or `channel.writerRef` in trigger payloads for cross-worker streaming +- Consumer must reconstruct the reader from the ref: e.g., `new ChannelReader(iii.address, readerRef)` +- `const data = await reader.readAll()` — read entire stream (consumer behavior) +- `reader.onMessage(msg => { ... })` — handle text messages (consumer behavior) + +## Adapting This Pattern + +Use the adaptations below when they apply to the task. + +- Use channels for large data transfers that shouldn't be serialized into JSON payloads +- Pass `readerRef` to a processing function and `writerRef` to a producing function for pipeline patterns +- Use text messages for metadata/signaling alongside binary data streams +- Set `bufferSize` when the reader may be slower than the writer to apply backpressure +- Channels work cross-language — a TypeScript producer can stream to a Rust consumer + +## Pattern Boundaries + +- For key-value state persistence, prefer `iii-state-management`. +- For stream CRUD (named streams with groups/keys), prefer `iii-realtime-streams`. +- For pub/sub messaging, prefer triggers with `subscribe` type. +- Stay with `iii-channels` when the primary problem is binary data streaming between workers. + +## When to Use + +- Use this skill when the task is primarily about `iii-channels` in the iii engine. +- Triggers when the request directly asks for this pattern or an equivalent implementation. + +## Boundaries + +- Never use this skill as a generic fallback for unrelated tasks. +- You must not apply this skill when a more specific iii skill is a better fit. +- Always verify environment and safety constraints before applying examples from this skill. 
diff --git a/skills/iii-cron-scheduling/SKILL.md b/skills/iii-cron-scheduling/SKILL.md new file mode 100644 index 000000000..892f8c8be --- /dev/null +++ b/skills/iii-cron-scheduling/SKILL.md @@ -0,0 +1,86 @@ +--- +name: iii-cron-scheduling +description: >- + Registers cron triggers with 7-field expressions to run functions on + recurring schedules. Use when scheduling periodic jobs, timed automation, + crontab replacements, cleanup routines, report generation, health checks, + batch processing, or any task that should run every N seconds, minutes, hours, + or on a weekly/monthly calendar. +--- + +# Cron Scheduling + +Comparable to: node-cron, APScheduler, crontab + +## Key Concepts + +Use the concepts below when they fit the task. Not every scheduled job needs all of them. + +- Cron expressions use a **7-field format**: `second minute hour day month weekday year` +- **CronModule** evaluates expressions and fires triggers on schedule +- Handlers should be **fast** — enqueue heavy work to a queue instead of blocking the cron handler +- Each cron trigger binds one expression to one function +- Overlapping schedules are fine; each trigger fires independently + +## Architecture + + CronModule timer tick + → registerTrigger type:'cron' expression match + → registerFunction handler + → (optional) TriggerAction.Enqueue for heavy work + +## iii Primitives Used + +| Primitive | Purpose | +| ----------------------------------------- | ---------------------------------------- | +| `registerFunction` | Define the handler for the scheduled job | +| `registerTrigger({ type: 'cron' })` | Bind a cron expression to a function | +| `config: { expression: '0 0 9 * * * *' }` | Cron schedule in 7-field format | + +## Reference Implementation + +See [../references/cron-scheduling.js](../references/cron-scheduling.js) for the full working example — a recurring scheduled task that fires on a cron expression and optionally enqueues heavy work. 
+ +Also available in **Python**: [../references/cron-scheduling.py](../references/cron-scheduling.py) + +Also available in **Rust**: [../references/cron-scheduling.rs](../references/cron-scheduling.rs) + +## Common Patterns + +Code using this pattern commonly includes, when relevant: + +- `registerWorker(url, { workerName })` — worker initialization +- `registerFunction(id, handler)` — define the scheduled handler +- `registerTrigger({ type: 'cron', config: { expression } })` — bind the schedule +- `trigger({ function_id, payload, action: TriggerAction.Enqueue({ queue }) })` — offload heavy work +- `const logger = new Logger()` — structured logging per job + +## Adapting This Pattern + +Use the adaptations below when they apply to the task. + +- Adjust the 7-field expression to match your schedule (e.g. `0 0 */6 * * * *` for every 6 hours) +- Keep the cron handler lightweight — use it to validate and enqueue, not to do the heavy lifting +- For jobs that need state (e.g. last-run timestamp), combine with `iii-state-management` +- Multiple cron triggers can feed the same queue for fan-in processing + +## Engine Configuration + +CronModule must be enabled in iii-config.yaml. See [../references/iii-config.yaml](../references/iii-config.yaml) for the full annotated config reference. + +## Pattern Boundaries + +- If the task is about one-off async work rather than recurring schedules, prefer `iii-queue-processing`. +- If the trigger should fire on state changes rather than time, prefer `iii-state-reactions`. +- Stay with `iii-cron-scheduling` when the primary need is time-based periodic execution. + +## When to Use + +- Use this skill when the task is primarily about `iii-cron-scheduling` in the iii engine. +- Triggers when the request directly asks for this pattern or an equivalent implementation. + +## Boundaries + +- Never use this skill as a generic fallback for unrelated tasks. +- You must not apply this skill when a more specific iii skill is a better fit. 
+- Always verify environment and safety constraints before applying examples from this skill. diff --git a/skills/iii-custom-triggers/SKILL.md b/skills/iii-custom-triggers/SKILL.md new file mode 100644 index 000000000..5ff742551 --- /dev/null +++ b/skills/iii-custom-triggers/SKILL.md @@ -0,0 +1,85 @@ +--- +name: iii-custom-triggers +description: >- + Builds custom trigger types for events iii does not handle natively. Use when + integrating webhooks, file watchers, IoT devices, database CDC, or any + external event source. +--- + +# Custom Triggers + +Comparable to: Custom event adapters, webhook receivers + +## Key Concepts + +Use the concepts below when they fit the task. Not every custom trigger needs all of them. + +- **registerTriggerType(id, handler)** defines a new trigger type with `registerTrigger` and `unregisterTrigger` callbacks +- The handler receives a **TriggerConfig** containing `id`, `function_id`, and `config` +- When the external event fires, call `iii.trigger(function_id, event)` to invoke the registered function +- **unregisterTriggerType** cleans up when the trigger type is no longer needed +- Do not reuse built-in trigger type names: `http`, `cron`, `queue`, `state`, `stream`, `subscribe` + +## Architecture + + External event source (webhook, file watcher, IoT, CDC, etc.) 
+ → Custom trigger handler (registerTriggerType) + → iii.trigger(function_id, event) + → Registered function processes the event + +## iii Primitives Used + +| Primitive | Purpose | +| -------------------------------------------- | -------------------------------------------------- | +| `registerTriggerType(id, handler)` | Define a new trigger type with lifecycle hooks | +| `unregisterTriggerType(id)` | Clean up a custom trigger type | +| `TriggerConfig: { id, function_id, config }` | Configuration passed to the trigger handler | +| `iii.trigger(function_id, event)` | Fire the registered function when the event occurs | + +## Reference Implementation + +See [../references/custom-triggers.js](../references/custom-triggers.js) for the full working example — a custom trigger type that listens for external events and routes them to registered functions. + +Also available in **Python**: [../references/custom-triggers.py](../references/custom-triggers.py) + +Also available in **Rust**: [../references/custom-triggers.rs](../references/custom-triggers.rs) + +## Common Patterns + +Code using this pattern commonly includes, when relevant: + +- `registerWorker(url, { workerName })` — worker initialization +- `registerTriggerType(id, { registerTrigger, unregisterTrigger })` — define the custom trigger +- `registerTrigger(config)` — called by iii when a function subscribes to this trigger type +- `unregisterTrigger(config)` — called by iii when a function unsubscribes +- `iii.trigger(config.function_id, eventPayload)` — fire the target function +- Cleanup logic in `unregisterTrigger` (close connections, remove listeners, clear intervals) +- `const logger = new Logger()` — structured logging + +## Adapting This Pattern + +Use the adaptations below when they apply to the task. + +- Choose a unique trigger type name that describes your event source (e.g. 
`file-watcher`, `mqtt`, `db-cdc`) +- In `registerTrigger`, start the listener (open socket, poll endpoint, subscribe to topic) +- In `unregisterTrigger`, tear down the listener to avoid resource leaks +- Store active listeners in a map keyed by `config.id` for clean unregistration +- Pass relevant event data in the payload when calling `iii.trigger(function_id, event)` + +## Pattern Boundaries + +- If the task uses built-in HTTP routes, prefer `iii-http-endpoints`. +- If the task uses built-in cron schedules, prefer `iii-cron-scheduling`. +- If the task uses built-in queue triggers, prefer `iii-queue-processing`. +- Stay with `iii-custom-triggers` when iii has no built-in trigger type for the event source. + +## When to Use + +- Use this skill when the task is primarily about `iii-custom-triggers` in the iii engine. +- Triggers when the request directly asks for this pattern or an equivalent implementation. + +## Boundaries + +- Never use this skill as a generic fallback for unrelated tasks. +- You must not apply this skill when a more specific iii skill is a better fit. +- Always verify environment and safety constraints before applying examples from this skill. diff --git a/skills/iii-dead-letter-queues/SKILL.md b/skills/iii-dead-letter-queues/SKILL.md new file mode 100644 index 000000000..76fc56243 --- /dev/null +++ b/skills/iii-dead-letter-queues/SKILL.md @@ -0,0 +1,90 @@ +--- +name: iii-dead-letter-queues +description: >- + Inspects and redrives jobs that exhausted all retries. Use when handling + failed queue jobs, debugging processing errors, or implementing retry + strategies. +--- + +# Dead Letter Queues + +Comparable to: SQS DLQ, RabbitMQ dead-letter exchanges + +## Key Concepts + +Use the concepts below when they fit the task. Not every queue failure needs manual DLQ intervention. 
+
+- Jobs move to a **DLQ** after exhausting `max_retries` with exponential backoff (`backoff_ms * 2^attempt`)
+- Each DLQ entry preserves the original payload, last error, timestamp, and job metadata
+- **Redrive** via the built-in `iii::queue::redrive` function or the `iii trigger` CLI command
+- Redriving resets attempt counters to zero, giving jobs a fresh retry cycle
+- Always investigate and deploy fixes before redriving — blindly redriving repeats failures
+- DLQ support available on Builtin and RabbitMQ adapters
+
+## Architecture
+
+A queue consumer fails processing a job. The engine retries with exponential backoff up to `max_retries`. Once exhausted, the message moves to the DLQ. An operator inspects the failure, deploys a fix, then redrives the DLQ to replay all failed jobs.
+
+## iii Primitives Used
+
+| Primitive | Purpose |
+| ------------------------------------------------------------------------------ | ---------------------------------------- |
+| `trigger({ function_id: 'iii::queue::redrive', payload: { queue } })` | Redrive all DLQ jobs for a named queue |
+| `trigger({ function_id: 'iii::queue::status', payload: { queue } })` | Check queue and DLQ status |
+| `iii trigger --function-id='iii::queue::redrive' --payload='{"queue":"name"}'` | CLI redrive command (part of the engine binary) |
+| `--timeout-ms` | CLI flag to set trigger timeout (default 30s) |
+| `queue_configs` in iii-config.yaml | Configure `max_retries` and `backoff_ms` |
+
+## Reference Implementation
+
+See [../references/dead-letter-queues.js](../references/dead-letter-queues.js) for the full working example — inspecting DLQ status, redriving failed jobs via SDK and CLI, and configuring retry behavior.
+
+Also available in **Python**: [../references/dead-letter-queues.py](../references/dead-letter-queues.py)
+
+Also available in **Rust**: [../references/dead-letter-queues.rs](../references/dead-letter-queues.rs)
+ +## Common Patterns + +Code using this pattern commonly includes, when relevant: + +- `await iii.trigger({ function_id: 'iii::queue::redrive', payload: { queue: 'payment' } })` — redrive via SDK +- `iii trigger --function-id='iii::queue::redrive' --payload='{"queue": "payment"}'` — redrive via CLI +- `iii trigger --function-id='iii::queue::redrive' --payload='{"queue": "payment"}' --timeout-ms=60000` — with custom timeout +- Redrive returns `{ queue: 'payment', redriven: 12 }` indicating count of replayed jobs +- Inspect in RabbitMQ UI at `http://localhost:15672`, find `iii.__fn_queue::{name}::dlq.queue` +- Best practice: investigate failures, deploy fix, then redrive +- Monitor DLQ depth as an operational alert signal + +## Adapting This Pattern + +Use the adaptations below when they apply to the task. + +- Set `max_retries` and `backoff_ms` in `queue_configs` based on your failure tolerance +- Build an admin endpoint that calls `iii::queue::redrive` for operational control +- Use `iii::queue::status` to check DLQ depth before and after redriving +- For dev/test, use lower retry counts to surface failures faster +- In production with RabbitMQ, use the management UI for detailed message inspection +- Consider building an alerting function that triggers on DLQ depth thresholds + +## Engine Configuration + +Queue `max_retries` and `backoff_ms` are set per queue in iii-config.yaml under `queue_configs`. See [../references/iii-config.yaml](../references/iii-config.yaml) for the full annotated config reference. + +## Pattern Boundaries + +- For queue processing patterns (enqueue, concurrency, FIFO), prefer `iii-queue-processing`. +- For queue configuration (retries, backoff, adapters), prefer `iii-engine-config`. +- For function registration and triggers, prefer `iii-functions-and-triggers`. +- Stay with `iii-dead-letter-queues` when the primary problem is inspecting or redriving failed jobs. 
+ +## When to Use + +- Use this skill when the task is primarily about `iii-dead-letter-queues` in the iii engine. +- Triggers when the request directly asks for this pattern or an equivalent implementation. + +## Boundaries + +- Never use this skill as a generic fallback for unrelated tasks. +- You must not apply this skill when a more specific iii skill is a better fit. +- Always verify environment and safety constraints before applying examples from this skill. diff --git a/skills/iii-effect-system/SKILL.md b/skills/iii-effect-system/SKILL.md new file mode 100644 index 000000000..a46135f0c --- /dev/null +++ b/skills/iii-effect-system/SKILL.md @@ -0,0 +1,86 @@ +--- +name: iii-effect-system +description: >- + Builds composable, pipeable function chains on the iii engine. Use when + building functional pipelines, effect systems, or typed composition layers + where each step is a pure function with distributed tracing and retry. +--- + +# Effect Systems & Typed Functional Infrastructure + +Comparable to: Effect-TS + +## Key Concepts + +Use the concepts below when they fit the task. Not every effect pipeline needs all of them. 
+ +- Each effect is a registered function with a single responsibility (parse, enrich, persist, notify) +- Effects compose by calling one function from another via `trigger` +- The entire pipeline is traceable end-to-end through OpenTelemetry +- Errors propagate naturally — a failing effect stops the chain +- An HTTP trigger provides the entry point; effects chain from there + +## Architecture + +```text +HTTP request + → fx::parse-user-input (validate + normalize) + → fx::enrich (add metadata, lookup external data) + → fx::persist (write to state) + → fx::notify (fire-and-forget side effect) + ← composed result returned to caller +``` + +## iii Primitives Used + +| Primitive | Purpose | +| ----------------------------------------------------- | ---------------------------------------- | +| `registerWorker` | Initialize the worker and connect to iii | +| `registerFunction` | Define each effect | +| `trigger({ function_id, payload })` | Compose effects synchronously | +| `trigger({ ..., action: TriggerAction.Void() })` | Fire-and-forget side effects | +| trigger `state::set`, `state::get` | Persist data between effects | +| `registerTrigger({ type: 'http' })` | Entry point | + +## Reference Implementation + +See [../references/effect-system.js](../references/effect-system.js) for the full working example — a user signup pipeline +where input is parsed, enriched with external data, persisted to state, and a welcome notification is fired. 
+ +## Common Patterns + +Code using this pattern commonly includes, when relevant: + +- `registerWorker(url, { workerName })` — worker initialization +- `trigger({ function_id, payload })` — synchronous composition (effect A calls effect B) +- Each effect as its own `registerFunction` with `fx::` prefix IDs +- Error throwing for validation failures (errors propagate up the chain) +- `trigger({ ..., action: TriggerAction.Void() })` — fire-and-forget for non-critical side effects +- `const logger = new Logger()` — structured logging per effect + +## Adapting This Pattern + +Use the adaptations below when they apply to the task. + +- Replace simulated logic with real work (API calls, database queries, ML inference) +- Add new effects by registering functions and calling them via `trigger` +- For unreliable steps, use `TriggerAction.Enqueue({ queue })` instead of synchronous `trigger` +- Keep effects pure where possible — accept input, return output, no hidden side effects +- Function IDs should be domain-prefixed (e.g. `fx::validate-email`, `fx::geocode-address`) + +## Pattern Boundaries + +- If a request is about durable multi-step workflows with retries and DLQ handling, prefer `iii-workflow-orchestration`. +- If the task involves multiple independent agents handing off work, prefer `iii-agentic-backend`. +- Stay with `iii-effect-system` when the primary concern is composable, traceable function pipelines with synchronous chaining. + +## When to Use + +- Use this skill when the task is primarily about `iii-effect-system` in the iii engine. +- Triggers when the request directly asks for this pattern or an equivalent implementation. + +## Boundaries + +- Never use this skill as a generic fallback for unrelated tasks. +- You must not apply this skill when a more specific iii skill is a better fit. +- Always verify environment and safety constraints before applying examples from this skill. 
diff --git a/skills/iii-engine-config/SKILL.md b/skills/iii-engine-config/SKILL.md new file mode 100644 index 000000000..f65f12da3 --- /dev/null +++ b/skills/iii-engine-config/SKILL.md @@ -0,0 +1,128 @@ +--- +name: iii-engine-config +description: >- + Configures the iii engine via iii-config.yaml — modules, adapters, queue + configs, ports, and environment variables. Use when deploying, tuning, or + customizing the engine. +--- + +# Engine Config + +Comparable to: Infrastructure as code, Docker Compose configs + +## Key Concepts + +Use the concepts below when they fit the task. Not every deployment needs all modules or adapters. + +- **iii-config.yaml** defines the engine port, modules, workers, adapters, and queue configs +- **Environment variables** use `${VAR:default}` syntax (default is optional) +- **Modules** are the building blocks — each enables a capability (API, state, queue, cron, etc.) +- **Workers** are external binary modules managed via `iii.toml` and the `iii worker` CLI commands +- **Adapters** swap storage backends per module: in_memory, file_based, Redis, RabbitMQ +- **Queue configs** control retry count, concurrency, ordering, and backoff per named queue +- The engine listens on port **49134** (WebSocket) for SDK/worker connections + +## Architecture + +The iii-config.yaml file is loaded by the iii engine binary at startup. Modules are initialized in order, adapters connect to their backends, and the engine begins accepting worker connections over WebSocket on port 49134. External workers defined in the `workers` section are spawned as child processes automatically. 
+ +## iii Primitives Used + +| Primitive | Purpose | +| ---------------------------------------------- | -------------------------------------- | +| `modules::api::RestApiModule` | HTTP API server (port 3111) | +| `modules::stream::StreamModule` | WebSocket streams (port 3112) | +| `modules::state::StateModule` | Persistent key-value state storage | +| `modules::queue::QueueModule` | Background job processing with retries | +| `modules::pubsub::PubSubModule` | In-process event fanout | +| `modules::cron::CronModule` | Time-based scheduling | +| `modules::observability::OtelModule` | OpenTelemetry traces, metrics, logs | +| `modules::http_functions::HttpFunctionsModule` | Outbound HTTP call security | +| `modules::shell::ExecModule` | Spawn external processes | +| `modules::bridge_client::BridgeClientModule` | Distributed cross-engine invocation | +| `modules::telemetry::TelemetryModule` | Anonymous product analytics | +| `workers` section in iii-config.yaml | External binary workers (worker modules)| +| `iii.toml` | Worker manifest (name → version) | +| `iii worker add NAME[@VERSION]` | Install a worker from the registry | +| `iii worker remove NAME` | Uninstall a worker | +| `iii worker list` | List installed workers | +| `iii worker info NAME` | Show registry info for a worker | + +## Reference Implementation + +See [../references/iii-config.yaml](../references/iii-config.yaml) for the full working example — a complete +engine configuration with all modules, adapters, queue configs, and environment variable patterns. 
+ +## Common Patterns + +Code using this pattern commonly includes, when relevant: + +- `iii --config ./iii-config.yaml` — start the engine with a config file +- `docker pull iiidev/iii:latest` — pull the Docker image +- Dev storage: `store_method: file_based` with `file_path: ./data/...` +- Prod storage: Redis adapters with `redis_url: ${REDIS_URL}` +- Prod queues: RabbitMQ adapter with `amqp_url: ${AMQP_URL}` and `queue_mode: quorum` +- Queue config: `queue_configs` with `max_retries`, `concurrency`, `type`, `backoff_ms` per queue name +- Env var with fallback: `port: ${III_PORT:49134}` +- Health check: `curl http://localhost:3111/health` +- Ports: 3111 (API), 3112 (streams), 49134 (engine WS), 9464 (Prometheus) + +### Worker Module System + +External workers are installed via the CLI and configured in `iii-config.yaml`: + +- `iii worker add pdfkit@1.0.0` — install a worker binary from the registry +- `iii worker add` (no name) — install all workers listed in `iii.toml` +- `iii worker remove pdfkit` — remove binary, manifest entry, and config block +- `iii worker list` — show installed workers and versions from `iii.toml` + +Workers appear in `iii.toml` as a version manifest: +```toml +[workers] +pdfkit = "1.0.0" +image-processor = "2.3.1" +``` + +Worker config blocks in `iii-config.yaml` use marker comments for automatic management: +```yaml +workers: + # === iii:pdfkit BEGIN === + - class: workers::pdfkit::PdfKitWorker + config: + output_dir: ./output + # === iii:pdfkit END === +``` + +At startup, the engine resolves each worker class, finds the binary in `iii_workers/`, and spawns it as a child process. Worker binaries are stored in the `iii_workers/` directory. + +## Adapting This Pattern + +Use the adaptations below when they apply to the task. 
+ +- Start with file_based adapters for development, switch to Redis/RabbitMQ for production +- Define queue configs per workload: high-concurrency for parallel jobs, FIFO for ordered processing +- Use environment variables with defaults for all deployment-sensitive values (URLs, ports, credentials) +- Enable only the modules you need — unused modules can be omitted from the config +- Use `iii worker add` to install external workers and auto-generate their config blocks +- Set `max_retries` and `backoff_ms` based on your failure tolerance and SLA requirements +- Configure `OtelModule` with your collector endpoint and sampling ratio for observability + +## Pattern Boundaries + +- For HTTP handler logic (request/response, path params), prefer `iii-http-endpoints`. +- For queue processing patterns (enqueue, FIFO, concurrency), prefer `iii-queue-processing`. +- For cron scheduling details (expressions, timezones), prefer `iii-cron-scheduling`. +- For OpenTelemetry SDK integration (spans, metrics, traces), prefer `iii-observability`. +- For real-time stream patterns, prefer `iii-realtime-streams`. +- Stay with `iii-engine-config` when the primary problem is configuring or deploying the engine itself. + +## When to Use + +- Use this skill when the task is primarily about `iii-engine-config` in the iii engine. +- Triggers when the request directly asks for this pattern or an equivalent implementation. + +## Boundaries + +- Never use this skill as a generic fallback for unrelated tasks. +- You must not apply this skill when a more specific iii skill is a better fit. +- Always verify environment and safety constraints before applying examples from this skill. 
diff --git a/skills/iii-event-driven-cqrs/SKILL.md b/skills/iii-event-driven-cqrs/SKILL.md new file mode 100644 index 000000000..45608ad94 --- /dev/null +++ b/skills/iii-event-driven-cqrs/SKILL.md @@ -0,0 +1,93 @@ +--- +name: iii-event-driven-cqrs +description: >- + Implements CQRS with event sourcing on the iii engine. Use when building + command/query separation, event-sourced systems, or fan-out architectures + where commands publish domain events and multiple read model projections + subscribe independently. +--- + +# Event-Driven CQRS & Event Sourcing + +Comparable to: Kafka, RabbitMQ, CQRS/Event Sourcing systems + +## Key Concepts + +Use the concepts below when they fit the task. Not every CQRS system needs all of them. + +- **Write side**: Commands validate input and publish domain events via pubsub +- **Read side**: Multiple projections subscribe to events independently, building query-optimized views in state +- **Event log**: Events are appended to state as an ordered log (event sourcing) +- **PubSub** handles fan-out — one event reaches all projections and downstream consumers +- **HTTP triggers** expose both command endpoints (POST) and query endpoints (GET) + +## Architecture + +```text +HTTP POST /inventory (command) + → cmd::add-inventory-item → validate → append event to state + → publish('inventory.item-added') + ↓ (fan-out via subscribe triggers) + → proj::inventory-list (updates queryable list view) + → proj::inventory-stats (updates aggregate counters) + → notify::inventory-alert (sends low-stock alerts) + +HTTP GET /inventory (query) + → query::list-inventory → reads from projection state +``` + +## iii Primitives Used + +| Primitive | Purpose | +| ----------------------------------------------------------- | ----------------------------------------- | +| `registerWorker` | Initialize the worker and connect to iii | +| `registerFunction` | Define commands, projections, and queries | +| trigger `state::set`, `state::get`, `state::list` | Event 
log and projection state | +| `trigger({ function_id: 'publish', payload })` | Publish domain events | +| `registerTrigger({ type: 'subscribe', config: { topic } })` | Subscribe projections to events | +| `registerTrigger({ type: 'http' })` | Command and query endpoints | +| `trigger({ ..., action: TriggerAction.Void() })` | Fire-and-forget notifications | + +## Reference Implementation + +See [../references/event-driven-cqrs.js](../references/event-driven-cqrs.js) for the full working example — an inventory management system +with commands that publish domain events and multiple projections building query-optimized views. + +## Common Patterns + +Code using this pattern commonly includes, when relevant: + +- `registerWorker(url, { workerName })` — worker initialization +- `trigger({ function_id: 'state::set', payload: { scope: 'events', key, value } })` — event log append +- `trigger({ function_id: 'publish', payload: { topic, data } })` — domain event publishing +- `registerTrigger({ type: 'subscribe', function_id, config: { topic } })` — projection subscriptions +- Command functions with `cmd::` prefix, projection functions with `proj::` prefix, query functions with `query::` prefix +- Multiple projections subscribing to the same topic independently +- `const logger = new Logger()` — structured logging per command/projection + +## Adapting This Pattern + +Use the adaptations below when they apply to the task. + +- Add new projections by registering subscribe triggers on existing event topics +- Use separate state scopes for each projection (e.g. `inventory-list`, `inventory-stats`) +- Commands should validate before publishing — reject invalid commands early +- For critical event processing, use `TriggerAction.Enqueue({ queue })` instead of pubsub for guaranteed delivery +- Event IDs should be unique and monotonic for ordering (e.g. 
`evt-${Date.now()}-${counter}`) + +## Pattern Boundaries + +- If the task is about simple CRUD with reactive side effects, prefer `iii-reactive-backend`. +- If the task needs durable multi-step pipelines with retries, prefer `iii-workflow-orchestration`. +- Stay with `iii-event-driven-cqrs` when command/query separation, event sourcing, and independent projections are the primary concerns. + +## When to Use + +- Use this skill when the task is primarily about `iii-event-driven-cqrs` in the iii engine. +- Triggers when the request directly asks for this pattern or an equivalent implementation. + +## Boundaries + +- Never use this skill as a generic fallback for unrelated tasks. +- You must not apply this skill when a more specific iii skill is a better fit. +- Always verify environment and safety constraints before applying examples from this skill. diff --git a/skills/iii-functions-and-triggers/SKILL.md b/skills/iii-functions-and-triggers/SKILL.md new file mode 100644 index 000000000..e2b9ff157 --- /dev/null +++ b/skills/iii-functions-and-triggers/SKILL.md @@ -0,0 +1,97 @@ +--- +name: iii-functions-and-triggers +description: >- + Registers functions and triggers on the iii engine across TypeScript, Python, + and Rust. Use when creating workers, registering function handlers, binding + triggers, or invoking functions across languages. +--- + +# Functions & Triggers + +Comparable to: Serverless function runtimes, Lambda, Cloud Functions + +## Key Concepts + +Use the concepts below when they fit the task. Not every worker needs all of them. 
+ +- A **Function** is an async handler registered with a unique ID +- A **Trigger** binds an event source to a function — types include http, queue, cron, state, stream, and subscribe +- Functions invoke other functions via `trigger()` regardless of language or worker location +- The engine handles serialization, routing, and delivery automatically +- HTTP-invoked functions wrap external endpoints as callable function IDs +- Functions can declare **request/response formats** for documentation and discovery — auto-generated from types in Rust (via `schemars::JsonSchema`) and Python (via type hints / Pydantic), or manually provided in Node.js + +## Architecture + +`registerWorker()` connects the worker to the engine, `registerFunction` defines handlers, `registerTrigger` binds event sources to those handlers, and the engine routes incoming events to the correct function. Functions can invoke other functions across workers and languages via `trigger()`. + +## iii Primitives Used + +| Primitive | Purpose | +| ------------------------------------------------------------ | ---------------------------------- | +| `registerWorker(url, options?)` | Connect worker to engine | +| `registerFunction({ id }, handler)` | Define a function handler | +| `registerTrigger({ type, function_id, config })` | Bind an event source to a function | +| `trigger({ function_id, payload })` | Invoke a function synchronously | +| `trigger({ ..., action: TriggerAction.Void() })` | Fire-and-forget invocation | +| `trigger({ ..., action: TriggerAction.Enqueue({ queue }) })` | Durable async invocation via queue | + +## Reference Implementation + +- **TypeScript**: [../references/functions-and-triggers.js](../references/functions-and-triggers.js) +- **Python**: [../references/functions-and-triggers.py](../references/functions-and-triggers.py) +- **Rust**: [../references/functions-and-triggers.rs](../references/functions-and-triggers.rs) + +Each reference shows the same patterns (function 
registration, trigger binding, cross-function invocation) in its respective language. + +## Common Patterns + +Code using this pattern commonly includes, when relevant: + +- `registerWorker('ws://localhost:49134', { workerName: 'my-worker' })` — connect to the engine +- `registerFunction({ id: 'namespace::name' }, async (input) => { ... })` — register a handler +- `registerTrigger({ type: 'http', function_id, config: { api_path, http_method } })` — HTTP trigger +- `registerTrigger({ type: 'queue', function_id, config: { topic } })` — queue trigger +- `registerTrigger({ type: 'cron', function_id, config: { expression } })` — cron trigger +- `registerTrigger({ type: 'state', function_id, config: { scope, key } })` — state change trigger +- `registerTrigger({ type: 'stream', function_id, config: { stream } })` — stream trigger +- `registerTrigger({ type: 'subscribe', function_id, config: { topic } })` — pubsub subscriber +- Cross-language invocation: a TypeScript function can trigger a Python or Rust function by ID + +### Request/Response Format (Auto-Registration) + +Functions can declare their input/output schemas for documentation and discovery: + +- **Rust**: Derive `schemars::JsonSchema` on handler input/output types — `RegisterFunction::new()` auto-generates JSON Schema (Draft 7) from the type +- **Python**: Use type hints (Pydantic models or primitives) on handler parameters and return types — `register_function()` auto-extracts JSON Schema (Draft 2020-12) +- **Node.js**: Pass `request_format` / `response_format` manually in the registration message (e.g., via Zod's `toJSONSchema()`) + +## Adapting This Pattern + +Use the adaptations below when they apply to the task. 
+ +- Replace placeholder handler logic with real business logic (API calls, DB queries, LLM calls) +- Use `namespace::name` convention for function IDs to group related functions +- For HTTP endpoints, configure `api_path` and `http_method` in the trigger config +- For durable async work, use `TriggerAction.Enqueue({ queue })` instead of synchronous trigger +- For fire-and-forget side effects, use `TriggerAction.Void()` +- Multiple workers in different languages can register functions that invoke each other by ID + +## Pattern Boundaries + +- For HTTP endpoint specifics (request/response format, path params), prefer `iii-http-endpoints`. +- For queue processing details (retries, concurrency, FIFO), prefer `iii-queue-processing`. +- For cron scheduling details (expressions, timezones), prefer `iii-cron-scheduling`. +- For invocation modes (sync vs void vs enqueue), prefer `iii-trigger-actions`. +- Stay with `iii-functions-and-triggers` when the primary problem is registering functions, binding triggers, or cross-language invocation. + +## When to Use + +- Use this skill when the task is primarily about `iii-functions-and-triggers` in the iii engine. +- Triggers when the request directly asks for this pattern or an equivalent implementation. + +## Boundaries + +- Never use this skill as a generic fallback for unrelated tasks. +- You must not apply this skill when a more specific iii skill is a better fit. +- Always verify environment and safety constraints before applying examples from this skill. diff --git a/skills/iii-getting-started/SKILL.md b/skills/iii-getting-started/SKILL.md new file mode 100644 index 000000000..773996abf --- /dev/null +++ b/skills/iii-getting-started/SKILL.md @@ -0,0 +1,204 @@ +--- +name: iii-getting-started +description: >- + Install the iii engine, set up your first worker, and get a working backend + running. Use when a user wants to start a new iii project, install the SDK, + or needs help with initial setup and configuration. 
+--- + +# Getting Started with iii + +iii replaces your API framework, task queue, cron scheduler, pub/sub, state store, and observability pipeline with a single engine and three primitives: **Function**, **Trigger**, **Worker**. + +## Step 1: Install the Engine + +```bash +curl -fsSL https://install.iii.dev/iii/main/install.sh | sh +``` + +Verify it installed: + +```bash +iii --version +``` + +## Step 2: Create a Project + +```bash +curl -LO https://github.com/iii-hq/cli-tooling/releases/latest/download/quickstart.zip +unzip quickstart.zip +cd quickstart +``` + +The quickstart includes TypeScript, Python, and Rust workers. If you don't have all runtimes, the README includes Docker Compose instructions. + +## Step 3: Start the Engine + +```bash +iii --config iii-config.yaml +``` + +The engine starts and listens for worker connections on `ws://localhost:49134`. The console is available at `http://localhost:3000`. + +## Step 4: Install the SDK + +Pick your language: + +```bash +# TypeScript / Node.js +npm install iii-sdk + +# Python +pip install iii-sdk + +# Rust — add to Cargo.toml +# [dependencies] +# iii-sdk = "*" +``` + +## Step 5: Write Your First Worker + +### TypeScript + +```typescript +import { registerWorker, Logger, TriggerAction } from 'iii-sdk' + +const iii = registerWorker(process.env.III_URL ?? 'ws://localhost:49134') + +iii.registerFunction( + { id: 'hello::greet', description: 'Greet a user by name' }, + async (input) => { + const logger = new Logger() + const name = input?.name ?? 
'world' + logger.info('Greeting user', { name }) + return { message: `Hello, ${name}!` } + }, +) + +iii.registerTrigger({ + type: 'http', + function_id: 'hello::greet', + config: { api_path: '/hello', http_method: 'POST' }, +}) +``` + +### Python + +```python +from iii import register_worker, InitOptions, Logger + +iii = register_worker(address="ws://localhost:49134", options=InitOptions(worker_name="hello-worker")) + +def greet(data): + logger = Logger() + name = data.get("name", "world") if isinstance(data, dict) else "world" + logger.info("Greeting user", {"name": name}) + return {"message": f"Hello, {name}!"} + +iii.register_function({"id": "hello::greet", "description": "Greet a user by name"}, greet) +iii.register_trigger({"type": "http", "function_id": "hello::greet", "config": {"api_path": "/hello", "http_method": "POST"}}) +``` + +### Rust + +```rust +use iii_sdk::{register_worker, InitOptions, Logger, RegisterFunctionMessage, RegisterTriggerInput}; +use serde_json::json; + +let iii = register_worker("ws://127.0.0.1:49134", InitOptions::default()); + +iii.register_function( + RegisterFunctionMessage::with_id("hello::greet".into()), + |input: serde_json::Value| async move { + let logger = Logger::new(); + let name = input["name"].as_str().unwrap_or("world"); + logger.info("Greeting user", Some(&json!({ "name": name }))); + Ok(json!({ "message": format!("Hello, {}!", name) })) + }, +); + +iii.register_trigger(RegisterTriggerInput { + trigger_type: "http".into(), + function_id: "hello::greet".into(), + config: json!({ "api_path": "/hello", "http_method": "POST" }), +})?; +``` + +## Step 6: Test It + +```bash +curl -X POST http://localhost:3000/hello \ + -H "Content-Type: application/json" \ + -d '{"name": "iii"}' +``` + +Expected response: + +```json +{"message": "Hello, iii!"} +``` + +## Install Agent Skills + +Get all 24 iii skills for your AI coding agent: + +```bash +npx skills add iii-hq/iii +``` + +Or with SkillKit: + +```bash +npx skillkit install 
iii-hq/iii +``` + +Skills teach your agent how to use every iii primitive — HTTP endpoints, cron scheduling, queues, state management, streams, channels, and more. Available for Claude Code, Cursor, Codex, Gemini CLI, and 30+ other agents. + +## Adapting This Pattern + +- Add more functions to the same worker — each gets its own `registerFunction` + `registerTrigger` calls +- Use `::` separator for function IDs to namespace them: `orders::create`, `orders::validate` +- Add cron triggers with `{ type: 'cron', config: { expression: '0 0 9 * * * *' } }` (7-field, includes seconds) +- Add queue triggers with `{ type: 'queue', config: { topic: 'my-queue' } }` +- Use `iii.trigger()` to invoke other functions from within a function +- Use `state::get` / `state::set` to persist data across function calls + +## Recommended Next Steps + +After getting your first worker running: + +1. **Add state** — Use `iii-state-management` skill to persist data +2. **Add a queue** — Use `iii-queue-processing` skill for async job processing +3. **Add a cron job** — Use `iii-cron-scheduling` skill for scheduled tasks +4. **Build an API** — Use `iii-http-endpoints` skill for REST endpoints with CRUD +5. **Add observability** — Use `iii-observability` skill for tracing and metrics +6. 
**Explore architecture patterns** — See `iii-agentic-backend`, `iii-reactive-backend`, `iii-workflow-orchestration` + +## Key Resources + +- [Quickstart Guide](https://iii.dev/docs/quickstart) +- [SDK Reference — Node.js](https://iii.dev/docs/api-reference/sdk-node) +- [SDK Reference — Python](https://iii.dev/docs/api-reference/sdk-python) +- [SDK Reference — Rust](https://iii.dev/docs/api-reference/sdk-rust) +- [Engine Configuration](https://iii.dev/docs/configuration) +- [Console Dashboard](https://iii.dev/docs/console) + +## Pattern Boundaries + +- For HTTP endpoint patterns (CRUD, parameterized routes), prefer `iii-http-endpoints` +- For cron/scheduling patterns, prefer `iii-cron-scheduling` +- For queue/async job patterns, prefer `iii-queue-processing` +- For state persistence patterns, prefer `iii-state-management` +- For engine configuration, prefer `iii-engine-config` +- Stay with `iii-getting-started` for installation, initial setup, and first-worker guidance + +## When to Use + +- Use this skill when the task is about installing iii, creating a new project, or writing a first worker. +- Triggers when the request asks for setup help, quickstart guidance, or getting started with iii. + +## Boundaries + +- Never use this skill as a generic fallback for unrelated tasks. +- You must not apply this skill when a more specific iii skill is a better fit. +- Always verify environment and safety constraints before applying examples from this skill. diff --git a/skills/iii-http-endpoints/SKILL.md b/skills/iii-http-endpoints/SKILL.md new file mode 100644 index 000000000..fce7888a5 --- /dev/null +++ b/skills/iii-http-endpoints/SKILL.md @@ -0,0 +1,83 @@ +--- +name: iii-http-endpoints +description: >- + Exposes iii functions as REST API endpoints. Use when building HTTP APIs, + webhooks, or inbound request handling where iii owns the route. 
+--- + +# HTTP Endpoints + +Comparable to: Express, Fastify, Flask + +## Key Concepts + +Use the concepts below when they fit the task. Not every HTTP endpoint needs all of them. + +- Each route is a **registered function** bound to a path and method via an HTTP trigger +- The handler receives an **ApiRequest** object containing `body`, `path_params`, `headers`, and `method` +- Handlers return `{ status_code, body, headers }` to shape the HTTP response +- **RestApiModule** serves all registered routes on port 3111 +- Path parameters use colon syntax (e.g. `/users/:id`) and arrive in `path_params` + +## Architecture + + HTTP request + → RestApiModule (port 3111) + → registerTrigger route match (method + path) + → registerFunction handler (receives ApiRequest) + → { status_code, body, headers } response + +## iii Primitives Used + +| Primitive | Purpose | +| --------------------------------------------------- | ------------------------------------------ | +| `registerFunction` | Define the handler for a route | +| `registerTrigger({ type: 'http' })` | Bind a route path and method to a function | +| `config: { api_path: '/path', http_method: 'GET' }` | Route configuration on the trigger | + +## Reference Implementation + +See [../references/http-endpoints.js](../references/http-endpoints.js) for the full working example — a REST API with parameterized routes handling GET and POST requests. 
+ +Also available in **Python**: [../references/http-endpoints.py](../references/http-endpoints.py) + +Also available in **Rust**: [../references/http-endpoints.rs](../references/http-endpoints.rs) + +## Common Patterns + +Code using this pattern commonly includes, when relevant: + +- `registerWorker(url, { workerName })` — worker initialization +- `registerFunction(id, handler)` — define the route handler +- `registerTrigger({ type: 'http', config: { api_path, http_method } })` — bind path and method +- `req.body` — parsed request body for POST/PUT +- `req.path_params` — extracted path parameters +- `return { status_code: 200, body: { data }, headers: { 'Content-Type': 'application/json' } }` — response shape +- `const logger = new Logger()` — structured logging per handler + +## Adapting This Pattern + +Use the adaptations below when they apply to the task. + +- Add more routes by registering additional functions and HTTP triggers with distinct paths or methods +- Use `path_params` for resource identifiers (e.g. `/orders/:orderId`) +- Return appropriate status codes (201 for creation, 404 for not found, 400 for bad input) +- For authenticated routes, inspect `req.headers` for tokens or API keys +- Chain work behind an endpoint by enqueuing to a queue after returning a 202 Accepted + +## Pattern Boundaries + +- If the task is about calling external HTTP APIs from iii functions, prefer `iii-http-invoked-functions`. +- If async processing is needed behind the endpoint, prefer `iii-queue-processing` for the background work. +- Stay with `iii-http-endpoints` when iii owns the route and handles the inbound request directly. + +## When to Use + +- Use this skill when the task is primarily about `iii-http-endpoints` in the iii engine. +- Triggers when the request directly asks for this pattern or an equivalent implementation. + +## Boundaries + +- Never use this skill as a generic fallback for unrelated tasks. 
+- You must not apply this skill when a more specific iii skill is a better fit. +- Always verify environment and safety constraints before applying examples from this skill. diff --git a/skills/iii-http-invoked-functions/SKILL.md b/skills/iii-http-invoked-functions/SKILL.md new file mode 100644 index 000000000..41fb82bea --- /dev/null +++ b/skills/iii-http-invoked-functions/SKILL.md @@ -0,0 +1,60 @@ +--- +name: iii-http-invoked-functions +description: >- + Registers external HTTP endpoints as iii functions using + registerFunction(meta, HttpInvocationConfig). Use when adapting legacy APIs, + third-party webhooks, or immutable services into triggerable iii functions, + especially when prompts ask for endpoint maps like { path, id } iterated into + registerFunction calls. +--- + +# HTTP-Invoked Functions + +Use this pattern when iii should call external HTTP endpoints as functions. + +## Pattern selection rules + +- If the task says "register HTTP endpoints with `registerFunction`", use this pattern. +- If the task asks for an endpoint list/map (for example `{ path, id }`) and a loop over `registerFunction`, use this pattern. +- If the system being adapted cannot be modified, use this pattern. +- If the goal is exposing inbound routes that iii owns, use `registerTrigger({ type: 'http' })` instead. + +## Core model + +- `registerFunction(meta, HttpInvocationConfig)` registers an outbound HTTP-invoked function. +- `trigger({ function_id, payload })` invokes it like any other function. +- Trigger payload becomes request body for JSON-based calls. +- Non-2xx and network failures are treated as invocation failures. 
+ +## Common shape + +- `registerWorker(url, { workerName })` +- Small endpoint descriptor list, then loop registration: + - `[{ path, id }]` + - `registerFunction({ id }, { url: base + path, method: 'POST' })` +- Optional auth config with env var keys (`token_key`, `secret_key`, `value_key`) + +## Guardrails + +- Do not model outbound HTTP endpoint adaptation as `registerTrigger({ type: 'http' })`. +- Do not pass raw secrets in auth fields; pass env var names. +- Keep function IDs stable and domain-prefixed (for example `legacy::orders`). + +## Reference + +See [../references/http-invoked-functions.js](../references/http-invoked-functions.js). + +Also available in **Python**: [../references/http-invoked-functions.py](../references/http-invoked-functions.py) + +Also available in **Rust**: [../references/http-invoked-functions.rs](../references/http-invoked-functions.rs) + +## When to Use + +- Use this skill when the task is primarily about `iii-http-invoked-functions` in the iii engine. +- Triggers when the request directly asks for this pattern or an equivalent implementation. + +## Boundaries + +- Never use this skill as a generic fallback for unrelated tasks. +- You must not apply this skill when a more specific iii skill is a better fit. +- Always verify environment and safety constraints before applying examples from this skill. diff --git a/skills/iii-low-code-automation/SKILL.md b/skills/iii-low-code-automation/SKILL.md new file mode 100644 index 000000000..2ad716f66 --- /dev/null +++ b/skills/iii-low-code-automation/SKILL.md @@ -0,0 +1,90 @@ +--- +name: iii-low-code-automation +description: >- + Builds trigger-transform-action automation chains on the iii engine. Use when + building Zapier/n8n-style automations, webhook-to-action pipelines, or simple + event-driven chains where each node is a small registered function chained via + named queues. 
+--- + +# Low-Code Automation Chains + +Comparable to: n8n, Zapier, LangFlow + +## Key Concepts + +Use the concepts below when they fit the task. Not every automation needs all of them. + +- Each "node" in the automation is a small registered function with a single job +- Nodes chain via **named queues** using `TriggerAction.Enqueue` — easy to add, remove, or reorder steps +- **HTTP triggers** receive external webhooks (form submissions, payment events) +- **Cron triggers** start scheduled automations (daily digests, periodic syncs) +- **PubSub** broadcasts completion events for downstream listeners + +## Architecture + +```text +Automation 1: Form → Enrich → Store → Notify + HTTP webhook → auto::enrich-lead → auto::store-lead → auto::notify-slack + +Automation 2: Cron → Fetch → Transform → Store + Cron (daily) → auto::fetch-rss → auto::transform-articles → auto::store-articles + +Automation 3: Payment webhook → Validate → Update → Notify + HTTP webhook → auto::validate-payment → auto::update-order → publish(payment.processed) +``` + +## iii Primitives Used + +| Primitive | Purpose | +| ------------------------------------------------------------ | ---------------------------------------- | +| `registerWorker` | Initialize the worker and connect to iii | +| `registerFunction` | Define each automation node | +| `trigger({ ..., action: TriggerAction.Enqueue({ queue }) })` | Chain nodes via named queues | +| `trigger({ function_id: 'state::set', payload })` | Persist data between nodes | +| `trigger({ ..., action: TriggerAction.Void() })` | Fire-and-forget notifications | +| `registerTrigger({ type: 'http' })` | Webhook entry points | +| `registerTrigger({ type: 'cron' })` | Scheduled automations | + +## Reference Implementation + +See [../references/low-code-automation.js](../references/low-code-automation.js) for the full working example — three automation chains: +form-to-Slack notification, RSS feed aggregation, and payment webhook processing. 
+ +## Common Patterns + +Code using this pattern commonly includes, when relevant: + +- `registerWorker(url, { workerName })` — worker initialization +- `trigger({ function_id, payload, action: TriggerAction.Enqueue({ queue: 'automation' }) })` — node chaining +- Each node as its own `registerFunction` with `auto::` prefix IDs +- Small, focused functions that do one thing (enrich, validate, store, notify) +- `trigger({ function_id: 'state::set', payload: { scope, key, value } })` — persist between nodes +- `const logger = new Logger()` — structured logging per node + +## Adapting This Pattern + +Use the adaptations below when they apply to the task. + +- Add new automation chains by registering HTTP/cron triggers and chaining functions +- Each node should be independently testable — accept input, produce output +- Use separate queue names when different chains need different retry/concurrency settings +- For unreliable external services, wrap calls in try/catch and handle failures explicitly +- Keep node functions small — offload complex logic to dedicated functions + +## Pattern Boundaries + +- If the task requires durable multi-step workflows with saga compensation and step tracking, prefer `iii-workflow-orchestration`. +- If the task involves multiple AI agents handing off work, prefer `iii-agentic-backend`. +- Stay with `iii-low-code-automation` when the primary concern is simple trigger-transform-action chains with minimal orchestration overhead. + +## When to Use + +- Use this skill when the task is primarily about `iii-low-code-automation` in the iii engine. +- Triggers when the request directly asks for this pattern or an equivalent implementation. + +## Boundaries + +- Never use this skill as a generic fallback for unrelated tasks. +- You must not apply this skill when a more specific iii skill is a better fit. +- Always verify environment and safety constraints before applying examples from this skill. 
diff --git a/skills/iii-node-sdk/SKILL.md b/skills/iii-node-sdk/SKILL.md
new file mode 100644
index 000000000..993c615d8
--- /dev/null
+++ b/skills/iii-node-sdk/SKILL.md
@@ -0,0 +1,52 @@
+---
+name: iii-node-sdk
+description: >-
+  Node.js/TypeScript SDK for the iii engine. Use when building workers,
+  registering functions, or invoking triggers in TypeScript or JavaScript.
+---
+
+# Node.js SDK
+
+The TypeScript/JavaScript SDK for connecting workers to the iii engine.
+
+## Documentation
+
+Full API reference: [SDK Reference — Node.js](https://iii.dev/docs/api-reference/sdk-node)
+
+## Install
+
+`npm install iii-sdk`
+
+## Key Exports
+
+| Export | Purpose |
+| ------------------------------------------------ | ------------------------------------------- |
+| `registerWorker(url, { workerName })` | Connect to the engine and return the client |
+| `registerFunction({ id }, handler)` | Register an async function handler |
+| `registerTrigger({ type, function_id, config })` | Bind a trigger to a function |
+| `trigger({ function_id, payload, action? })` | Invoke a function |
+| `TriggerAction.Void()` | Fire-and-forget invocation mode |
+| `TriggerAction.Enqueue({ queue })` | Durable async invocation mode |
+| `Logger` | Structured logging |
+| `withSpan`, `getTracer`, `getMeter` | OpenTelemetry instrumentation |
+| `createChannel()` | Binary streaming between workers |
+| `createStream(name, adapter)` | Custom stream implementation |
+| `registerTriggerType(id, handler)` | Custom trigger type registration |
+
+## Pattern Boundaries
+
+- For usage patterns and working examples, see `iii-functions-and-triggers`
+- For HTTP endpoint patterns, see `iii-http-endpoints`
+- For Python SDK, see `iii-python-sdk`
+- For Rust SDK, see `iii-rust-sdk`
+
+## When to Use
+
+- Use this skill when the task is primarily about `iii-node-sdk` in the iii engine.
+- Triggers when the request directly asks for this pattern or an equivalent implementation.
+
+## Boundaries
+
+- Never use this skill as a generic fallback for unrelated tasks.
+- You must not apply this skill when a more specific iii skill is a better fit. +- Always verify environment and safety constraints before applying examples from this skill. diff --git a/skills/iii-observability/SKILL.md b/skills/iii-observability/SKILL.md new file mode 100644 index 000000000..2c782a59c --- /dev/null +++ b/skills/iii-observability/SKILL.md @@ -0,0 +1,94 @@ +--- +name: iii-observability +description: >- + Integrates OpenTelemetry tracing, metrics, and logging into iii workers. Use + when setting up distributed tracing, Prometheus metrics, custom spans, or + connecting to observability backends. +--- + +# Observability + +Comparable to: Datadog, Grafana, Honeycomb, Jaeger + +## Key Concepts + +Use the concepts below when they fit the task. Not every worker needs custom spans or metrics. + +- Built-in **OpenTelemetry** support across all SDKs — every function invocation is automatically traced +- The engine exports traces, metrics, and logs via **OTLP** to any compatible collector +- Workers propagate **W3C trace context** automatically across function invocations +- **Prometheus** metrics are exposed on port 9464 +- `registerWorker()` with `otel` config enables telemetry per worker +- **Custom spans** via `withSpan(name, opts, fn)` wrap async work with trace context +- **Custom metrics** via `getMeter()` create counters and histograms + +## Architecture + +The worker SDK generates spans, metrics, and logs during function execution. These flow to the engine, which exports them via OTLP to a collector (Jaeger, Grafana, Datadog). The engine also exposes a Prometheus endpoint on port 9464 for scraping. 
+
+## iii Primitives Used
+
+| Primitive | Purpose |
+| ---------------------------- | --------------------------------------------- |
+| `registerWorker(url, { otel })` | Connect worker with telemetry config |
+| `withSpan(name, opts, fn)` | Create a custom trace span |
+| `getTracer()` | Access OpenTelemetry Tracer directly |
+| `getMeter()` | Access OpenTelemetry Meter for custom metrics |
+| `currentTraceId()` | Get active trace ID for correlation |
+| `injectTraceparent()` | Inject W3C trace context into outbound calls |
+| `onLog(callback, { level })` | Subscribe to log events |
+| `shutdown_otel()` | Graceful shutdown of telemetry pipeline |
+
+## Reference Implementation
+
+See [../references/observability.js](../references/observability.js) for the full working example — a worker with custom spans,
+metrics counters, trace propagation, and log subscriptions connected to an OTel collector.
+
+Also available in **Python**: [../references/observability.py](../references/observability.py)
+
+Also available in **Rust**: [../references/observability.rs](../references/observability.rs)
+
+## Common Patterns
+
+Code using this pattern commonly includes, when relevant:
+
+- `registerWorker('ws://localhost:49134', { otel: { enabled: true, serviceName: 'my-svc' } })` — enable telemetry
+- `withSpan('validate-order', {}, async (span) => { span.setAttribute('order.id', id); ... })` — custom span
+- `getMeter().createCounter('orders.processed')` — custom counter metric
+- `getMeter().createHistogram('request.duration')` — custom histogram metric
+- `onLog((log) => { ... }, { level: 'warn' })` — subscribe to warnings and above
+- `currentTraceId()` — get active trace ID for correlation with external systems
+- `injectTraceparent()` — propagate trace context to outbound HTTP calls
+- Disable telemetry: `registerWorker(url, { otel: { enabled: false } })` or `OTEL_ENABLED=false`
+
+## Adapting This Pattern
+
+Use the adaptations below when they apply to the task.
+ +- Enable `otel` in `registerWorker()` config to start collecting traces automatically +- Add custom spans around expensive operations (DB queries, LLM calls, external APIs) +- Create domain-specific metrics (orders processed, payment failures, queue depth) +- Use `currentTraceId()` to correlate iii traces with external system logs +- Configure `OtelModule` in iii-config.yaml for engine-side exporter, sampling ratio, and alerts +- Point the OTLP endpoint at your collector (Jaeger, Grafana Tempo, Datadog Agent) + +## Engine Configuration + +OtelModule must be enabled in iii-config.yaml for engine-side traces, metrics, and logs. See [../references/iii-config.yaml](../references/iii-config.yaml) for the full annotated config reference. + +## Pattern Boundaries + +- For engine-side OtelModule YAML configuration, prefer `iii-engine-config`. +- For SDK init options and function registration, prefer `iii-functions-and-triggers`. +- Stay with `iii-observability` when the primary problem is SDK-level telemetry: spans, metrics, logs, and trace propagation. + +## When to Use + +- Use this skill when the task is primarily about `iii-observability` in the iii engine. +- Triggers when the request directly asks for this pattern or an equivalent implementation. + +## Boundaries + +- Never use this skill as a generic fallback for unrelated tasks. +- You must not apply this skill when a more specific iii skill is a better fit. +- Always verify environment and safety constraints before applying examples from this skill. diff --git a/skills/iii-python-sdk/SKILL.md b/skills/iii-python-sdk/SKILL.md new file mode 100644 index 000000000..f71147145 --- /dev/null +++ b/skills/iii-python-sdk/SKILL.md @@ -0,0 +1,75 @@ +--- +name: iii-python-sdk +description: >- + Python SDK for the iii engine. Use when building workers, registering + functions, or invoking triggers in Python. +--- + +# Python SDK + +The async Python SDK for connecting workers to the iii engine. 
+
+## Documentation
+
+Full API reference: [SDK Reference — Python](https://iii.dev/docs/api-reference/sdk-python)
+
+## Install
+
+`pip install iii-sdk`
+
+## Key Exports
+
+| Export | Purpose |
+| --------------------------------------------- | ----------------------------------------------- |
+| `register_worker(address, options?)` | Connect to the engine, returns the client |
+| `InitOptions(worker_name, otel?)` | Connection configuration |
+| `register_function(id, handler)` | Register an async function handler |
+| `register_trigger(type, function_id, config)` | Bind a trigger to a function |
+| `trigger(request)` | Invoke a function synchronously |
+| `trigger_async(request)` | Invoke a function asynchronously |
+| `get_context()` | Access logger and trace context inside handlers |
+| `ApiRequest` / `ApiResponse` | HTTP request/response types (pydantic) |
+| `IStream` | Interface for custom stream implementations |
+| `on_functions_available(callback)` | Listen for function discovery |
+| `on_connection_state_change(callback)` | Monitor connection state |
+
+## Key Notes
+
+- `register_worker()` returns a synchronous client; handlers are async
+- `ApiResponse` uses camelCase `statusCode` (pydantic alias), not `status_code`
+- End workers with `while True: await asyncio.sleep(60)` to keep the event loop alive
+- Use `asyncio.to_thread()` for CPU-heavy sync work inside handlers
+- The SDK implements both `trigger_async(request)` and a synchronous `trigger(request)`. Use `trigger_async` inside async handlers, and `trigger` in synchronous scripts or threads where blocking behavior is desired.
+ +## Examples + +```python +# Async invocation (non-blocking, typical inside handlers) +result = await iii.trigger_async({ + "function_id": "greet", + "payload": {"name": "World"} +}) + +# Sync invocation (blocks the current thread, useful in sync contexts) +result = iii.trigger({ + "function_id": "greet", + "payload": {"name": "World"} +}) +``` + +## Pattern Boundaries + +- For usage patterns and working examples, see `iii-functions-and-triggers` +- For Node.js SDK, see `iii-node-sdk` +- For Rust SDK, see `iii-rust-sdk` + +## When to Use + +- Use this skill when the task is primarily about `iii-python-sdk` in the iii engine. +- Triggers when the request directly asks for this pattern or an equivalent implementation. + +## Boundaries + +- Never use this skill as a generic fallback for unrelated tasks. +- You must not apply this skill when a more specific iii skill is a better fit. +- Always verify environment and safety constraints before applying examples from this skill. diff --git a/skills/iii-queue-processing/SKILL.md b/skills/iii-queue-processing/SKILL.md new file mode 100644 index 000000000..3e2c0c3ef --- /dev/null +++ b/skills/iii-queue-processing/SKILL.md @@ -0,0 +1,91 @@ +--- +name: iii-queue-processing +description: >- + Enqueues jobs, configures retry policies, sets concurrency limits, and orders + messages via named standard or FIFO queues. Use when building background job + workers, task queues, message queues, async pipelines, or any pattern needing + guaranteed delivery with exponential backoff and dead-letter handling. +--- + +# Queue Processing + +Comparable to: BullMQ, Celery, SQS + +## Key Concepts + +Use the concepts below when they fit the task. Not every queue setup needs all of them. 
+ +- **Named queues** are declared in `iii-config.yaml` under `queue_configs` +- **Standard queues** process jobs concurrently; **FIFO queues** preserve ordering +- `TriggerAction.Enqueue({ queue })` dispatches a job to a named queue +- Failed jobs **auto-retry** with exponential backoff up to `max_retries` +- Jobs that exhaust retries land in a **dead letter queue** for inspection +- Each consumer function receives the job payload and a `messageReceiptId` + +## Architecture + + Producer function + → TriggerAction.Enqueue({ queue: 'task-queue' }) + → Named Queue (standard or FIFO) + → Consumer registerFunction handler + → success / retry with backoff + → Dead Letter Queue (after max_retries) + +## iii Primitives Used + +| Primitive | Purpose | +| ------------------------------------------------------------ | ---------------------------------------------- | +| `registerFunction` | Define the consumer that processes jobs | +| `trigger({ ..., action: TriggerAction.Enqueue({ queue }) })` | Dispatch a job to a named queue | +| `messageReceiptId` | Acknowledge or track individual job processing | +| `queue_configs` in `iii-config.yaml` | Declare queues with concurrency and retries | + +## Reference Implementation + +See [../references/queue-processing.js](../references/queue-processing.js) for the full working example — a producer that enqueues jobs and a consumer that processes them with retry logic. 
+ +Also available in **Python**: [../references/queue-processing.py](../references/queue-processing.py) + +Also available in **Rust**: [../references/queue-processing.rs](../references/queue-processing.rs) + +## Common Patterns + +Code using this pattern commonly includes, when relevant: + +- `registerWorker(url, { workerName })` — worker initialization +- `registerFunction(id, handler)` — define the consumer +- `trigger({ function_id, payload, action: TriggerAction.Enqueue({ queue }) })` — enqueue a job +- `payload.messageReceiptId` — track or acknowledge the job +- `trigger({ function_id: 'state::set', payload })` — persist results after processing +- `const logger = new Logger()` — structured logging per job + +## Adapting This Pattern + +Use the adaptations below when they apply to the task. + +- Choose FIFO queues when job ordering matters (e.g. sequential pipeline steps) +- Set `max_retries` and `concurrency` in queue config to match your workload +- Chain multiple queues for multi-stage pipelines (queue A consumer enqueues to queue B) +- For idempotency, check state before processing to avoid duplicate work on retries + +## Engine Configuration + +Named queues are declared in iii-config.yaml under `queue_configs` with per-queue `max_retries`, `concurrency`, `type`, and `backoff_ms`. See [../references/iii-config.yaml](../references/iii-config.yaml) for the full annotated config reference. + +## Pattern Boundaries + +- If the task only needs fire-and-forget without retries or ordering, prefer `iii-trigger-actions` with `TriggerAction.Void()`. +- If failed jobs need special handling or alerting, prefer `iii-dead-letter-queues` for the DLQ consumer. +- If the task is step-by-step orchestration with branching, prefer `iii-workflow-orchestration`. +- Stay with `iii-queue-processing` when the primary need is reliable async job execution with retries. + +## When to Use + +- Use this skill when the task is primarily about `iii-queue-processing` in the iii engine. 
+- Triggers when the request directly asks for this pattern or an equivalent implementation. + +## Boundaries + +- Never use this skill as a generic fallback for unrelated tasks. +- You must not apply this skill when a more specific iii skill is a better fit. +- Always verify environment and safety constraints before applying examples from this skill. diff --git a/skills/iii-reactive-backend/SKILL.md b/skills/iii-reactive-backend/SKILL.md new file mode 100644 index 000000000..9a1614380 --- /dev/null +++ b/skills/iii-reactive-backend/SKILL.md @@ -0,0 +1,86 @@ +--- +name: iii-reactive-backend +description: >- + Builds reactive real-time backends on the iii engine. Use when building + event-driven apps where state changes automatically trigger side effects, + clients receive live updates via streams or websockets, or you need a + real-time database layer with pub/sub and CRUD endpoints. +--- + +# Reactive Backend + +Comparable to: Convex, Firebase, Supabase, Appwrite + +## Key Concepts + +Use the concepts below when they fit the task. Not every reactive backend needs every trigger or realtime surface shown here. 
+ +- State is the "database" — CRUD via `state::set`, `state::get`, `state::update`, `state::delete`, `state::list` +- **State triggers** fire automatically when any value in a scope changes +- Side effects (notifications, metrics, stream pushes) are wired reactively, not imperatively +- **Streams** deliver real-time updates to connected clients + +## Architecture + +```text +HTTP CRUD endpoints + → `state::set`, `state::update`, `state::delete` (writes to 'todos' scope) + ↓ (automatic state triggers) + → on-change → stream::send (push to clients) + → update-metrics → state::update (aggregate counters) + +HTTP GET /metrics → reads from 'todo-metrics' scope +WebSocket clients ← stream 'todos-live' +``` + +## iii Primitives Used + +| Primitive | Purpose | +| ------------------------------------------------------- | ---------------------------------------- | +| `registerWorker` | Initialize the worker and connect to iii | +| `registerFunction` | CRUD handlers and reactive side effects | +| `trigger({ function_id: 'state::...', payload })` | Database layer | +| `registerTrigger({ type: 'state', config: { scope } })` | React to any change in a scope | +| `trigger({ ..., action: TriggerAction.Void() })` | Fire-and-forget stream push to clients | +| `registerTrigger({ type: 'http' })` | REST endpoints | + +## Reference Implementation + +See [../references/reactive-backend.js](../references/reactive-backend.js) for the full working example — a real-time todo app with +CRUD endpoints, automatic change broadcasting via streams, and reactive aggregate metrics. 
+ +## Common Patterns + +Code using this pattern commonly includes, when relevant: + +- `registerWorker(url, { workerName })` — worker initialization +- trigger `state::set`, `state::get` — CRUD via state module +- `registerTrigger({ type: 'state', function_id, config: { scope } })` — reactive side effects on state change +- Event argument destructuring in reactive handlers: `async (event) => { const { new_value, old_value, key } = event }` +- `trigger({ function_id: 'stream::send', payload, action: TriggerAction.Void() })` — push live updates to clients +- `const logger = new Logger()` — structured logging inside handlers + +## Adapting This Pattern + +Use the adaptations below when they apply to the task. + +- State triggers fire on **any** change in the scope — use the `event` argument (`new_value`, `old_value`, `key`) to determine what changed +- Multiple functions can react to the same scope independently (on-change and update-metrics both watch `todos`) +- Stream clients connect via `ws://host:port/stream/{stream_name}/{group_id}` +- Keep reactive functions fast — offload heavy work to queues if needed + +## Pattern Boundaries + +- If the request focuses on registering external/legacy HTTP endpoints via `registerFunction` (especially with endpoint lists like `{ path, id }` plus iteration), prefer `iii-http-invoked-functions`. +- Stay with `iii-reactive-backend` when state scopes, state triggers, and live stream updates are the core requirement. + +## When to Use + +- Use this skill when the task is primarily about `iii-reactive-backend` in the iii engine. +- Triggers when the request directly asks for this pattern or an equivalent implementation. + +## Boundaries + +- Never use this skill as a generic fallback for unrelated tasks. +- You must not apply this skill when a more specific iii skill is a better fit. +- Always verify environment and safety constraints before applying examples from this skill. 
diff --git a/skills/iii-realtime-streams/SKILL.md b/skills/iii-realtime-streams/SKILL.md new file mode 100644 index 000000000..40b4d5f9a --- /dev/null +++ b/skills/iii-realtime-streams/SKILL.md @@ -0,0 +1,91 @@ +--- +name: iii-realtime-streams +description: >- + Pushes live updates to connected WebSocket clients via streams. Use when + building real-time dashboards, live feeds, or collaborative features. +--- + +# Realtime Streams + +Comparable to: Socket.io, Pusher, Firebase Realtime + +## Key Concepts + +Use the concepts below when they fit the task. Not every stream setup needs all of them. + +- **StreamModule** serves WebSocket connections on port 3112 +- Clients connect at `ws://host:3112/stream/{stream_name}/{group_id}` +- **stream::set** / **stream::get** / **stream::list** / **stream::delete** provide CRUD for stream items +- **stream::send** pushes events to all connected clients in a stream group +- `createStream` registers a custom adapter for non-default stream backends +- Each stream item is addressed by `stream_name`, `group_id`, `item_id`, and `data` + +## Architecture + + Function + → trigger('stream::set', { stream_name, group_id, item_id, data }) + → trigger('stream::send', { stream_name, group_id, data }) + → StreamModule + → WebSocket push + → Connected clients at /stream/{stream_name}/{group_id} + +## iii Primitives Used + +| Primitive | Purpose | +| ----------------------------------------------------- | ---------------------------------- | +| `trigger({ function_id: 'stream::set', payload })` | Create or update a stream item | +| `trigger({ function_id: 'stream::get', payload })` | Read a stream item | +| `trigger({ function_id: 'stream::list', payload })` | List items in a stream group | +| `trigger({ function_id: 'stream::delete', payload })` | Remove a stream item | +| `trigger({ function_id: 'stream::send', payload })` | Push an event to connected clients | +| `createStream` | Register a custom stream adapter | + +## Reference 
Implementation + +See [../references/realtime-streams.js](../references/realtime-streams.js) for the full working example — a stream that pushes live updates to WebSocket clients and manages stream items with CRUD operations. + +Also available in **Python**: [../references/realtime-streams.py](../references/realtime-streams.py) + +Also available in **Rust**: [../references/realtime-streams.rs](../references/realtime-streams.rs) + +## Common Patterns + +Code using this pattern commonly includes, when relevant: + +- `registerWorker(url, { workerName })` — worker initialization +- `trigger({ function_id: 'stream::set', payload: { stream_name, group_id, item_id, data } })` — write stream item +- `trigger({ function_id: 'stream::send', payload: { stream_name, group_id, data } })` — push event to clients +- `trigger({ function_id: 'stream::get', payload: { stream_name, group_id, item_id } })` — read stream item +- `trigger({ function_id: 'stream::list', payload: { stream_name, group_id } })` — list items in group +- `createStream(name, adapter)` — custom adapter for specialized backends +- `const logger = new Logger()` — structured logging + +## Adapting This Pattern + +Use the adaptations below when they apply to the task. + +- Name streams after your domain (e.g. `chat-messages`, `dashboard-metrics`, `notifications`) +- Use `group_id` to partition streams per user, room, or tenant +- Combine with `iii-state-reactions` to push a stream event whenever state changes +- Use `createStream` when the default adapter does not fit (e.g. custom persistence or fan-out logic) + +## Engine Configuration + +StreamModule must be enabled in iii-config.yaml with a port and adapter (KvStore or Redis). See [../references/iii-config.yaml](../references/iii-config.yaml) for the full annotated config reference. + +## Pattern Boundaries + +- If the task is about persistent key-value data without real-time push, prefer `iii-state-management`. 
+- If the task needs reactive triggers on state changes (server-side), prefer `iii-state-reactions`. +- Stay with `iii-realtime-streams` when the primary need is pushing live updates to connected clients. + +## When to Use + +- Use this skill when the task is primarily about `iii-realtime-streams` in the iii engine. +- Triggers when the request directly asks for this pattern or an equivalent implementation. + +## Boundaries + +- Never use this skill as a generic fallback for unrelated tasks. +- You must not apply this skill when a more specific iii skill is a better fit. +- Always verify environment and safety constraints before applying examples from this skill. diff --git a/skills/iii-rust-sdk/SKILL.md b/skills/iii-rust-sdk/SKILL.md new file mode 100644 index 000000000..25e3fb3b3 --- /dev/null +++ b/skills/iii-rust-sdk/SKILL.md @@ -0,0 +1,64 @@ +--- +name: iii-rust-sdk +description: >- + Rust SDK for the iii engine. Use when building high-performance workers, + registering functions, or invoking triggers in Rust. +--- + +# Rust SDK + +The native async Rust SDK for connecting workers to the iii engine via tokio. 
+ +## Documentation + +Full API reference: + +## Install + +Add to `Cargo.toml`: + +`iii-sdk = { version = "0.10", features = ["otel"] }` + +## Key Types and Functions + +| Export | Purpose | +| -------------------------------------------------- | -------------------------------------------------------------------------------- | +| `register_worker(url, InitOptions)` | Connect to the engine, returns `III` client | +| `III::register_function(RegisterFunction::new(id, handler))` | Register a sync function using the builder API | +| `III::register_function(RegisterFunction::new_async(id, handler))` | Register an async function using the builder API | +| `III::register_function_with(msg, handler)` | Two-arg convenience method for function registration | +| `RegisterFunction` | Builder with `.description()` and auto-generated request schemas via `schemars` | +| `III::register_trigger(type, function_id, config)` | Bind a trigger to a function | +| `III::trigger(TriggerRequest)` | Invoke a function | +| `TriggerAction::Void` | Fire-and-forget invocation | +| `TriggerAction::Enqueue { queue }` | Durable async invocation | +| `IIIError` | Error type for handler failures | +| `Streams` | Helper for atomic stream CRUD | +| `with_span`, `get_tracer`, `get_meter` | OpenTelemetry (requires `otel` feature) | +| `execute_traced_request` | HTTP client with trace context propagation | + +## Key Notes + +- Add `features = ["otel"]` to `Cargo.toml` for OpenTelemetry support +- Use `RegisterFunction::new("id", handler)` for sync handlers, `RegisterFunction::new_async("id", handler)` for async +- Handler input types that derive `schemars::JsonSchema` get auto-generated request schemas +- Chain `.description("...")` on `RegisterFunction` to document the function +- Keep the tokio runtime alive (e.g., `tokio::time::sleep` loop) for event processing +- `register_trigger` returns `Ok(())` on success; propagate errors with `?` + +## Pattern Boundaries + +- For usage patterns and working 
examples, see `iii-functions-and-triggers` +- For Node.js SDK, see `iii-node-sdk` +- For Python SDK, see `iii-python-sdk` + +## When to Use + +- Use this skill when the task is primarily about `iii-rust-sdk` in the iii engine. +- Triggers when the request directly asks for this pattern or an equivalent implementation. + +## Boundaries + +- Never use this skill as a generic fallback for unrelated tasks. +- You must not apply this skill when a more specific iii skill is a better fit. +- Always verify environment and safety constraints before applying examples from this skill. diff --git a/skills/iii-state-management/SKILL.md b/skills/iii-state-management/SKILL.md new file mode 100644 index 000000000..86e0209e7 --- /dev/null +++ b/skills/iii-state-management/SKILL.md @@ -0,0 +1,94 @@ +--- +name: iii-state-management +description: >- + Creates scoped key-value stores, reads and writes state entries, lists keys, + and performs partial updates across functions. Use when persisting data between + invocations, managing user sessions, caching computed values, storing feature + flags, sharing state between workers, or building a KV data layer as an + alternative to Redis or DynamoDB. +--- + +# State Management + +Comparable to: Redis, DynamoDB, Memcached + +## Key Concepts + +Use the concepts below when they fit the task. Not every state operation needs all of them. 
+ +- State is a **scoped key-value store** accessed via built-in trigger functions +- **state::set** writes a value; **state::get** reads it (returns `null` for missing keys) +- **state::list** retrieves all keys in a scope; **state::delete** removes a key +- **state::update** performs a **partial merge** using an `ops` array for fine-grained changes +- Payloads use `scope`, `key`, and `value` to address state entries +- State is shared across all functions — use meaningful scope names to avoid collisions + +## Architecture + + Function + → trigger('state::set', { scope, key, value }) + → trigger('state::get', { scope, key }) + → trigger('state::update', { scope, key, ops }) + → trigger('state::delete', { scope, key }) + → trigger('state::list', { scope }) + → StateModule → KvStore / Redis adapter + +## iii Primitives Used + +| Primitive | Purpose | +| ------------------------------------------------------------- | ----------------------------------- | +| `trigger({ function_id: 'state::set', payload })` | Write a value to state | +| `trigger({ function_id: 'state::get', payload })` | Read a value from state | +| `trigger({ function_id: 'state::list', payload })` | List all keys in a scope | +| `trigger({ function_id: 'state::delete', payload })` | Remove a key from state | +| `trigger({ function_id: 'state::update', payload: { ops } })` | Partial merge with operations array | + +## Reference Implementation + +See [../references/state-management.js](../references/state-management.js) for the full working example — functions that read, write, update, and delete state entries across a shared scope. 
+ +Also available in **Python**: [../references/state-management.py](../references/state-management.py) + +Also available in **Rust**: [../references/state-management.rs](../references/state-management.rs) + +## Common Patterns + +Code using this pattern commonly includes, when relevant: + +- `registerWorker(url, { workerName })` — worker initialization +- `trigger({ function_id: 'state::set', payload: { scope, key, value } })` — write state +- `trigger({ function_id: 'state::get', payload: { scope, key } })` — read state (returns `null` if missing) +- `trigger({ function_id: 'state::update', payload: { scope, key, ops } })` — partial merge +- `trigger({ function_id: 'state::list', payload: { scope } })` — enumerate keys +- `trigger({ function_id: 'state::delete', payload: { scope, key } })` — remove entry +- `const logger = new Logger()` — structured logging + +## Adapting This Pattern + +Use the adaptations below when they apply to the task. + +- Name scopes after your domain (e.g. `user-sessions`, `order-data`, `config`) +- Use `state::get` with a `null` check to handle missing keys gracefully +- Use `state::update` with `ops` for partial updates instead of read-modify-write cycles +- Combine with `iii-queue-processing` to persist results after async job completion + +## Engine Configuration + +StateModule must be enabled in iii-config.yaml with a KvStore adapter (file-based or Redis). See [../references/iii-config.yaml](../references/iii-config.yaml) for the full annotated config reference. + +## Pattern Boundaries + +- If the task needs reactive side effects when state changes, prefer `iii-state-reactions`. +- If the task needs real-time client push when data updates, prefer `iii-realtime-streams`. +- Stay with `iii-state-management` when the primary need is reading and writing persistent key-value data. + +## When to Use + +- Use this skill when the task is primarily about `iii-state-management` in the iii engine. 
+- Triggers when the request directly asks for this pattern or an equivalent implementation. + +## Boundaries + +- Never use this skill as a generic fallback for unrelated tasks. +- You must not apply this skill when a more specific iii skill is a better fit. +- Always verify environment and safety constraints before applying examples from this skill. diff --git a/skills/iii-state-reactions/SKILL.md b/skills/iii-state-reactions/SKILL.md new file mode 100644 index 000000000..728763c9b --- /dev/null +++ b/skills/iii-state-reactions/SKILL.md @@ -0,0 +1,90 @@ +--- +name: iii-state-reactions +description: >- + Registers state-type triggers that automatically fire functions when key-value + state is created, updated, or deleted within a scope. Use when building + reactive side effects, change watchers, audit logs, cache invalidation, + notification dispatchers, or any observer pattern where data changes should + trigger downstream processing. +--- + +# State Reactions + +Comparable to: Firebase onSnapshot, Convex mutations + +## Key Concepts + +Use the concepts below when they fit the task. Not every state reaction needs all of them. 
+ +- A **state trigger** fires whenever a value changes within a watched scope +- The handler receives `{ new_value, old_value, key, event_type }` describing the change +- **condition_function_id** gates execution — the reaction only fires if the condition returns truthy +- Multiple reactions can **independently watch** the same scope +- Reactions fire on `state::set`, `state::update`, and `state::delete` operations + +## Architecture + + state::set, state::update, or state::delete + → StateModule emits change event + → registerTrigger type:'state' (scope match) + → condition_function_id check (if configured) + → registerFunction handler ({ new_value, old_value, key, event_type }) + +## iii Primitives Used + +| Primitive | Purpose | +| ---------------------------------------------------------- | ---------------------------------------- | +| `registerFunction` | Define the reaction handler | +| `registerTrigger({ type: 'state' })` | Watch a scope for changes | +| `config: { scope, key, condition_function_id }` | Scope filter and optional condition gate | +| Event payload: `{ new_value, old_value, key, event_type }` | Change details passed to the handler | + +## Reference Implementation + +See [../references/state-reactions.js](../references/state-reactions.js) for the full working example — a reaction that watches a state scope and fires side effects when values change, with an optional condition gate. 
+ +Also available in **Python**: [../references/state-reactions.py](../references/state-reactions.py) + +Also available in **Rust**: [../references/state-reactions.rs](../references/state-reactions.rs) + +## Common Patterns + +Code using this pattern commonly includes, when relevant: + +- `registerWorker(url, { workerName })` — worker initialization +- `registerFunction(id, handler)` — define the reaction handler +- `registerTrigger({ type: 'state', config: { scope, key, condition_function_id } })` — watch for changes +- `payload.new_value` / `payload.old_value` — compare before and after +- `payload.event_type` — distinguish between set, update, and delete events +- `trigger({ function_id: 'state::set', payload })` — write derived state from the reaction +- `const logger = new Logger()` — structured logging per reaction + +## Adapting This Pattern + +Use the adaptations below when they apply to the task. + +- Set `scope` to watch a specific domain (e.g. `orders`, `user-profiles`) +- Use `key` to narrow reactions to a single key within a scope +- Add a `condition_function_id` to filter — only react when the condition function returns truthy +- Chain reactions by writing state in one handler that triggers another reaction on a different scope + +## Engine Configuration + +StateModule must be enabled in iii-config.yaml for state triggers to fire. See [../references/iii-config.yaml](../references/iii-config.yaml) for the full annotated config reference. + +## Pattern Boundaries + +- If the task is about directly reading or writing state without reactions, prefer `iii-state-management`. +- If the task needs conditional trigger logic shared across trigger types, prefer `iii-trigger-conditions`. +- Stay with `iii-state-reactions` when the primary need is automatic side effects on state changes. + +## When to Use + +- Use this skill when the task is primarily about `iii-state-reactions` in the iii engine. 
+- Triggers when the request directly asks for this pattern or an equivalent implementation. + +## Boundaries + +- Never use this skill as a generic fallback for unrelated tasks. +- You must not apply this skill when a more specific iii skill is a better fit. +- Always verify environment and safety constraints before applying examples from this skill. diff --git a/skills/iii-trigger-actions/SKILL.md b/skills/iii-trigger-actions/SKILL.md new file mode 100644 index 000000000..42dd0f6f7 --- /dev/null +++ b/skills/iii-trigger-actions/SKILL.md @@ -0,0 +1,85 @@ +--- +name: iii-trigger-actions +description: >- + Selects how functions are invoked — synchronous calls that return results, + fire-and-forget void dispatches, or durable enqueue through named queues with + retries. Use when deciding between blocking RPC calls, background job + dispatch, async workers, or reliable message delivery with acknowledgement. +--- + +# Trigger Actions + +Comparable to: RPC vs message queue vs fire-and-forget patterns + +## Key Concepts + +Use the concepts below when they fit the task. Not every invocation needs all three modes. + +- **Synchronous** (default): caller blocks until the function returns a result or times out +- **Void** (`TriggerAction.Void()`): fire-and-forget dispatch, returns immediately with `null`, no retry guarantees +- **Enqueue** (`TriggerAction.Enqueue({ queue })`): routes through a named queue with automatic retries and backoff, returns a `messageReceiptId` +- Decision guide: need the result? use sync. Must complete reliably? use enqueue. Optional side effect? use void. + +## Architecture + +The caller invokes `trigger()` with an optional action parameter. Synchronous mode waits for the handler result. Void mode dispatches and returns null immediately. Enqueue mode places the payload on a named queue where a consumer processes it with retry guarantees. 
+
+## iii Primitives Used
+
+| Primitive | Purpose |
+| ------------------------------------------------------------ | ---------------------------------------------- |
+| `trigger({ function_id, payload })` | Synchronous invocation, blocks for result |
+| `trigger({ ..., action: TriggerAction.Void() })` | Fire-and-forget, returns immediately with null |
+| `trigger({ ..., action: TriggerAction.Enqueue({ queue }) })` | Durable async via named queue, returns receipt |
+| `iii trigger --function-id=ID --payload=JSON` | CLI trigger (part of the engine binary) |
+| `--timeout-ms` | CLI flag to set trigger timeout (default 30s) |
+
+## Reference Implementation
+
+See [../references/trigger-actions.js](../references/trigger-actions.js) for the full working example — a comparison of all three invocation modes showing when and how to use sync, void, and enqueue patterns.
+
+Also available in **Python**: [../references/trigger-actions.py](../references/trigger-actions.py)
+
+Also available in **Rust**: [../references/trigger-actions.rs](../references/trigger-actions.rs)
+
+## Common Patterns
+
+Code using this pattern commonly includes, when relevant:
+
+- `await iii.trigger({ function_id: 'users::get', payload: { id } })` — sync, get result directly
+- `iii.trigger({ function_id: 'analytics::track', payload: event, action: TriggerAction.Void() })` — fire-and-forget
+- `iii.trigger({ function_id: 'orders::process', payload: order, action: TriggerAction.Enqueue({ queue: 'payments' }) })` — durable enqueue
+- Sync returns the function result directly
+- Void returns `null` / `None`
+- Enqueue returns `{ messageReceiptId: string }` for tracking
+- `iii trigger --function-id='users::get' --payload='{"id":"123"}'` — invoke via CLI
+- `iii trigger --function-id='users::get' --payload='{"id":"123"}' --timeout-ms=5000` — with custom timeout
+
+## Adapting This Pattern
+
+Use the adaptations below when they apply to the task. 
+ +- Default to synchronous when the caller needs the result to proceed +- Use void for logging, analytics, or any side effect where failure is acceptable +- Use enqueue for anything that must complete reliably — payments, emails, notifications +- Combine modes in a single handler: sync call for validation, then enqueue for processing +- Named queues let you configure retries and concurrency per workload type + +## Pattern Boundaries + +- For queue configuration (retries, concurrency, FIFO ordering), prefer `iii-engine-config`. +- For DLQ handling when enqueued jobs exhaust retries, prefer `iii-dead-letter-queues`. +- For function registration and trigger binding, prefer `iii-functions-and-triggers`. +- Stay with `iii-trigger-actions` when the primary problem is choosing the right invocation mode. + +## When to Use + +- Use this skill when the task is primarily about `iii-trigger-actions` in the iii engine. +- Triggers when the request directly asks for this pattern or an equivalent implementation. + +## Boundaries + +- Never use this skill as a generic fallback for unrelated tasks. +- You must not apply this skill when a more specific iii skill is a better fit. +- Always verify environment and safety constraints before applying examples from this skill. diff --git a/skills/iii-trigger-conditions/SKILL.md b/skills/iii-trigger-conditions/SKILL.md new file mode 100644 index 000000000..489617181 --- /dev/null +++ b/skills/iii-trigger-conditions/SKILL.md @@ -0,0 +1,83 @@ +--- +name: iii-trigger-conditions +description: >- + Registers a boolean condition function and attaches it to triggers via + condition_function_id so handlers only fire when the condition passes. Use + when gating triggers on business rules, checking user permissions, validating + data before processing, filtering high-value orders, rate-limiting events, or + conditionally skipping handlers based on payload content. 
+---
+
+# Trigger Conditions
+
+Comparable to: Middleware guards, event filters
+
+## Key Concepts
+
+Use the concepts below when they fit the task. Not every trigger needs a condition.
+
+- A **Condition Function** is a registered function that returns a boolean (`true` or `false`)
+- The engine calls the condition function before the handler; the handler runs only if `true`
+- Attach a condition to any trigger type via `condition_function_id` in the trigger config
+- The condition function receives the same event data as the handler would
+- Works with all trigger types: http, queue, cron, state, stream, subscribe
+
+## Architecture
+
+When a trigger fires, the engine first invokes the condition function with the event data. If the condition returns true, the handler executes normally. If false, the handler is skipped silently with no error or retry.
+
+## iii Primitives Used
+
+| Primitive | Purpose |
+| --------------------------------------------------------------------------- | ------------------------------------------------- |
+| `registerFunction({ id }, handler)` (condition) | Register the condition function (returns boolean) |
+| `registerFunction({ id }, handler)` (handler) | Register the handler function |
+| `registerTrigger({ type, function_id, config: { condition_function_id } })` | Bind trigger with condition gate |
+
+## Reference Implementation
+
+See [../references/trigger-conditions.js](../references/trigger-conditions.js) for the full working example — a condition-gated trigger where a business rule function filters events before the handler processes them.
+
+Also available in **Python**: [../references/trigger-conditions.py](../references/trigger-conditions.py)
+
+Also available in **Rust**: [../references/trigger-conditions.rs](../references/trigger-conditions.rs) 
+ +## Common Patterns + +Code using this pattern commonly includes, when relevant: + +- `registerFunction({ id: 'conditions::is-high-value' }, async (input) => input.new_value?.amount >= 1000)` — condition function +- `registerFunction({ id: 'orders::notify-high-value' }, async (input) => { ... })` — handler function +- `registerTrigger({ type: 'state', function_id: 'orders::notify-high-value', config: { scope: 'orders', key: 'status', condition_function_id: 'conditions::is-high-value' } })` — bind with condition +- Condition returns `true` — handler executes +- Condition returns `false` — handler is skipped silently +- Use `conditions::` prefix for condition function IDs to keep them organized + +## Adapting This Pattern + +Use the adaptations below when they apply to the task. + +- Replace the condition logic with your business rules (threshold checks, role validation, feature flags) +- Conditions work on all trigger types — use them on HTTP triggers for auth guards, on queue triggers for message filtering +- Keep condition functions lightweight and fast since they run on every trigger fire +- Combine multiple business rules in a single condition function rather than chaining conditions +- Condition functions can call `trigger()` internally to check state or other functions + +## Pattern Boundaries + +- For registering functions and triggers in general, prefer `iii-functions-and-triggers`. +- For state change triggers specifically, prefer `iii-state-reactions`. +- For invocation modes (sync/void/enqueue), prefer `iii-trigger-actions`. +- Stay with `iii-trigger-conditions` when the primary problem is gating trigger execution with a condition check. + +## When to Use + +- Use this skill when the task is primarily about `iii-trigger-conditions` in the iii engine. +- Triggers when the request directly asks for this pattern or an equivalent implementation. + +## Boundaries + +- Never use this skill as a generic fallback for unrelated tasks. 
+- You must not apply this skill when a more specific iii skill is a better fit. +- Always verify environment and safety constraints before applying examples from this skill. diff --git a/skills/iii-workflow-orchestration/SKILL.md b/skills/iii-workflow-orchestration/SKILL.md new file mode 100644 index 000000000..b9f48662e --- /dev/null +++ b/skills/iii-workflow-orchestration/SKILL.md @@ -0,0 +1,100 @@ +--- +name: iii-workflow-orchestration +description: >- + Orchestrates durable multi-step workflow pipelines on the iii engine. Use + when building order fulfillment, data pipelines, task orchestration, or any + sequential process requiring retries, backoff, step tracking, scheduled + cleanup, or dead letter queue (DLQ) handling. +--- + +# Workflow Orchestration & Durable Execution + +Comparable to: Temporal, Airflow, Inngest + +## Key Concepts + +Use the concepts below when they fit the task. Not every workflow needs every durability or tracking mechanism shown here. + +- Each pipeline step is a registered function chained via **named queues** with config-driven retries +- Step progress is tracked in **shared state** and broadcast via **streams** +- A **cron trigger** handles scheduled maintenance (e.g. 
stale order cleanup) +- Queue behavior (retries, backoff, concurrency, FIFO) is defined per queue in `iii-config.yaml` + +## Architecture + +```text +HTTP (create order) + → Enqueue(order-validate) → validate + → Enqueue(order-payment) → charge-payment + → Enqueue(order-ship) → ship + → publish(order.fulfilled) + +Cron (hourly) → cleanup-stale + +Queue configs (iii-config.yaml): + order-validate: max_retries: 2 + order-payment: max_retries: 5, type: fifo, concurrency: 2 + order-ship: max_retries: 3 +``` + +## iii Primitives Used + +| Primitive | Purpose | +| ------------------------------------------------------------ | ----------------------------------------- | +| `registerWorker` | Initialize the worker and connect to iii | +| `registerFunction` | Define each pipeline step | +| `trigger({ ..., action: TriggerAction.Enqueue({ queue }) })` | Durable step chaining via named queues | +| `trigger({ function_id: 'state::...', payload })` | Track step progress | +| `trigger({ ..., action: TriggerAction.Void() })` | Fire-and-forget stream events and publish | +| `registerTrigger({ type: 'cron' })` | Scheduled maintenance | +| `registerTrigger({ type: 'http' })` | Entry point | + +## Reference Implementation + +See [../references/workflow-orchestration.js](../references/workflow-orchestration.js) for the full working example — an order fulfillment pipeline +with validate → charge → ship steps, retry configuration, stream-based progress tracking, +and hourly stale-order cleanup. 
+ +## Common Patterns + +Code using this pattern commonly includes, when relevant: + +- `registerWorker(url, { workerName })` — worker initialization +- `trigger({ function_id, payload, action: TriggerAction.Enqueue({ queue }) })` — durable step chaining via named queues +- `trigger({ function_id: 'state::update', payload: { scope, key, ops } })` — step progress tracking +- Named queues with a comment referencing `iii-config.yaml` for retry/concurrency settings +- `const logger = new Logger()` — structured logging per step +- Each step as its own `registerFunction` with a single responsibility +- `trigger({ function_id: 'publish', payload, action: TriggerAction.Void() })` — completion broadcast + +## Adapting This Pattern + +Use the adaptations below when they apply to the task. + +- Each step should do one thing and enqueue the next function on success +- Define separate named queues in `iii-config.yaml` when steps need different retry/concurrency settings +- Capture enqueue receipts (`messageReceiptId`) for observability and DLQ correlation when needed +- The `trackStep` helper pattern (state update + stream event) is reusable for any pipeline +- Failed jobs exhaust retries and move to a DLQ — see the [dead-letter-queues HOWTO](https://iii.dev/docs/how-to/dead-letter-queues) +- DLQ support for named queues is provided by the Builtin and RabbitMQ adapters (Redis is pub/sub only) +- Cron expressions use 7-position numeric format: `0 0 * * * * *` (every hour) + +## Engine Configuration + +Named queues for pipeline steps are declared in iii-config.yaml under `queue_configs` with per-queue retry, concurrency, and FIFO settings. See [../references/iii-config.yaml](../references/iii-config.yaml) for the full annotated config reference. + +## Pattern Boundaries + +- If the task is "model HTTP endpoints as HTTP-invoked `registerFunction` functions" (including `{ path, id }` arrays iterated into registration), prefer `iii-http-invoked-functions`. 
+- Stay with `iii-workflow-orchestration` when durable step sequencing, queue retries/backoff, and workflow progress tracking are the primary concerns. + +## When to Use + +- Use this skill when the task is primarily about `iii-workflow-orchestration` in the iii engine. +- Triggers when the request directly asks for this pattern or an equivalent implementation. + +## Boundaries + +- Never use this skill as a generic fallback for unrelated tasks. +- You must not apply this skill when a more specific iii skill is a better fit. +- Always verify environment and safety constraints before applying examples from this skill. diff --git a/skills/references/agentic-backend.js b/skills/references/agentic-backend.js new file mode 100644 index 000000000..b29753fc4 --- /dev/null +++ b/skills/references/agentic-backend.js @@ -0,0 +1,181 @@ +/** + * Pattern: Agentic Backend Infrastructure + * Comparable to: LangGraph, CrewAI, AutoGen, Letta + * + * Demonstrates a multi-agent research pipeline where specialized agents + * collaborate through named queues and shared state. Each agent processes a + * task, writes its findings to state, and hands off to the next agent via + * a named queue. Fan-out (completion broadcast) uses pubsub. 
+ * + * How-to references: + * - Functions & Triggers: https://iii.dev/docs/how-to/use-functions-and-triggers + * - State management: https://iii.dev/docs/how-to/manage-state + * - State reactions: https://iii.dev/docs/how-to/react-to-state-changes + * - Queues: https://iii.dev/docs/how-to/use-queues + * - Conditions: https://iii.dev/docs/how-to/use-trigger-conditions + */ + +import { registerWorker, Logger, TriggerAction } from 'iii-sdk' + +const iii = registerWorker(process.env.III_ENGINE_URL || 'ws://localhost:49134', { + workerName: 'agentic-backend', +}) + +// --------------------------------------------------------------------------- +// Agent 1 — Researcher: gathers raw information on a topic +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'agents::researcher' }, async (data) => { + const logger = new Logger() + logger.info('Researcher agent working', { topic: data.topic }) + + const findings = { + topic: data.topic, + sources: ['arxiv', 'wikipedia', 'internal-kb'], + summary: `Key findings on ${data.topic}: ...`, + confidence: 0.82, + } + + // Store findings in shared state so other agents can read them + await iii.trigger({ + function_id: 'state::set', + payload: { + scope: 'research-tasks', + key: data.task_id, + value: { + _key: data.task_id, + task_id: data.task_id, + topic: data.topic, + phase: 'researched', + findings, + }, + }, + }) + + // Hand off to the critic agent via named queue + iii.trigger({ + function_id: 'agents::critic', + payload: { task_id: data.task_id }, + action: TriggerAction.Enqueue({ queue: 'agent-tasks' }), + }) + + return findings +}) + +// --------------------------------------------------------------------------- +// Agent 2 — Critic: reviews and scores the researcher's output +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'agents::critic' }, async (data) => { + const logger = new Logger() + + const task = 
await iii.trigger({ + function_id: 'state::get', + payload: { scope: 'research-tasks', key: data.task_id }, + }) + + logger.info('Critic reviewing findings', { confidence: task.findings.confidence }) + + const review = { + score: task.findings.confidence > 0.7 ? 'pass' : 'needs-revision', + feedback: 'Findings are well-sourced and relevant.', + } + + await iii.trigger({ + function_id: 'state::update', + payload: { + scope: 'research-tasks', + key: data.task_id, + ops: [ + { type: 'set', path: 'phase', value: 'reviewed' }, + { type: 'set', path: 'review', value: review }, + ], + }, + }) + + // Only hand off to the synthesizer if approved + const approved = await iii.trigger({ + function_id: 'agents::is-approved', + payload: { task_id: data.task_id }, + }) + + if (approved) { + iii.trigger({ + function_id: 'agents::synthesizer', + payload: { task_id: data.task_id }, + action: TriggerAction.Enqueue({ queue: 'agent-tasks' }), + }) + } + + return review +}) + +// --------------------------------------------------------------------------- +// Agent 3 — Synthesizer: produces a final report from reviewed findings +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'agents::synthesizer' }, async (data) => { + const logger = new Logger() + + const task = await iii.trigger({ + function_id: 'state::get', + payload: { scope: 'research-tasks', key: data.task_id }, + }) + + logger.info('Synthesizer creating report', { task_id: data.task_id }) + + const report = { + title: `Report: ${task.topic}`, + body: `Based on ${task.findings.sources.length} sources...`, + review_score: task.review.score, + generated_at: new Date().toISOString(), + } + + await iii.trigger({ + function_id: 'state::update', + payload: { + scope: 'research-tasks', + key: data.task_id, + ops: [ + { type: 'set', path: 'phase', value: 'complete' }, + { type: 'set', path: 'report', value: report }, + ], + }, + }) + + // Broadcast completion for any listening 
services + iii.trigger({ + function_id: 'publish', + payload: { topic: 'research.complete', data: { task_id: data.task_id, report } }, + action: TriggerAction.Void(), + }) + + return report +}) + +// --------------------------------------------------------------------------- +// Condition: only synthesize if the critic passed the findings +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'agents::is-approved' }, async (data) => { + const task = await iii.trigger({ + function_id: 'state::get', + payload: { scope: 'research-tasks', key: data.task_id }, + }) + return task?.review?.score === 'pass' +}) + +// --------------------------------------------------------------------------- +// HTTP trigger — kick off a research task +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'agents::start-research' }, async (data) => { + const task_id = `task-${Date.now()}` + iii.trigger({ + function_id: 'agents::researcher', + payload: { task_id, topic: data.topic }, + action: TriggerAction.Enqueue({ queue: 'agent-tasks' }), + }) + return { task_id, status: 'queued' } +}) + +iii.registerTrigger({ + type: 'http', + function_id: 'agents::start-research', + config: { api_path: '/agents/research', http_method: 'POST' }, +}) diff --git a/skills/references/channels.js b/skills/references/channels.js new file mode 100644 index 000000000..d5968e339 --- /dev/null +++ b/skills/references/channels.js @@ -0,0 +1,93 @@ +/** + * Pattern: Channels + * Comparable to: Unix pipes, gRPC streaming, WebSocket data streams + * + * Demonstrates binary streaming between workers: creating channels, + * passing refs across functions, writing/reading binary data, and + * using text messages for signaling. 
+ * + * How-to references: + * - Channels: https://iii.dev/docs/how-to/use-channels + */ + +import { registerWorker, Logger } from 'iii-sdk' + +const iii = registerWorker(process.env.III_ENGINE_URL || 'ws://localhost:49134', { + workerName: 'channels-example', +}) + +// --------------------------------------------------------------------------- +// 1. Producer — creates a channel and streams data through it +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'pipeline::produce' }, async (data) => { + const logger = new Logger() + + // Create a channel pair + const channel = await iii.createChannel() + + // Pass the reader ref to the consumer via trigger + iii.trigger({ + function_id: 'pipeline::consume', + payload: { + readerRef: channel.readerRef, + recordCount: data.records.length, + }, + }) + + // Send metadata as a text message + channel.writer.sendMessage( + JSON.stringify({ type: 'metadata', format: 'ndjson', encoding: 'utf-8' }), + ) + + // Stream records as binary data (newline-delimited JSON) + for (const record of data.records) { + const line = JSON.stringify(record) + '\n' + channel.writer.stream.write(Buffer.from(line)) + } + + // Signal end of stream + channel.writer.close() + logger.info('Producer finished streaming', { records: data.records.length }) + + return { status: 'streaming', readerRef: channel.readerRef } +}) + +// --------------------------------------------------------------------------- +// 2. 
Consumer — receives a channel ref and reads the stream +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'pipeline::consume' }, async (data) => { + const logger = new Logger() + + // Reconstruct reader from the ref passed in the payload + const reader = data.readerRef + + // Listen for text messages (metadata, signaling) + reader.onMessage((msg) => { + const parsed = JSON.parse(msg) + logger.info('Received metadata', parsed) + }) + + // Read entire binary stream + const buffer = await reader.readAll() + const text = buffer.toString('utf-8').trim() + + let records + if (!text) { + records = [{ processed: 0 }] + } else { + const lines = text.split('\n').filter((line) => line.trim() !== '') + records = lines.map((line) => JSON.parse(line)) + } + + logger.info('Consumer processed records', { count: records.length }) + return { processed: records.length } +}) + +// --------------------------------------------------------------------------- +// 3. HTTP trigger to kick off the pipeline +// --------------------------------------------------------------------------- +iii.registerTrigger({ + type: 'http', + function_id: 'pipeline::produce', + config: { api_path: '/pipeline/start', http_method: 'POST' }, +}) diff --git a/skills/references/channels.py b/skills/references/channels.py new file mode 100644 index 000000000..f85378e01 --- /dev/null +++ b/skills/references/channels.py @@ -0,0 +1,95 @@ +""" +Pattern: Channels (Python) +Comparable to: Unix pipes, gRPC streaming, WebSocket data streams + +Demonstrates binary streaming between workers: creating channels, +passing refs across functions, writing/reading binary data, and +using text messages for signaling. 
+ +How-to references: + - Channels: https://iii.dev/docs/how-to/use-channels +""" + +import json +import os + +from iii import InitOptions, Logger, register_worker + +engine_url = os.environ.get("III_ENGINE_URL", "ws://localhost:49134") +iii = register_worker( + address=engine_url, + options=InitOptions(worker_name="channels-example"), +) + +# --------------------------------------------------------------------------- +# 1. Producer — creates a channel and streams data through it +# --------------------------------------------------------------------------- +async def produce(data): + logger = Logger(service_name="pipeline::produce") + + # Create a channel pair + channel = await iii.create_channel_async() + + # Pass the reader ref to the consumer via trigger + await iii.trigger_async({ + "function_id": "pipeline::consume", + "payload": { + "reader_ref": channel.reader_ref, + "record_count": len(data.get("records", [])), + }, + }) + + # Send metadata as a text message + await channel.writer.send_message_async( + json.dumps({"type": "metadata", "format": "ndjson", "encoding": "utf-8"}) + ) + + # Stream records as binary data (newline-delimited JSON) + for record in data.get("records", []): + line = json.dumps(record) + "\n" + await channel.writer.write(line.encode("utf-8")) + + # Signal end of stream + await channel.writer.close_async() + logger.info("Producer finished streaming", {"records": len(data.get("records", []))}) + + return {"status": "streaming"} + +iii.register_function("pipeline::produce", produce) + +# --------------------------------------------------------------------------- +# 2. 
Consumer — receives a channel ref and reads the stream +# --------------------------------------------------------------------------- +async def consume(data): + logger = Logger(service_name="pipeline::consume") + + # The reader ref is automatically resolved to a ChannelReader instance + reader = data["reader_ref"] + + # Listen for text messages (metadata, signaling) + messages = [] + reader.on_message(lambda msg: messages.append(json.loads(msg))) + + # Read entire binary stream + raw = await reader.read_all() + decoded = raw.decode("utf-8").strip() + + if not decoded: + records = [] + else: + lines = decoded.split("\n") + records = [json.loads(line) for line in lines if line.strip()] + + logger.info("Consumer processed records", {"count": len(records)}) + return {"processed": len(records)} + +iii.register_function("pipeline::consume", consume) + +# --------------------------------------------------------------------------- +# 3. HTTP trigger to kick off the pipeline +# --------------------------------------------------------------------------- +iii.register_trigger({ + "type": "http", + "function_id": "pipeline::produce", + "config": {"api_path": "/pipeline/start", "http_method": "POST"}, +}) diff --git a/skills/references/channels.rs b/skills/references/channels.rs new file mode 100644 index 000000000..e22789e65 --- /dev/null +++ b/skills/references/channels.rs @@ -0,0 +1,142 @@ +/** + * Pattern: Channels (Rust) + * Comparable to: Unix pipes, gRPC streaming, WebSocket data streams + * + * Demonstrates binary streaming between workers: creating channels, + * passing refs across functions, writing/reading binary data, and + * using text messages for signaling. 
+ * + * How-to references: + * - Channels: https://iii.dev/docs/how-to/use-channels + */ + +use std::time::Duration; + +use iii_sdk::{ + register_worker, InitOptions, RegisterFunction, TriggerRequest, TriggerAction, + ChannelReader, extract_channel_refs, + builtin_triggers::*, + IIITrigger, +}; +use serde_json::json; + +#[derive(serde::Deserialize, schemars::JsonSchema)] +struct ProduceInput { + records: Vec, +} + +#[tokio::main] +async fn main() -> Result<(), Box> { + let url = std::env::var("III_ENGINE_URL").unwrap_or("ws://127.0.0.1:49134".into()); + let iii = register_worker(&url, InitOptions::default()); + + // ----------------------------------------------------------------------- + // 1. Producer — creates a channel and streams binary data + // ----------------------------------------------------------------------- + let iii_producer = iii.clone(); + iii.register_function(RegisterFunction::new_async( + "pipeline::produce", + move |input: ProduceInput| { + let iii = iii_producer.clone(); + async move { + // Create a channel pair + let channel = iii.create_channel(None).await.map_err(|e| e.to_string())?; + + // Pass the reader ref to the consumer via trigger without waiting + iii.trigger(TriggerRequest { + function_id: "pipeline::consume".into(), + payload: json!({ + "reader_ref": channel.reader_ref, + "record_count": input.records.len(), + }), + action: Some(TriggerAction::Void), + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + // Send metadata as a text message + channel + .writer + .send_message(&serde_json::to_string(&json!({ + "type": "metadata", + "format": "ndjson", + "encoding": "utf-8", + })).unwrap()) + .await + .map_err(|e| e.to_string())?; + + // Stream records as binary data (newline-delimited JSON) + for record in &input.records { + let mut line = serde_json::to_string(record).unwrap(); + line.push('\n'); + channel + .writer + .write(line.as_bytes()) + .await + .map_err(|e| e.to_string())?; + } + + // Signal end of stream + 
channel.writer.close().await.map_err(|e| e.to_string())?; + + Ok(json!({ "status": "streaming", "records": input.records.len() })) + } + }, + )); + + // ----------------------------------------------------------------------- + // 2. Consumer — receives a channel ref and reads the stream + // ----------------------------------------------------------------------- + let iii_consumer = iii.clone(); + iii.register_function(RegisterFunction::new_async( + "pipeline::consume", + move |input: serde_json::Value| { + let iii = iii_consumer.clone(); + async move { + // Extract channel refs from the payload + let refs = extract_channel_refs(&input); + let reader_ref = refs + .iter() + .find(|(k, _)| k == "reader_ref") + .map(|(_, r)| r.clone()) + .ok_or("missing reader_ref")?; + + // Create reader from the ref + let reader = ChannelReader::new(iii.address(), &reader_ref); + + // Listen for text messages + reader + .on_message(|msg| { + println!("Metadata: {}", msg); + }) + .await; + + // Read entire binary stream + let raw = reader.read_all().await.map_err(|e| e.to_string())?; + let text = String::from_utf8(raw).map_err(|e| e.to_string())?; + let records: Vec = text + .trim() + .lines() + .map(|line| serde_json::from_str(line).unwrap()) + .collect(); + + Ok(json!({ "processed": records.len() })) + } + }, + )); + + // ----------------------------------------------------------------------- + // 3. 
HTTP trigger to kick off the pipeline + // ----------------------------------------------------------------------- + iii.register_trigger( + IIITrigger::Http(HttpTriggerConfig::new("/pipeline/start").method(HttpMethod::Post)) + .for_function("pipeline::produce"), + ) + .expect("failed to register http trigger"); + + // Keep the process alive for event processing + tokio::time::sleep(Duration::from_secs(u64::MAX)).await; + iii.shutdown(); + Ok(()) +} diff --git a/skills/references/cron-scheduling.js b/skills/references/cron-scheduling.js new file mode 100644 index 000000000..f92b223f7 --- /dev/null +++ b/skills/references/cron-scheduling.js @@ -0,0 +1,163 @@ +/** + * Pattern: Cron Scheduling + * Comparable to: node-cron, APScheduler, crontab + * + * Schedules recurring tasks using 7-field cron expressions: + * second minute hour day month weekday year + * + * Cron handlers should be fast — enqueue heavy work to a queue. + * + * How-to references: + * - Cron scheduling: https://iii.dev/docs/how-to/schedule-cron-task + */ + +import { registerWorker, Logger, TriggerAction } from 'iii-sdk' + +const iii = registerWorker(process.env.III_ENGINE_URL || 'ws://localhost:49134', { + workerName: 'cron-scheduling', +}) + +// --------------------------------------------------------------------------- +// Hourly cleanup — runs at the top of every hour +// Cron: 0 0 * * * * * (second=0, minute=0, every hour) +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'cron::hourly-cleanup' }, async () => { + const logger = new Logger() + logger.info('Hourly cleanup started') + + const expiredItems = await iii.trigger({ + function_id: 'state::list', + payload: { scope: 'sessions' }, + }) + + const now = Date.now() + let cleaned = 0 + + for (const session of expiredItems || []) { + const age = now - new Date(session.last_active).getTime() + if (age > 3600000) { + // Enqueue heavy deletion work instead of doing it inline + iii.trigger({ 
+ function_id: 'cleanup::process-expired', + payload: { sessionId: session.id }, + action: TriggerAction.Enqueue({ queue: 'cleanup' }), + }) + cleaned++ + } + } + + logger.info('Hourly cleanup enqueued', { cleaned }) + return { cleaned } +}) + +iii.registerTrigger({ + type: 'cron', + function_id: 'cron::hourly-cleanup', + config: { expression: '0 0 * * * * *' }, +}) + +// --------------------------------------------------------------------------- +// Daily report — runs at midnight every day +// Cron: 0 0 0 * * * * (second=0, minute=0, hour=0, every day) +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'cron::daily-report' }, async () => { + const logger = new Logger() + logger.info('Daily report generation started') + + const metrics = await iii.trigger({ + function_id: 'state::get', + payload: { scope: 'daily-metrics', key: 'today' }, + }) + + // Enqueue heavy report generation to a queue + const result = await iii.trigger({ + function_id: 'reports::generate', + payload: { + type: 'daily-summary', + date: new Date().toISOString().split('T')[0], + metrics: metrics || { signups: 0, orders: 0, revenue: 0 }, + }, + action: TriggerAction.Enqueue({ queue: 'reports' }), + }) + + logger.info('Daily report enqueued', { messageReceiptId: result.messageReceiptId }) + + // Reset daily counters + await iii.trigger({ + function_id: 'state::set', + payload: { + scope: 'daily-metrics', + key: 'today', + value: { signups: 0, orders: 0, revenue: 0, reset_at: new Date().toISOString() }, + }, + }) + + return { status: 'enqueued' } +}) + +iii.registerTrigger({ + type: 'cron', + function_id: 'cron::daily-report', + config: { expression: '0 0 0 * * * *' }, +}) + +// --------------------------------------------------------------------------- +// Health check — runs every 5 minutes +// Cron: 0 */5 * * * * * (second=0, every 5th minute) +// --------------------------------------------------------------------------- 
+iii.registerFunction({ id: 'cron::health-check' }, async () => { + const logger = new Logger() + const timestamp = new Date().toISOString() + + // Quick check — read a known state key + const status = await iii.trigger({ + function_id: 'state::get', + payload: { scope: 'system', key: 'health' }, + }) + + const healthy = !!status + + // Persist health result + await iii.trigger({ + function_id: 'state::set', + payload: { + scope: 'system', + key: 'health', + value: { healthy, checked_at: timestamp }, + }, + }) + + if (!healthy) { + logger.warn('Health check failed', { timestamp }) + + // Enqueue alert instead of blocking the cron handler + iii.trigger({ + function_id: 'alerts::send', + payload: { type: 'health-check-failed', timestamp }, + action: TriggerAction.Enqueue({ queue: 'alerts' }), + }) + } + + return { healthy, checked_at: timestamp } +}) + +iii.registerTrigger({ + type: 'cron', + function_id: 'cron::health-check', + config: { expression: '0 */5 * * * * *' }, +}) + +// --------------------------------------------------------------------------- +// Worker for enqueued cleanup tasks +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'cleanup::process-expired' }, async (data) => { + const logger = new Logger() + + await iii.trigger({ + function_id: 'state::delete', + payload: { scope: 'sessions', key: data.sessionId }, + }) + + logger.info('Expired session cleaned up', { sessionId: data.sessionId }) + return { deleted: data.sessionId } +}) diff --git a/skills/references/cron-scheduling.py b/skills/references/cron-scheduling.py new file mode 100644 index 000000000..82636e22b --- /dev/null +++ b/skills/references/cron-scheduling.py @@ -0,0 +1,186 @@ +""" +Pattern: Cron Scheduling +Comparable to: node-cron, APScheduler, crontab + +Schedules recurring tasks using 7-field cron expressions: + second minute hour day month weekday year + +Cron handlers should be fast — enqueue heavy work to a queue. 
+ +How-to references: + - Cron scheduling: https://iii.dev/docs/how-to/schedule-cron-task +""" + +import asyncio +import os +import time +from datetime import datetime, timezone + +from iii import InitOptions, Logger, TriggerAction, register_worker + +iii = register_worker( + address=os.environ.get("III_ENGINE_URL", "ws://localhost:49134"), + options=InitOptions(worker_name="cron-scheduling"), +) + +# --- +# Hourly cleanup — runs at the top of every hour +# Cron: 0 0 * * * * * (second=0, minute=0, every hour) +# --- + + +async def hourly_cleanup(data): + logger = Logger() + logger.info("Hourly cleanup started") + + expired_items = await iii.trigger_async({ + "function_id": "state::list", + "payload": {"scope": "sessions"}, + }) + + now = int(time.time() * 1000) + cleaned = 0 + + for session in expired_items or []: + last_active_ms = int(datetime.fromisoformat(session["last_active"]).timestamp() * 1000) + age = now - last_active_ms + if age > 3600000: + iii.trigger({ + "function_id": "cleanup::process-expired", + "payload": {"sessionId": session["id"]}, + "action": TriggerAction.Enqueue({"queue": "cleanup"}), + }) + cleaned += 1 + + logger.info("Hourly cleanup enqueued", {"cleaned": cleaned}) + return {"cleaned": cleaned} + + +iii.register_function("cron::hourly-cleanup", hourly_cleanup) + +iii.register_trigger({ + "type": "cron", + "function_id": "cron::hourly-cleanup", + "config": {"expression": "0 0 * * * * *"}, +}) + +# --- +# Daily report — runs at midnight every day +# Cron: 0 0 0 * * * * (second=0, minute=0, hour=0, every day) +# --- + + +async def daily_report(data): + logger = Logger() + logger.info("Daily report generation started") + + metrics = await iii.trigger_async({ + "function_id": "state::get", + "payload": {"scope": "daily-metrics", "key": "today"}, + }) + + result = await iii.trigger_async({ + "function_id": "reports::generate", + "payload": { + "type": "daily-summary", + "date": datetime.now(timezone.utc).isoformat().split("T")[0], + "metrics": 
metrics or {"signups": 0, "orders": 0, "revenue": 0}, + }, + "action": TriggerAction.Enqueue({"queue": "reports"}), + }) + + logger.info("Daily report enqueued", {"messageReceiptId": result["messageReceiptId"]}) + + await iii.trigger_async({ + "function_id": "state::set", + "payload": { + "scope": "daily-metrics", + "key": "today", + "value": {"signups": 0, "orders": 0, "revenue": 0, "reset_at": datetime.now(timezone.utc).isoformat()}, + }, + }) + + return {"status": "enqueued"} + + +iii.register_function("cron::daily-report", daily_report) + +iii.register_trigger({ + "type": "cron", + "function_id": "cron::daily-report", + "config": {"expression": "0 0 0 * * * *"}, +}) + +# --- +# Health check — runs every 5 minutes +# Cron: 0 */5 * * * * * (second=0, every 5th minute) +# --- + + +async def health_check(data): + logger = Logger() + timestamp = datetime.now(timezone.utc).isoformat() + + status = await iii.trigger_async({ + "function_id": "state::get", + "payload": {"scope": "system", "key": "health"}, + }) + + healthy = status.get("healthy", True) if isinstance(status, dict) else True + + await iii.trigger_async({ + "function_id": "state::set", + "payload": { + "scope": "system", + "key": "health", + "value": {"healthy": healthy, "checked_at": timestamp}, + }, + }) + + if not healthy: + logger.warn("Health check failed", {"timestamp": timestamp}) + + iii.trigger({ + "function_id": "alerts::send", + "payload": {"type": "health-check-failed", "timestamp": timestamp}, + "action": TriggerAction.Enqueue({"queue": "alerts"}), + }) + + return {"healthy": healthy, "checked_at": timestamp} + + +iii.register_function("cron::health-check", health_check) + +iii.register_trigger({ + "type": "cron", + "function_id": "cron::health-check", + "config": {"expression": "0 */5 * * * * *"}, +}) + +# --- +# Worker for enqueued cleanup tasks +# --- + + +async def process_expired(data): + logger = Logger() + + await iii.trigger_async({ + "function_id": "state::delete", + "payload": 
{"scope": "sessions", "key": data["sessionId"]}, + }) + + logger.info("Expired session cleaned up", {"sessionId": data["sessionId"]}) + return {"deleted": data["sessionId"]} + + +iii.register_function("cleanup::process-expired", process_expired) + + +async def main(): + while True: + await asyncio.sleep(60) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/skills/references/cron-scheduling.rs b/skills/references/cron-scheduling.rs new file mode 100644 index 000000000..b9f253d63 --- /dev/null +++ b/skills/references/cron-scheduling.rs @@ -0,0 +1,259 @@ +/// Pattern: Cron Scheduling +/// Comparable to: node-cron, APScheduler, crontab +/// +/// Schedules recurring tasks using 7-field cron expressions: +/// second minute hour day month weekday year +/// +/// Cron handlers should be fast - enqueue heavy work to a queue. + +use iii_sdk::{ + register_worker, InitOptions, RegisterFunction, TriggerRequest, TriggerAction, + builtin_triggers::*, IIITrigger, Logger, +}; +use serde_json::json; +use std::time::Duration; + +use serde; +use schemars; + +#[derive(serde::Deserialize, schemars::JsonSchema)] +struct CleanupInput { + #[serde(rename = "sessionId")] + session_id: String, +} + +fn main() { + let url = std::env::var("III_ENGINE_URL").unwrap_or("ws://127.0.0.1:49134".into()); + let iii = register_worker(&url, InitOptions::default()); + + // --- + // Hourly cleanup - runs at the top of every hour + // Cron: 0 0 * * * * * (second=0, minute=0, every hour) + // --- + let iii_clone = iii.clone(); + iii.register_function( + RegisterFunction::new_async("cron::hourly-cleanup", move |_: serde_json::Value| { + let iii = iii_clone.clone(); + async move { + let logger = Logger::new(); + logger.info("Hourly cleanup started", &json!({})); + + let expired_items = iii + .trigger(TriggerRequest { + function_id: "state::list".into(), + payload: json!({ "scope": "sessions" }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + let now = 
chrono::Utc::now().timestamp_millis(); + let mut cleaned = 0u64; + + if let Some(sessions) = expired_items.as_array() { + for session in sessions { + let last_active = session["last_active"] + .as_str() + .and_then(|s| chrono::DateTime::parse_from_rfc3339(s).ok()) + .map(|dt| dt.timestamp_millis()) + .unwrap_or(0); + + let age = now - last_active; + if age > 3_600_000 { + let session_id = session["id"].as_str().unwrap_or("").to_string(); + iii.trigger(TriggerRequest { + function_id: "cleanup::process-expired".into(), + payload: json!({ "sessionId": session_id }), + action: Some(TriggerAction::Enqueue { queue: "cleanup".into() }), + timeout_ms: None, + }) + .await + .ok(); + cleaned += 1; + } + } + } + + logger.info("Hourly cleanup enqueued", &json!({ "cleaned": cleaned })); + Ok(json!({ "cleaned": cleaned })) + } + }) + .description("Hourly cleanup of expired sessions"), + ); + + iii.register_trigger( + IIITrigger::Cron(CronTriggerConfig::new("0 0 * * * * *")) + .for_function("cron::hourly-cleanup"), + ) + .expect("failed"); + + // --- + // Daily report - runs at midnight every day + // Cron: 0 0 0 * * * * (second=0, minute=0, hour=0, every day) + // --- + let iii_clone = iii.clone(); + iii.register_function( + RegisterFunction::new_async("cron::daily-report", move |_: serde_json::Value| { + let iii = iii_clone.clone(); + async move { + let logger = Logger::new(); + logger.info("Daily report generation started", &json!({})); + + let metrics = iii + .trigger(TriggerRequest { + function_id: "state::get".into(), + payload: json!({ "scope": "daily-metrics", "key": "today" }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + let metrics_val = if metrics.is_null() { + json!({ "signups": 0, "orders": 0, "revenue": 0 }) + } else { + metrics + }; + + let today = chrono::Utc::now().format("%Y-%m-%d").to_string(); + + let result = iii + .trigger(TriggerRequest { + function_id: "reports::generate".into(), + payload: json!({ + "type": 
"daily-summary", + "date": today, + "metrics": metrics_val, + }), + action: Some(TriggerAction::Enqueue { queue: "reports".into() }), + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + logger.info("Daily report enqueued", &json!({ "messageReceiptId": result["messageReceiptId"] })); + + iii.trigger(TriggerRequest { + function_id: "state::set".into(), + payload: json!({ + "scope": "daily-metrics", + "key": "today", + "value": { + "signups": 0, + "orders": 0, + "revenue": 0, + "reset_at": chrono::Utc::now().to_rfc3339(), + }, + }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + Ok(json!({ "status": "enqueued" })) + } + }) + .description("Generate daily report at midnight"), + ); + + iii.register_trigger( + IIITrigger::Cron(CronTriggerConfig::new("0 0 0 * * * *")) + .for_function("cron::daily-report"), + ) + .expect("failed"); + + // --- + // Health check - runs every 5 minutes + // Cron: 0 */5 * * * * * (second=0, every 5th minute) + // --- + let iii_clone = iii.clone(); + iii.register_function( + RegisterFunction::new_async("cron::health-check", move |_: serde_json::Value| { + let iii = iii_clone.clone(); + async move { + let logger = Logger::new(); + let timestamp = chrono::Utc::now().to_rfc3339(); + + let status = iii + .trigger(TriggerRequest { + function_id: "state::get".into(), + payload: json!({ "scope": "system", "key": "health" }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + let healthy = !status.is_null(); + + iii.trigger(TriggerRequest { + function_id: "state::set".into(), + payload: json!({ + "scope": "system", + "key": "health", + "value": { "healthy": healthy, "checked_at": timestamp }, + }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + if !healthy { + logger.warn("Health check failed", &json!({ "timestamp": timestamp })); + + iii.trigger(TriggerRequest { + function_id: "alerts::send".into(), + payload: json!({ 
"type": "health-check-failed", "timestamp": timestamp }), + action: Some(TriggerAction::Enqueue { queue: "alerts".into() }), + timeout_ms: None, + }) + .await + .ok(); + } + + Ok(json!({ "healthy": healthy, "checked_at": timestamp })) + } + }) + .description("Health check every 5 minutes"), + ); + + iii.register_trigger( + IIITrigger::Cron(CronTriggerConfig::new("0 */5 * * * * *")) + .for_function("cron::health-check"), + ) + .expect("failed"); + + // --- + // Worker for enqueued cleanup tasks + // --- + let iii_clone = iii.clone(); + iii.register_function( + RegisterFunction::new_async("cleanup::process-expired", move |data: CleanupInput| { + let iii = iii_clone.clone(); + async move { + let logger = Logger::new(); + + iii.trigger(TriggerRequest { + function_id: "state::delete".into(), + payload: json!({ "scope": "sessions", "key": data.session_id }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + logger.info("Expired session cleaned up", &json!({ "sessionId": data.session_id })); + Ok(json!({ "deleted": data.session_id })) + } + }) + .description("Clean up an expired session"), + ); + + tokio::runtime::Runtime::new().unwrap().block_on(async { + tokio::signal::ctrl_c().await.ok(); + }); + iii.shutdown(); +} diff --git a/skills/references/custom-triggers.js b/skills/references/custom-triggers.js new file mode 100644 index 000000000..641e82280 --- /dev/null +++ b/skills/references/custom-triggers.js @@ -0,0 +1,188 @@ +/** + * Pattern: Custom Triggers + * Comparable to: Custom event adapters, webhook connectors, polling integrators + * + * Demonstrates how to define entirely new trigger types beyond the built-in + * http, queue, cron, state, and subscribe triggers. A custom trigger type + * registers handler callbacks that the engine invokes when triggers of that + * type are created or removed, letting you bridge any external event source + * (webhooks, file-system watchers, pollers) into the iii function graph. 
+ * + * How-to references: + * - Custom trigger types: https://iii.dev/docs/how-to/create-custom-trigger-type + */ + +import { registerWorker, Logger, TriggerAction } from 'iii-sdk' + +const iii = registerWorker(process.env.III_ENGINE_URL || 'ws://localhost:49134', { + workerName: 'custom-triggers', +}) + +// --------------------------------------------------------------------------- +// Custom trigger type — Webhook receiver +// Registers an HTTP endpoint per trigger and fires the bound function when +// an external service POSTs to it. +// --------------------------------------------------------------------------- +const webhookEndpoints = new Map() + +iii.registerTriggerType({ + id: 'webhook', + description: 'Fires when an external service sends an HTTP POST to the registered endpoint', + handler: { + // Called when a trigger of this type is created via registerTrigger + // TriggerConfig shape: { id, function_id, config } + registerTrigger: async (triggerConfig) => { + const logger = new Logger() + const { id, function_id, config } = triggerConfig + const path = config.path || `/webhooks/${id}` + + logger.info('Registering webhook endpoint', { id, path }) + + // In a real implementation you would bind an HTTP route here. + // When the route receives a POST the callback fires the bound function. 
+ const endpoint = { + path, + callback: async (requestBody) => { + await iii.trigger({ + function_id, + payload: { source: 'webhook', trigger_id: id, data: requestBody }, + }) + }, + } + + webhookEndpoints.set(id, endpoint) + }, + + // Called when the trigger is removed — clean up the endpoint + unregisterTrigger: async (triggerConfig) => { + const logger = new Logger() + logger.info('Removing webhook endpoint', { id: triggerConfig.id }) + webhookEndpoints.delete(triggerConfig.id) + }, + }, +}) + +// --------------------------------------------------------------------------- +// Custom trigger type — File watcher +// Uses fs.watch to fire the bound function whenever a file changes. +// --------------------------------------------------------------------------- +import fs from 'fs' + +const fileWatchers = new Map() + +iii.registerTriggerType({ + id: 'file-watch', + description: 'Fires when a file on the local filesystem changes', + handler: { + registerTrigger: async (triggerConfig) => { + const { id, function_id, config } = triggerConfig + const filePath = config.file_path + + const watcher = fs.watch(filePath, (eventType, filename) => { + iii.trigger({ + function_id, + payload: { source: 'file-watch', trigger_id: id, eventType, filename }, + action: TriggerAction.Void(), // fire-and-forget, don't block the watcher + }) + }) + + fileWatchers.set(id, watcher) + }, + + unregisterTrigger: async (triggerConfig) => { + const watcher = fileWatchers.get(triggerConfig.id) + if (watcher) { + watcher.close() + fileWatchers.delete(triggerConfig.id) + } + }, + }, +}) + +// --------------------------------------------------------------------------- +// Custom trigger type — Polling with ETag +// Periodically fetches a URL and fires only when the content changes. 
+// --------------------------------------------------------------------------- +const pollers = new Map() + +iii.registerTriggerType({ + id: 'polling', + description: 'Polls a URL at a fixed interval and fires when the ETag changes', + handler: { + registerTrigger: async (triggerConfig) => { + const { id, function_id, config } = triggerConfig + const { url, interval_ms = 30000 } = config + let lastETag = null + + const timer = setInterval(async () => { + try { + const res = await fetch(url, { + method: 'GET', + headers: lastETag ? { 'If-None-Match': lastETag } : {}, + }) + + if (res.status === 304) return // no change + + const etag = res.headers.get('etag') + if (etag && etag !== lastETag) { + lastETag = etag + const body = await res.json() + + await iii.trigger({ + function_id, + payload: { source: 'polling', trigger_id: id, etag, data: body }, + }) + } + } catch (err) { + const logger = new Logger() + logger.error('Polling failed', { id, url, error: err.message }) + } + }, interval_ms) + + pollers.set(id, timer) + }, + + unregisterTrigger: async (triggerConfig) => { + const timer = pollers.get(triggerConfig.id) + if (timer) { + clearInterval(timer) + pollers.delete(triggerConfig.id) + } + }, + }, +}) + +// --------------------------------------------------------------------------- +// Handler function — processes events from any custom trigger above +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'custom-triggers::on-event' }, async (data) => { + const logger = new Logger() + logger.info('Custom trigger fired', { source: data.source, trigger_id: data.trigger_id }) + return { received: true, source: data.source } +}) + +// --------------------------------------------------------------------------- +// Bind triggers using the custom types defined above +// --------------------------------------------------------------------------- +iii.registerTrigger({ + type: 'webhook', + function_id: 
'custom-triggers::on-event', + config: { path: '/hooks/github' }, +}) + +iii.registerTrigger({ + type: 'file-watch', + function_id: 'custom-triggers::on-event', + config: { file_path: '/var/data/config.json' }, +}) + +iii.registerTrigger({ + type: 'polling', + function_id: 'custom-triggers::on-event', + config: { url: 'https://api.example.com/status', interval_ms: 60000 }, +}) + +// --------------------------------------------------------------------------- +// Cleanup — unregister a trigger type when it is no longer needed +// --------------------------------------------------------------------------- +// iii.unregisterTriggerType('polling') diff --git a/skills/references/custom-triggers.py b/skills/references/custom-triggers.py new file mode 100644 index 000000000..a23545b75 --- /dev/null +++ b/skills/references/custom-triggers.py @@ -0,0 +1,175 @@ +""" +Pattern: Custom Triggers +Comparable to: Custom event adapters, webhook connectors, polling integrators + +Demonstrates how to define entirely new trigger types beyond the built-in +http, queue, cron, state, and subscribe triggers. A custom trigger type +registers handler callbacks that the engine invokes when triggers of that +type are created or removed, letting you bridge any external event source +(webhooks, pollers) into the iii function graph. + +Note: File watcher is omitted — it requires the watchdog dependency. +Polling uses asyncio.create_task instead of setInterval. + +How-to references: + - Custom trigger types: https://iii.dev/docs/how-to/create-custom-trigger-type +""" + +import asyncio +import os + +from iii import InitOptions, Logger, TriggerAction, register_worker + +iii = register_worker( + address=os.environ.get("III_ENGINE_URL", "ws://localhost:49134"), + options=InitOptions(worker_name="custom-triggers"), +) + +# --- +# Custom trigger type — Webhook receiver +# Registers an HTTP endpoint per trigger and fires the bound function when +# an external service POSTs to it. 
+# --- +webhook_endpoints = {} + + +async def webhook_register(trigger_config): + logger = Logger() + trigger_id = trigger_config["id"] + function_id = trigger_config["function_id"] + config = trigger_config["config"] + path = config.get("path", f"/webhooks/{trigger_id}") + + logger.info("Registering webhook endpoint", {"id": trigger_id, "path": path}) + + async def callback(request_body): + await iii.trigger_async({ + "function_id": function_id, + "payload": {"source": "webhook", "trigger_id": trigger_id, "data": request_body}, + }) + + webhook_endpoints[trigger_id] = {"path": path, "callback": callback} + + +async def webhook_unregister(trigger_config): + logger = Logger() + logger.info("Removing webhook endpoint", {"id": trigger_config["id"]}) + webhook_endpoints.pop(trigger_config["id"], None) + + +iii.register_trigger_type({ + "id": "webhook", + "description": "Fires when an external service sends an HTTP POST to the registered endpoint", + "handler": { + "register_trigger": webhook_register, + "unregister_trigger": webhook_unregister, + }, +}) + +# --- +# Custom trigger type — Polling with ETag +# Periodically fetches a URL and fires only when the content changes. +# Uses asyncio.create_task for the polling loop instead of setInterval. 
+# ---
+pollers = {}
+
+
+async def _poll_loop(trigger_id, function_id, url, interval_ms):
+    import urllib.error, urllib.request
+    import json
+
+    last_etag = None
+    interval_s = interval_ms / 1000
+
+    while True:
+        try:
+            req = urllib.request.Request(url, method="GET")
+            if last_etag:
+                req.add_header("If-None-Match", last_etag)
+
+            try:
+                resp = await asyncio.to_thread(urllib.request.urlopen, req)
+            except urllib.error.HTTPError as err:
+                if err.code != 304: raise  # urlopen raises on 304 — it just means "no change"
+                await asyncio.sleep(interval_s)
+                continue
+            etag = resp.headers.get("ETag")
+            if etag and etag != last_etag:
+                last_etag = etag
+                body = json.loads(resp.read().decode())
+
+                await iii.trigger_async({
+                    "function_id": function_id,
+                    "payload": {"source": "polling", "trigger_id": trigger_id, "etag": etag, "data": body},
+                })
+        except asyncio.CancelledError:
+            break
+        except Exception as err:
+            logger = Logger()
+            logger.error("Polling failed", {"id": trigger_id, "url": url, "error": str(err)})
+
+        await asyncio.sleep(interval_s)
+
+
+async def polling_register(trigger_config):
+    trigger_id = trigger_config["id"]
+    function_id = trigger_config["function_id"]
+    config = trigger_config["config"]
+    url = config["url"]
+    interval_ms = config.get("interval_ms", 30000)
+
+    task = asyncio.create_task(_poll_loop(trigger_id, function_id, url, interval_ms))
+    pollers[trigger_id] = task
+
+
+async def polling_unregister(trigger_config):
+    task = pollers.pop(trigger_config["id"], None)
+    if task:
+        task.cancel()
+
+
+iii.register_trigger_type({
+    "id": "polling",
+    "description": "Polls a URL at a fixed interval and fires when the ETag changes",
+    "handler": {
+        "register_trigger": polling_register,
+        "unregister_trigger": polling_unregister,
+    },
+})
+
+# ---
+# Handler function — processes events from any custom trigger above
+# ---
+
+
+async def on_event(data):
+    logger = Logger()
+    logger.info("Custom trigger fired", {"source": data["source"], "trigger_id": data["trigger_id"]})
+    return {"received": True, "source": data["source"]}
+
+
+iii.register_function("custom-triggers::on-event", on_event) + +# --- +# Bind triggers using the custom types defined above +# --- +iii.register_trigger({ + "type": "webhook", + "function_id": "custom-triggers::on-event", + "config": {"path": "/hooks/github"}, +}) + +iii.register_trigger({ + "type": "polling", + "function_id": "custom-triggers::on-event", + "config": {"url": "https://api.example.com/status", "interval_ms": 60000}, +}) + + +async def main(): + while True: + await asyncio.sleep(60) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/skills/references/custom-triggers.rs b/skills/references/custom-triggers.rs new file mode 100644 index 000000000..3e6b5d206 --- /dev/null +++ b/skills/references/custom-triggers.rs @@ -0,0 +1,230 @@ +/// Pattern: Custom Triggers +/// Comparable to: Custom event adapters, webhook connectors, polling integrators +/// +/// Demonstrates how to define entirely new trigger types beyond the built-in +/// http, queue, cron, state, and subscribe triggers. A custom trigger type +/// registers handler callbacks that the engine invokes when triggers of that +/// type are created or removed, letting you bridge any external event source +/// (webhooks, pollers) into the iii function graph. +/// +/// How-to references: +/// - Custom trigger types: https://iii.dev/docs/how-to/create-custom-trigger-type + +use std::collections::HashMap; +use std::sync::Arc; +use std::time::Duration; + +use iii_sdk::{ + register_worker, InitOptions, RegisterFunction, TriggerRequest, TriggerAction, + TriggerConfig, TriggerHandler, +}; +use serde_json::json; +use tokio::sync::Mutex; +use tokio::task::JoinHandle; + +// --------------------------------------------------------------------------- +// Custom trigger type — Webhook receiver +// Registers an HTTP endpoint per trigger and fires the bound function when +// an external service POSTs to it. 
+// ---------------------------------------------------------------------------
+
+struct WebhookEndpoint {
+    path: String,
+    function_id: String,
+}
+
+struct WebhookTriggerHandler {
+    iii: iii_sdk::III,
+    endpoints: Arc<Mutex<HashMap<String, WebhookEndpoint>>>,
+}
+
+impl TriggerHandler for WebhookTriggerHandler {
+    async fn register_trigger(&self, config: TriggerConfig) -> Result<(), String> {
+        let path = config
+            .config
+            .get("path")
+            .and_then(|v| v.as_str())
+            .map(str::to_string)
+            .unwrap_or_else(|| format!("/webhooks/{}", config.id));
+
+        let endpoint = WebhookEndpoint {
+            path: path.clone(),
+            function_id: config.function_id.clone(),
+        };
+
+        self.endpoints
+            .lock()
+            .await
+            .insert(config.id.clone(), endpoint);
+
+        Ok(())
+    }
+
+    // NOTE: In production, an HTTP listener would match incoming requests
+    // to endpoints and call iii.trigger(endpoint.function_id, payload)
+    async fn unregister_trigger(&self, config: TriggerConfig) -> Result<(), String> {
+        self.endpoints.lock().await.remove(&config.id);
+        Ok(())
+    }
+}
+
+// ---------------------------------------------------------------------------
+// Custom trigger type — Polling with ETag
+// Periodically fetches a URL and fires only when the content changes.
+// ---------------------------------------------------------------------------
+
+struct PollingTriggerHandler {
+    iii: iii_sdk::III,
+    tasks: Arc<Mutex<HashMap<String, JoinHandle<()>>>>,
+}
+
+impl TriggerHandler for PollingTriggerHandler {
+    async fn register_trigger(&self, config: TriggerConfig) -> Result<(), String> {
+        let trigger_id = config.id.clone();
+        let function_id = config.function_id.clone();
+        let url = config
+            .config
+            .get("url")
+            .and_then(|v| v.as_str())
+            .ok_or("missing url in config")?
+            .to_string();
+        let interval_ms = match config.config.get("interval_ms").and_then(|v| v.as_u64()) {
+            Some(0) => return Err("interval_ms must be greater than 0".into()),
+            Some(ms) => ms,
+            None => 30_000,
+        };
+
+        let iii = self.iii.clone();
+
+        let handle = tokio::spawn(async move {
+            let client = reqwest::Client::new();
+            let mut last_etag: Option<String> = None;
+
+            loop {
+                let mut req = client.get(&url);
+                if let Some(ref etag) = last_etag {
+                    req = req.header("If-None-Match", etag);
+                }
+
+                match req.send().await {
+                    Ok(resp) => {
+                        let status = resp.status().as_u16();
+
+                        if status == 304 {
+                            tokio::time::sleep(Duration::from_millis(interval_ms)).await;
+                            continue;
+                        }
+
+                        if !(200..300).contains(&status) {
+                            tokio::time::sleep(Duration::from_millis(interval_ms)).await;
+                            continue;
+                        }
+
+                        let etag = resp
+                            .headers()
+                            .get("etag")
+                            .and_then(|v| v.to_str().ok())
+                            .map(|s| s.to_string());
+
+                        if etag.is_some() && etag != last_etag {
+                            if let Ok(body) = resp.json::<serde_json::Value>().await {
+                                let result = iii
+                                    .trigger(TriggerRequest {
+                                        function_id: function_id.clone(),
+                                        payload: json!({
+                                            "source": "polling",
+                                            "trigger_id": trigger_id,
+                                            "etag": etag,
+                                            "data": body,
+                                        }),
+                                        action: None,
+                                        timeout_ms: None,
+                                    })
+                                    .await;
+
+                                if result.is_ok() {
+                                    last_etag = etag;
+                                }
+                            }
+                        }
+                    }
+                    Err(_) => {}
+                }
+
+                tokio::time::sleep(Duration::from_millis(interval_ms)).await;
+            }
+        });
+
+        self.tasks.lock().await.insert(config.id.clone(), handle);
+        Ok(())
+    }
+
+    async fn unregister_trigger(&self, config: TriggerConfig) -> Result<(), String> {
+        if let Some(handle) = self.tasks.lock().await.remove(&config.id) {
+            handle.abort();
+        }
+        Ok(())
+    }
+}
+
+// ---------------------------------------------------------------------------
+// Handler function — processes events from any custom trigger above
+// ---------------------------------------------------------------------------
+
+fn on_event(input: serde_json::Value) -> Result<serde_json::Value, String> {
+    Ok(json!({
+        "received": true,
+        "source": 
input.get("source").and_then(|v| v.as_str()).unwrap_or("unknown"), + })) +} + +// --------------------------------------------------------------------------- +// Main +// --------------------------------------------------------------------------- + +#[tokio::main] +async fn main() -> Result<(), Box> { + let url = std::env::var("III_ENGINE_URL").unwrap_or("ws://127.0.0.1:49134".into()); + let iii = register_worker(&url, InitOptions::default()); + + // Register handler function + iii.register_function( + RegisterFunction::new("custom-triggers::on-event", on_event) + .description("Processes events from custom trigger types"), + ); + + // Register webhook trigger type + let webhook_handler = WebhookTriggerHandler { + iii: iii.clone(), + endpoints: Arc::new(Mutex::new(HashMap::new())), + }; + iii.register_trigger_type("webhook", webhook_handler) + .expect("failed to register webhook trigger type"); + + // Register polling trigger type + let polling_handler = PollingTriggerHandler { + iii: iii.clone(), + tasks: Arc::new(Mutex::new(HashMap::new())), + }; + iii.register_trigger_type("polling", polling_handler) + .expect("failed to register polling trigger type"); + + // Bind triggers using the custom types + iii.register_trigger_with_config( + "webhook", + "custom-triggers::on-event", + json!({ "path": "/hooks/github" }), + ) + .expect("failed to register webhook trigger"); + + iii.register_trigger_with_config( + "polling", + "custom-triggers::on-event", + json!({ "url": "https://api.example.com/status", "interval_ms": 60000 }), + ) + .expect("failed to register polling trigger"); + + tokio::signal::ctrl_c().await.ok(); + iii.shutdown(); + Ok(()) +} diff --git a/skills/references/dead-letter-queues.js b/skills/references/dead-letter-queues.js new file mode 100644 index 000000000..15da31569 --- /dev/null +++ b/skills/references/dead-letter-queues.js @@ -0,0 +1,160 @@ +/** + * Pattern: Dead Letter Queues + * Comparable to: SQS DLQ, RabbitMQ dead-letter exchanges, BullMQ 
failed jobs + * + * When a queued function exhausts its retry budget (configured via + * queue_configs.max_retries and backoff_ms in iii.config.yaml) the message + * moves to the queue's dead-letter queue (DLQ). Messages in the DLQ can be + * inspected and redriven back to the source queue via the SDK or CLI. + * + * How-to references: + * - Dead letter queues: https://iii.dev/docs/how-to/dead-letter-queues + */ + +import { registerWorker, Logger, TriggerAction } from 'iii-sdk' + +const iii = registerWorker(process.env.III_ENGINE_URL || 'ws://localhost:49134', { + workerName: 'dead-letter-queues', +}) + +// --------------------------------------------------------------------------- +// Queue configuration reference (iii.config.yaml) +// +// queue_configs: +// payment: +// max_retries: 3 # after 3 failures the message goes to DLQ +// backoff_ms: 1000 # exponential backoff base +// email: +// max_retries: 5 +// backoff_ms: 2000 +// --------------------------------------------------------------------------- + +// --------------------------------------------------------------------------- +// 1. Function that processes payments — may fail and exhaust retries +// After max_retries failures the message lands in the "payment" DLQ. +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'payments::charge' }, async (data) => { + const logger = new Logger() + logger.info('Attempting payment charge', { orderId: data.order_id }) + + // Simulate a transient failure (e.g. 
gateway timeout) + const gatewayUp = Math.random() > 0.7 + if (!gatewayUp) { + throw new Error('Payment gateway timeout — will be retried') + } + + logger.info('Payment succeeded', { orderId: data.order_id }) + return { charged: true, order_id: data.order_id } +}) + +iii.registerTrigger({ + type: 'queue', + function_id: 'payments::charge', + config: { queue: 'payment' }, +}) + +// --------------------------------------------------------------------------- +// 2. Enqueue a payment to demonstrate the retry / DLQ flow +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'orders::submit-payment' }, async (data) => { + const logger = new Logger() + + const receipt = await iii.trigger({ + function_id: 'payments::charge', + payload: { order_id: data.order_id, amount: data.amount }, + action: TriggerAction.Enqueue({ queue: 'payment' }), + }) + + logger.info('Payment enqueued', { receiptId: receipt.messageReceiptId }) + return receipt +}) + +iii.registerTrigger({ + type: 'http', + function_id: 'orders::submit-payment', + config: { api_path: '/orders/pay', http_method: 'POST' }, +}) + +// --------------------------------------------------------------------------- +// 3. Redrive DLQ messages back to the source queue via SDK +// Calls the built-in iii::queue::redrive function. Returns the queue name +// and the count of redriven messages. 
+// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'admin::redrive-payments' }, async () => { + const logger = new Logger() + + const result = await iii.trigger({ + function_id: 'iii::queue::redrive', + payload: { queue: 'payment' }, + }) + + // result shape: { queue: 'payment', redriven: 12 } + logger.info('Redrive complete', { queue: result.queue, redriven: result.redriven }) + return result +}) + +iii.registerTrigger({ + type: 'http', + function_id: 'admin::redrive-payments', + config: { api_path: '/admin/redrive/payments', http_method: 'POST' }, +}) + +// --------------------------------------------------------------------------- +// CLI alternative for redrive (run from terminal — iii trigger is part of the engine binary): +// iii trigger --function-id='iii::queue::redrive' --payload='{"queue": "payment"}' +// iii trigger --function-id='iii::queue::redrive' --payload='{"queue": "payment"}' --timeout-ms=60000 +// --------------------------------------------------------------------------- + +// --------------------------------------------------------------------------- +// 4. 
DLQ inspection pattern — check how many messages are stuck +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'admin::dlq-status' }, async () => { + const logger = new Logger() + + // Inspect DLQ for each configured queue + const queues = ['payment', 'email'] + const statuses = [] + + for (const queue of queues) { + const info = await iii.trigger({ + function_id: 'iii::queue::status', + payload: { queue }, + }) + + logger.info('Queue status', { queue, dlq_count: info.dlq_count, pending: info.pending }) + statuses.push({ queue, dlq_count: info.dlq_count, pending: info.pending }) + } + + return { queues: statuses } +}) + +iii.registerTrigger({ + type: 'http', + function_id: 'admin::dlq-status', + config: { api_path: '/admin/dlq/status', http_method: 'GET' }, +}) + +// --------------------------------------------------------------------------- +// 5. Targeted redrive — redrive a single queue from a cron schedule +// Useful for automatically retrying failed messages every hour. 
+// ---------------------------------------------------------------------------
+iii.registerFunction({ id: 'admin::auto-redrive' }, async () => {
+  const logger = new Logger()
+
+  const result = await iii.trigger({
+    function_id: 'iii::queue::redrive',
+    payload: { queue: 'payment' },
+  })
+
+  if (result.redriven > 0) {
+    logger.info('Auto-redrive recovered messages', { redriven: result.redriven })
+  }
+
+  return result
+})
+
+iii.registerTrigger({
+  type: 'cron',
+  function_id: 'admin::auto-redrive',
+  config: { expression: '0 0 * * * * *' }, // top of every hour (7-field: sec min hour dom month dow year)
+})
diff --git a/skills/references/dead-letter-queues.py b/skills/references/dead-letter-queues.py
new file mode 100644
index 000000000..c60ce22e1
--- /dev/null
+++ b/skills/references/dead-letter-queues.py
@@ -0,0 +1,193 @@
+"""
+Pattern: Dead Letter Queues
+Comparable to: SQS DLQ, RabbitMQ dead-letter exchanges, BullMQ failed jobs
+
+When a queued function exhausts its retry budget (configured via
+queue_configs.max_retries and backoff_ms in iii.config.yaml) the message
+moves to the queue's dead-letter queue (DLQ). Messages in the DLQ can be
+inspected and redriven back to the source queue via the SDK or CLI.
+
+How-to references:
+ - Dead letter queues: https://iii.dev/docs/how-to/dead-letter-queues
+"""
+
+import asyncio
+import os
+import random
+
+from iii import InitOptions, Logger, TriggerAction, register_worker
+
+iii = register_worker(
+    address=os.environ.get("III_ENGINE_URL", "ws://localhost:49134"),
+    options=InitOptions(worker_name="dead-letter-queues"),
+)
+
+# ---
+# Queue configuration reference (iii.config.yaml)
+#
+# queue_configs:
+#   payment:
+#     max_retries: 3     # after 3 failures the message goes to DLQ
+#     backoff_ms: 1000   # exponential backoff base
+#   email:
+#     max_retries: 5
+#     backoff_ms: 2000
+# ---
+
+# ---
+# 1. Function that processes payments — may fail and exhaust retries
+# After max_retries failures the message lands in the "payment" DLQ. 
+# --- + + +async def payments_charge(data): + logger = Logger() + logger.info("Attempting payment charge", {"orderId": data["order_id"]}) + + gateway_up = random.random() > 0.7 + if not gateway_up: + raise Exception("Payment gateway timeout — will be retried") + + logger.info("Payment succeeded", {"orderId": data["order_id"]}) + return {"charged": True, "order_id": data["order_id"]} + + +iii.register_function("payments::charge", payments_charge) + +iii.register_trigger({ + "type": "queue", + "function_id": "payments::charge", + "config": {"queue": "payment"}, +}) + +# --- +# 2. Enqueue a payment to demonstrate the retry / DLQ flow +# --- + + +async def submit_payment(data): + logger = Logger() + + order_id = data.get("order_id") if isinstance(data, dict) else None + amount = data.get("amount") if isinstance(data, dict) else None + if not order_id or amount is None: + return {"status_code": 400, "body": {"error": "order_id and amount required"}} + + receipt = await iii.trigger_async({ + "function_id": "payments::charge", + "payload": {"order_id": order_id, "amount": amount}, + "action": TriggerAction.Enqueue({"queue": "payment"}), + }) + + logger.info("Payment enqueued", {"receiptId": receipt["messageReceiptId"]}) + return receipt + + +iii.register_function("orders::submit-payment", submit_payment) + +iii.register_trigger({ + "type": "http", + "function_id": "orders::submit-payment", + "config": {"api_path": "/orders/pay", "http_method": "POST"}, +}) + +# --- +# 3. Redrive DLQ messages back to the source queue via SDK +# Calls the built-in iii::queue::redrive function. Returns the queue name +# and the count of redriven messages. 
+# --- + + +async def redrive_payments(data): + logger = Logger() + + result = await iii.trigger_async({ + "function_id": "iii::queue::redrive", + "payload": {"queue": "payment"}, + }) + + logger.info("Redrive complete", {"queue": result["queue"], "redriven": result["redriven"]}) + return result + + +iii.register_function("admin::redrive-payments", redrive_payments) + +iii.register_trigger({ + "type": "http", + "function_id": "admin::redrive-payments", + "config": {"api_path": "/admin/redrive/payments", "http_method": "POST"}, +}) + +# --- +# CLI alternative for redrive (run from terminal): +# iii trigger --function-id='iii::queue::redrive' --payload='{"queue": "payment"}' +# iii trigger --function-id='iii::queue::redrive' --payload='{"queue": "payment"}' --timeout-ms=60000 +# --- + +# --- +# 4. DLQ inspection pattern — check how many messages are stuck +# --- + + +async def dlq_status(data): + logger = Logger() + + queues = ["payment", "email"] + statuses = [] + + for queue in queues: + info = await iii.trigger_async({ + "function_id": "iii::queue::status", + "payload": {"queue": queue}, + }) + + logger.info("Queue status", {"queue": queue, "dlq_count": info["dlq_count"], "pending": info["pending"]}) + statuses.append({"queue": queue, "dlq_count": info["dlq_count"], "pending": info["pending"]}) + + return {"queues": statuses} + + +iii.register_function("admin::dlq-status", dlq_status) + +iii.register_trigger({ + "type": "http", + "function_id": "admin::dlq-status", + "config": {"api_path": "/admin/dlq/status", "http_method": "GET"}, +}) + +# --- +# 5. Targeted redrive — redrive a single queue from a cron schedule +# Useful for automatically retrying failed messages every hour. 
+# ---
+
+
+async def auto_redrive(data):
+    logger = Logger()
+
+    result = await iii.trigger_async({
+        "function_id": "iii::queue::redrive",
+        "payload": {"queue": "payment"},
+    })
+
+    if result["redriven"] > 0:
+        logger.info("Auto-redrive recovered messages", {"redriven": result["redriven"]})
+
+    return result
+
+
+iii.register_function("admin::auto-redrive", auto_redrive)
+
+iii.register_trigger({
+    "type": "cron",
+    "function_id": "admin::auto-redrive",
+    "config": {"expression": "0 0 * * * * *"},  # every hour (7-field: sec min hour dom month dow year)
+})
+
+
+async def main():
+    while True:
+        await asyncio.sleep(60)
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
diff --git a/skills/references/dead-letter-queues.rs b/skills/references/dead-letter-queues.rs
new file mode 100644
index 000000000..f3bb6aaa4
--- /dev/null
+++ b/skills/references/dead-letter-queues.rs
@@ -0,0 +1,234 @@
+/// Pattern: Dead Letter Queues
+/// Comparable to: SQS DLQ, RabbitMQ dead-letter exchanges, BullMQ failed jobs
+///
+/// When a queued function exhausts its retry budget (configured via
+/// queue_configs.max_retries and backoff_ms in iii.config.yaml) the message
+/// moves to the queue's dead-letter queue (DLQ). Messages in the DLQ can be
+/// inspected and redriven back to the source queue via the SDK or CLI.
+ +use iii_sdk::{ + register_worker, InitOptions, RegisterFunction, TriggerRequest, TriggerAction, + builtin_triggers::*, IIITrigger, Logger, +}; +use serde_json::json; +use std::time::Duration; + +use serde; +use schemars; + +// Queue configuration reference (iii.config.yaml) +// +// queue_configs: +// payment: +// max_retries: 3 # after 3 failures the message goes to DLQ +// backoff_ms: 1000 # exponential backoff base +// email: +// max_retries: 5 +// backoff_ms: 2000 + +#[derive(serde::Deserialize, schemars::JsonSchema)] +struct ChargeInput { + order_id: String, + amount: Option, +} + +#[derive(serde::Deserialize, schemars::JsonSchema)] +struct SubmitPaymentInput { + order_id: String, + amount: f64, +} + +fn main() { + let url = std::env::var("III_ENGINE_URL").unwrap_or("ws://127.0.0.1:49134".into()); + let iii = register_worker(&url, InitOptions::default()); + + // --- + // 1. Function that processes payments - may fail and exhaust retries + // After max_retries failures the message lands in the "payment" DLQ. + // --- + iii.register_function( + RegisterFunction::new("payments::charge", |data: ChargeInput| -> Result { + let logger = Logger::new(); + logger.info("Attempting payment charge", &json!({ "orderId": data.order_id })); + + let gateway_up = rand::random::() > 0.7; + if !gateway_up { + return Err("Payment gateway timeout - will be retried".into()); + } + + logger.info("Payment succeeded", &json!({ "orderId": data.order_id })); + Ok(json!({ "charged": true, "order_id": data.order_id })) + }) + .description("Charge payment (may fail for DLQ demo)"), + ); + + iii.register_trigger( + IIITrigger::Queue(QueueTriggerConfig::new("payment")) + .for_function("payments::charge"), + ) + .expect("failed"); + + // --- + // 2. 
Enqueue a payment to demonstrate the retry / DLQ flow + // --- + let iii_clone = iii.clone(); + iii.register_function( + RegisterFunction::new_async("orders::submit-payment", move |data: SubmitPaymentInput| { + let iii = iii_clone.clone(); + async move { + let logger = Logger::new(); + + let receipt = iii + .trigger(TriggerRequest { + function_id: "payments::charge".into(), + payload: json!({ "order_id": data.order_id, "amount": data.amount }), + action: Some(TriggerAction::Enqueue { queue: "payment".into() }), + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + logger.info("Payment enqueued", &json!({ "receiptId": receipt["messageReceiptId"] })); + Ok(receipt) + } + }) + .description("Submit a payment to the queue"), + ); + + iii.register_trigger( + IIITrigger::Http(HttpTriggerConfig::new("/orders/pay").method(HttpMethod::Post)) + .for_function("orders::submit-payment"), + ) + .expect("failed"); + + // --- + // 3. Redrive DLQ messages back to the source queue via SDK + // Calls the built-in iii::queue::redrive function. Returns the queue name + // and the count of redriven messages. 
+ // --- + let iii_clone = iii.clone(); + iii.register_function( + RegisterFunction::new_async("admin::redrive-payments", move |_: serde_json::Value| { + let iii = iii_clone.clone(); + async move { + let logger = Logger::new(); + + let result = iii + .trigger(TriggerRequest { + function_id: "iii::queue::redrive".into(), + payload: json!({ "queue": "payment" }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + logger.info("Redrive complete", &json!({ "queue": result["queue"], "redriven": result["redriven"] })); + Ok(result) + } + }) + .description("Redrive payment DLQ messages"), + ); + + iii.register_trigger( + IIITrigger::Http(HttpTriggerConfig::new("/admin/redrive/payments").method(HttpMethod::Post)) + .for_function("admin::redrive-payments"), + ) + .expect("failed"); + + // CLI alternative for redrive (run from terminal): + // iii trigger --function-id='iii::queue::redrive' --payload='{"queue": "payment"}' + // iii trigger --function-id='iii::queue::redrive' --payload='{"queue": "payment"}' --timeout-ms=60000 + + // --- + // 4. 
DLQ inspection pattern - check how many messages are stuck + // --- + let iii_clone = iii.clone(); + iii.register_function( + RegisterFunction::new_async("admin::dlq-status", move |_: serde_json::Value| { + let iii = iii_clone.clone(); + async move { + let logger = Logger::new(); + + let queues = vec!["payment", "email"]; + let mut statuses = Vec::new(); + + for queue in queues { + let info = iii + .trigger(TriggerRequest { + function_id: "iii::queue::status".into(), + payload: json!({ "queue": queue }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + logger.info("Queue status", &json!({ + "queue": queue, + "dlq_count": info["dlq_count"], + "pending": info["pending"], + })); + + statuses.push(json!({ + "queue": queue, + "dlq_count": info["dlq_count"], + "pending": info["pending"], + })); + } + + Ok(json!({ "queues": statuses })) + } + }) + .description("Inspect DLQ status for all queues"), + ); + + iii.register_trigger( + IIITrigger::Http(HttpTriggerConfig::new("/admin/dlq/status").method(HttpMethod::Get)) + .for_function("admin::dlq-status"), + ) + .expect("failed"); + + // --- + // 5. Targeted redrive - redrive a single queue from a cron schedule + // Useful for automatically retrying failed messages every hour. 
+    // ---
+    let iii_clone = iii.clone();
+    iii.register_function(
+        RegisterFunction::new_async("admin::auto-redrive", move |_: serde_json::Value| {
+            let iii = iii_clone.clone();
+            async move {
+                let logger = Logger::new();
+
+                let result = iii
+                    .trigger(TriggerRequest {
+                        function_id: "iii::queue::redrive".into(),
+                        payload: json!({ "queue": "payment" }),
+                        action: None,
+                        timeout_ms: None,
+                    })
+                    .await
+                    .map_err(|e| e.to_string())?;
+
+                let redriven = result["redriven"].as_u64().unwrap_or(0);
+                if redriven > 0 {
+                    logger.info("Auto-redrive recovered messages", &json!({ "redriven": redriven }));
+                }
+
+                Ok(result)
+            }
+        })
+        .description("Auto-redrive payment DLQ every hour"),
+    );
+
+    iii.register_trigger(
+        IIITrigger::Cron(CronTriggerConfig::new("0 0 * * * * *")) // every hour (7-field)
+            .for_function("admin::auto-redrive"),
+    )
+    .expect("failed");
+
+    tokio::runtime::Runtime::new().unwrap().block_on(async {
+        tokio::signal::ctrl_c().await.ok();
+    });
+    iii.shutdown();
+}
diff --git a/skills/references/effect-system.js b/skills/references/effect-system.js
new file mode 100644
index 000000000..46b9411db
--- /dev/null
+++ b/skills/references/effect-system.js
@@ -0,0 +1,154 @@
+/**
+ * Pattern: Effect Systems & Typed Functional Infrastructure
+ * Comparable to: Effect-TS
+ *
+ * Demonstrates composable, pipeable function chains where each step
+ * is a pure(ish) function registered in iii. Steps are composed
+ * by calling one function from another, building a pipeline that
+ * is traceable, retryable, and observable end-to-end.
+ * + * How-to references: + * - Functions & Triggers: https://iii.dev/docs/how-to/use-functions-and-triggers + * - HTTP endpoints: https://iii.dev/docs/how-to/expose-http-endpoint + */ + +import { registerWorker, Logger, TriggerAction } from 'iii-sdk' + +const iii = registerWorker(process.env.III_ENGINE_URL || 'ws://localhost:49134', { + workerName: 'effect-system', +}) + +// --------------------------------------------------------------------------- +// Primitive effects — small, composable functions (like Effect-TS layers) +// --------------------------------------------------------------------------- + +// Effect: validate and parse input +iii.registerFunction({ id: 'fx::parse-user-input' }, async (data) => { + if (!data.email || !data.email.includes('@')) { + throw new Error('ValidationError: invalid email') + } + if (!data.name || data.name.trim().length < 2) { + throw new Error('ValidationError: name too short') + } + + return { + email: data.email.toLowerCase().trim(), + name: data.name.trim(), + source: data.source || 'unknown', + } +}) + +// Effect: check for duplicates +iii.registerFunction({ id: 'fx::check-duplicate' }, async (data) => { + const existing = await iii.trigger({ + function_id: 'state::get', + payload: { scope: 'users', key: data.email }, + }) + + if (existing) { + throw new Error(`DuplicateError: ${data.email} already registered`) + } + + return data // pass through +}) + +// Effect: enrich with defaults +iii.registerFunction({ id: 'fx::enrich-user' }, async (data) => { + return { + ...data, + id: `usr-${Date.now()}`, + role: 'member', + created_at: new Date().toISOString(), + preferences: { theme: 'light', notifications: true }, + } +}) + +// Effect: persist to state +iii.registerFunction({ id: 'fx::persist-user' }, async (data) => { + await iii.trigger({ + function_id: 'state::set', + payload: { scope: 'users', key: data.email, value: { _key: data.email, ...data } }, + }) + return data +}) + +// Effect: send welcome notification 
(fire-and-forget side effect) +iii.registerFunction({ id: 'fx::send-welcome' }, async (data) => { + const logger = new Logger() + logger.info('Sending welcome email', { to: data.email }) + + iii.trigger({ + function_id: 'publish', + payload: { + topic: 'notifications.send', + data: { + type: 'email', + to: data.email, + template: 'welcome', + vars: { name: data.name }, + }, + }, + action: TriggerAction.Void(), + }) + + return data +}) + +// --------------------------------------------------------------------------- +// Pipeline — compose effects into a single workflow (like Effect.pipe) +// +// Each step calls the next via iii.trigger, which gives us: +// - Full distributed tracing across all steps +// - Each step is independently testable and retryable +// - Errors propagate cleanly (thrown errors bubble up) +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'fx::register-user-pipeline' }, async (rawInput) => { + const logger = new Logger() + logger.info('Starting registration pipeline') + + // pipe: parse → check duplicate → enrich → persist → welcome + const parsed = await iii.trigger({ function_id: 'fx::parse-user-input', payload: rawInput }) + const checked = await iii.trigger({ function_id: 'fx::check-duplicate', payload: parsed }) + const enriched = await iii.trigger({ function_id: 'fx::enrich-user', payload: checked }) + const saved = await iii.trigger({ function_id: 'fx::persist-user', payload: enriched }) + await iii.trigger({ function_id: 'fx::send-welcome', payload: saved }) + + logger.info('Pipeline complete', { userId: saved.id }) + return { id: saved.id, email: saved.email, status: 'registered' } +}) + +iii.registerTrigger({ + type: 'http', + function_id: 'fx::register-user-pipeline', + config: { api_path: '/users/register', http_method: 'POST' }, +}) + +// --------------------------------------------------------------------------- +// Composition — reuse the same primitives in a different 
pipeline +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'fx::import-users-batch' }, async (data) => { + const logger = new Logger() + const results = { succeeded: 0, failed: 0, errors: [] } + + for (const user of data.users) { + try { + const parsed = await iii.trigger({ function_id: 'fx::parse-user-input', payload: user }) + const checked = await iii.trigger({ function_id: 'fx::check-duplicate', payload: parsed }) + const enriched = await iii.trigger({ function_id: 'fx::enrich-user', payload: checked }) + await iii.trigger({ function_id: 'fx::persist-user', payload: enriched }) + results.succeeded++ + } catch (err) { + results.failed++ + results.errors.push({ email: user.email, error: err.message }) + } + } + + logger.info('Batch import complete', results) + return results +}) + +iii.registerTrigger({ + type: 'http', + function_id: 'fx::import-users-batch', + config: { api_path: '/users/import', http_method: 'POST' }, +}) diff --git a/skills/references/event-driven-cqrs.js b/skills/references/event-driven-cqrs.js new file mode 100644 index 000000000..9be8a2d49 --- /dev/null +++ b/skills/references/event-driven-cqrs.js @@ -0,0 +1,203 @@ +/** + * Pattern: Event-Driven / Message Systems (CQRS) + * Comparable to: Kafka, RabbitMQ, CQRS/Event Sourcing systems + * + * Demonstrates CQRS (Command Query Responsibility Segregation) with + * event sourcing. Commands publish domain events via pubsub. Multiple + * read model projections subscribe independently. PubSub handles all + * fan-out — both to projections and downstream notification consumers. 
+ * + * How-to references: + * - Queues: https://iii.dev/docs/how-to/use-queues + * - State management: https://iii.dev/docs/how-to/manage-state + * - State reactions: https://iii.dev/docs/how-to/react-to-state-changes + * - HTTP endpoints: https://iii.dev/docs/how-to/expose-http-endpoint + * - PubSub: https://iii.dev/docs/how-to/use-functions-and-triggers + */ + +import { registerWorker, Logger, TriggerAction } from 'iii-sdk' + +const iii = registerWorker(process.env.III_ENGINE_URL || 'ws://localhost:49134', { + workerName: 'event-driven-cqrs', +}) + +// =================================================================== +// WRITE SIDE — Commands that validate + emit domain events +// =================================================================== + +// Command: Add item to inventory +iii.registerFunction({ id: 'cmd::add-inventory-item' }, async (data) => { + const logger = new Logger() + const { sku, name, quantity, price } = data + + if (!sku || !name || quantity <= 0) { + throw new Error('Invalid inventory item') + } + + logger.info('Command: add inventory item', { sku }) + + // Append to the event log (event sourcing) + const event = { + type: 'ItemAdded', + sku, + name, + quantity, + price, + timestamp: new Date().toISOString(), + } + + await appendEvent('inventory', sku, event) + + // Publish domain event for all projections to consume + iii.trigger({ function_id: 'publish', payload: { topic: 'inventory.item-added', data: event }, action: TriggerAction.Void() }) + + return { event: 'ItemAdded', sku } +}) + +// Command: Sell items (reduce stock) +iii.registerFunction({ id: 'cmd::sell-item' }, async (data) => { + const logger = new Logger() + const { sku, quantity } = data + + // Read current state to validate + const item = await iii.trigger({ function_id: 'state::get', payload: { scope: 'inventory-read', key: sku } }) + if (!item) throw new Error(`Item ${sku} not found`) + if (item.stock < quantity) throw new Error(`Insufficient stock: ${item.stock} < 
${quantity}`) + + logger.info('Command: sell item', { sku, quantity }) + + const event = { + type: 'ItemSold', + sku, + quantity, + revenue: quantity * item.price, + timestamp: new Date().toISOString(), + } + + await appendEvent('inventory', sku, event) + + iii.trigger({ function_id: 'publish', payload: { topic: 'inventory.item-sold', data: event }, action: TriggerAction.Void() }) + + return { event: 'ItemSold', sku, remaining: item.stock - quantity } +}) + +// HTTP command endpoints +iii.registerTrigger({ type: 'http', function_id: 'cmd::add-inventory-item', config: { api_path: '/inventory/add', http_method: 'POST' } }) +iii.registerTrigger({ type: 'http', function_id: 'cmd::sell-item', config: { api_path: '/inventory/sell', http_method: 'POST' } }) + +// =================================================================== +// EVENT LOG — append-only event store (event sourcing) +// =================================================================== +async function appendEvent(aggregate, key, event) { + const log = await iii.trigger({ function_id: 'state::get', payload: { scope: 'event-log', key: `${aggregate}:${key}` } }) + const events = log?.events || [] + events.push(event) + + await iii.trigger({ function_id: 'state::set', payload: { + scope: 'event-log', + key: `${aggregate}:${key}`, + value: { _key: `${aggregate}:${key}`, events }, + } }) +} + +// =================================================================== +// READ SIDE — Projections that build query-optimized views from events +// =================================================================== + +// Projection 1: Inventory catalog (current stock levels) +iii.registerFunction({ id: 'proj::catalog-on-add' }, async (event) => { + await iii.trigger({ function_id: 'state::set', payload: { + scope: 'inventory-read', + key: event.sku, + value: { + _key: event.sku, + sku: event.sku, + name: event.name, + price: event.price, + stock: event.quantity, + last_updated: event.timestamp, + }, + } }) +}) + 
+iii.registerFunction({ id: 'proj::catalog-on-sell' }, async (event) => { + await iii.trigger({ function_id: 'state::update', payload: { + scope: 'inventory-read', + key: event.sku, + ops: [ + { type: 'increment', path: 'stock', by: -event.quantity }, + { type: 'set', path: 'last_updated', value: event.timestamp }, + ], + } }) +}) + +// Projection 2: Sales analytics (aggregated metrics) +iii.registerFunction({ id: 'proj::sales-analytics' }, async (event) => { + await iii.trigger({ function_id: 'state::update', payload: { + scope: 'sales-analytics', + key: 'global', + ops: [ + { type: 'increment', path: 'total_sales', by: 1 }, + { type: 'increment', path: 'total_revenue', by: event.revenue }, + { type: 'increment', path: `by_sku.${event.sku}`, by: event.quantity }, + { type: 'set', path: 'last_sale_at', value: event.timestamp }, + ], + } }) +}) + +// Projections subscribe to domain events independently via pubsub +iii.registerTrigger({ type: 'subscribe', function_id: 'proj::catalog-on-add', config: { topic: 'inventory.item-added' } }) +iii.registerTrigger({ type: 'subscribe', function_id: 'proj::catalog-on-sell', config: { topic: 'inventory.item-sold' } }) +iii.registerTrigger({ type: 'subscribe', function_id: 'proj::sales-analytics', config: { topic: 'inventory.item-sold' } }) + +// =================================================================== +// FAN-OUT — PubSub notifications to downstream systems +// =================================================================== +iii.registerFunction({ id: 'notify::low-stock-alert' }, async (event) => { + const item = await iii.trigger({ function_id: 'state::get', payload: { scope: 'inventory-read', key: event.sku } }) + if (item && item.stock <= 5) { + iii.trigger({ function_id: 'publish', payload: { + topic: 'alerts.low-stock', + data: { sku: event.sku, name: item.name, remaining: item.stock }, + }, action: TriggerAction.Void() }) + } +}) + +iii.registerTrigger({ + type: 'subscribe', + function_id: 
'notify::low-stock-alert', + config: { topic: 'inventory.item-sold' }, +}) + +// Fan-out subscriber: could be a separate service listening for alerts +iii.registerFunction({ id: 'notify::slack-low-stock' }, async (data) => { + const logger = new Logger() + logger.warn('LOW STOCK ALERT', { sku: data.sku, remaining: data.remaining }) + // In production: POST to Slack webhook, send email, page oncall, etc. +}) + +iii.registerTrigger({ + type: 'subscribe', + function_id: 'notify::slack-low-stock', + config: { topic: 'alerts.low-stock' }, +}) + +// =================================================================== +// QUERY ENDPOINTS — read from projections (not the event log) +// =================================================================== +iii.registerFunction({ id: 'query::catalog' }, async () => { + return await iii.trigger({ function_id: 'state::list', payload: { scope: 'inventory-read' } }) +}) + +iii.registerFunction({ id: 'query::sales-analytics' }, async () => { + return await iii.trigger({ function_id: 'state::get', payload: { scope: 'sales-analytics', key: 'global' } }) +}) + +iii.registerFunction({ id: 'query::event-history' }, async (data) => { + const log = await iii.trigger({ function_id: 'state::get', payload: { scope: 'event-log', key: `inventory:${data.sku}` } }) + return log?.events || [] +}) + +iii.registerTrigger({ type: 'http', function_id: 'query::catalog', config: { api_path: '/inventory', http_method: 'GET' } }) +iii.registerTrigger({ type: 'http', function_id: 'query::sales-analytics', config: { api_path: '/inventory/analytics', http_method: 'GET' } }) +iii.registerTrigger({ type: 'http', function_id: 'query::event-history', config: { api_path: '/inventory/history', http_method: 'GET' } }) diff --git a/skills/references/functions-and-triggers.js b/skills/references/functions-and-triggers.js new file mode 100644 index 000000000..2537dc1b4 --- /dev/null +++ b/skills/references/functions-and-triggers.js @@ -0,0 +1,161 @@ +/** + * Pattern: 
Functions & Triggers + * Comparable to: Core primitives of iii + * + * Demonstrates every fundamental building block: registering functions, + * binding triggers of each built-in type (http, queue, cron, state, subscribe), + * cross-function invocation, fire-and-forget calls, and external HTTP-invoked + * functions via HttpInvocationConfig. + * + * How-to references: + * - Functions & Triggers: https://iii.dev/docs/how-to/use-functions-and-triggers + */ + +import { registerWorker, Logger, TriggerAction } from 'iii-sdk' + +const iii = registerWorker(process.env.III_ENGINE_URL || 'ws://localhost:49134', { + workerName: 'functions-and-triggers', +}) + +// --------------------------------------------------------------------------- +// 1. Register a simple function +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'orders::validate' }, async (data) => { + const logger = new Logger() + logger.info('Validating order', { orderId: data.order_id }) + + if (!data.order_id || !data.items?.length) { + return { valid: false, reason: 'Missing order_id or items' } + } + return { valid: true, order_id: data.order_id } +}) + +// --------------------------------------------------------------------------- +// 2. HTTP trigger — expose a function as a REST endpoint +// --------------------------------------------------------------------------- +iii.registerTrigger({ + type: 'http', + function_id: 'orders::validate', + config: { api_path: '/orders/validate', http_method: 'POST' }, +}) + +// --------------------------------------------------------------------------- +// 3. Queue trigger — process items from a named queue +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'orders::fulfill' }, async (data) => { + const logger = new Logger() + logger.info('Fulfilling order', { orderId: data.order_id }) + // ... 
fulfillment logic
+  return { fulfilled: true, order_id: data.order_id }
+})
+
+iii.registerTrigger({
+  type: 'queue',
+  function_id: 'orders::fulfill',
+  config: { queue: 'fulfillment' },
+})
+
+// ---------------------------------------------------------------------------
+// 4. Cron trigger — run a function on a schedule
+// ---------------------------------------------------------------------------
+iii.registerFunction({ id: 'reports::daily-summary' }, async () => {
+  const logger = new Logger()
+  logger.info('Generating daily summary')
+  return { generated_at: new Date().toISOString() }
+})
+
+iii.registerTrigger({
+  type: 'cron',
+  function_id: 'reports::daily-summary',
+  config: { expression: '0 0 9 * * * *' }, // every day at 09:00 (7-field: sec min hour dom month dow year)
+})
+
+// ---------------------------------------------------------------------------
+// 5. State trigger — react when a state scope/key changes
+// ---------------------------------------------------------------------------
+iii.registerFunction({ id: 'orders::on-status-change' }, async (data) => {
+  const logger = new Logger()
+  logger.info('Order status changed', { key: data.key, value: data.value })
+  return { notified: true }
+})
+
+iii.registerTrigger({
+  type: 'state',
+  function_id: 'orders::on-status-change',
+  config: { scope: 'orders' }, // fires on any key change within scope
+})
+
+// ---------------------------------------------------------------------------
+// 6. 
Subscribe trigger — listen for pubsub messages on a topic +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'notifications::on-order-complete' }, async (data) => { + const logger = new Logger() + logger.info('Order completed event received', { orderId: data.order_id }) + return { processed: true } +}) + +iii.registerTrigger({ + type: 'subscribe', + function_id: 'notifications::on-order-complete', + config: { topic: 'orders.completed' }, +}) + +// --------------------------------------------------------------------------- +// 7. Cross-function invocation — one function calling another +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'orders::create' }, async (data) => { + const logger = new Logger() + + // Synchronous call — blocks until validate returns + const validation = await iii.trigger({ + function_id: 'orders::validate', + payload: { order_id: data.order_id, items: data.items }, + }) + + if (!validation.valid) { + return { error: validation.reason } + } + + // Fire-and-forget — send a notification without waiting + iii.trigger({ + function_id: 'notifications::on-order-complete', + payload: { order_id: data.order_id }, + action: TriggerAction.Void(), + }) + + // Enqueue — durable async handoff to fulfillment + await iii.trigger({ + function_id: 'orders::fulfill', + payload: { order_id: data.order_id, items: data.items }, + action: TriggerAction.Enqueue({ queue: 'fulfillment' }), + }) + + return { order_id: data.order_id, status: 'accepted' } +}) + +iii.registerTrigger({ + type: 'http', + function_id: 'orders::create', + config: { api_path: '/orders', http_method: 'POST' }, +}) + +// --------------------------------------------------------------------------- +// 8. External HTTP-invoked function (HttpInvocationConfig) +// Wraps a third-party API as an iii function so other functions can call it +// with iii.trigger() like any internal function. 
+// --------------------------------------------------------------------------- +iii.registerFunction( + { + id: 'external::payment-gateway', + invocation: { + url: 'https://api.stripe.com/v1/charges', + method: 'POST', + timeout_ms: 10000, + auth: { + type: 'bearer', + token: process.env.STRIPE_API_KEY, + }, + }, + }, + // No handler needed — the engine proxies the call to the external URL +) diff --git a/skills/references/functions-and-triggers.py b/skills/references/functions-and-triggers.py new file mode 100644 index 000000000..32d73fce5 --- /dev/null +++ b/skills/references/functions-and-triggers.py @@ -0,0 +1,172 @@ +""" +Pattern: Functions & Triggers (Python) +Comparable to: Core primitives of iii + +Demonstrates every fundamental building block in Python: registering functions, +binding triggers of each built-in type (http, queue, cron, state, subscribe), +cross-function invocation, fire-and-forget calls, and external HTTP-invoked +functions via HttpInvocationConfig. + +How-to references: + - Functions & Triggers: https://iii.dev/docs/how-to/use-functions-and-triggers +""" + +import asyncio +import os + +from iii import InitOptions, Logger, TriggerAction, register_worker + +engine_url = os.environ.get("III_ENGINE_URL", "ws://localhost:49134") +iii = register_worker( + address=engine_url, + options=InitOptions(worker_name="functions-and-triggers"), +) + +# --------------------------------------------------------------------------- +# 1. 
Register a simple function +# --------------------------------------------------------------------------- +async def validate_order(data): + logger = Logger(service_name="orders::validate") + logger.info("Validating order", {"order_id": data.get("order_id")}) + + if not data.get("order_id") or not data.get("items"): + return {"valid": False, "reason": "Missing order_id or items"} + return {"valid": True, "order_id": data["order_id"]} + +iii.register_function("orders::validate", validate_order) + +# --------------------------------------------------------------------------- +# 2. HTTP trigger — expose a function as a REST endpoint +# --------------------------------------------------------------------------- +iii.register_trigger({ + "type": "http", + "function_id": "orders::validate", + "config": {"api_path": "/orders/validate", "http_method": "POST"}, +}) + +# --------------------------------------------------------------------------- +# 3. Queue trigger — process items from a named queue +# --------------------------------------------------------------------------- +async def fulfill_order(data): + logger = Logger(service_name="orders::fulfill") + logger.info("Fulfilling order", {"order_id": data.get("order_id")}) + return {"fulfilled": True, "order_id": data["order_id"]} + +iii.register_function("orders::fulfill", fulfill_order) + +iii.register_trigger({ + "type": "queue", + "function_id": "orders::fulfill", + "config": {"queue": "fulfillment"}, +}) + +# --------------------------------------------------------------------------- +# 4. 
Cron trigger — run a function on a schedule
+# ---------------------------------------------------------------------------
+async def daily_summary(_data):
+    logger = Logger(service_name="reports::daily-summary")
+    logger.info("Generating daily summary")
+    return {"generated_at": "now"}
+
+iii.register_function("reports::daily-summary", daily_summary)
+
+iii.register_trigger({
+    "type": "cron",
+    "function_id": "reports::daily-summary",
+    "config": {"expression": "0 0 9 * * * *"},
+})
+
+# ---------------------------------------------------------------------------
+# 5. State trigger — react when a state scope/key changes
+# ---------------------------------------------------------------------------
+async def on_status_change(data):
+    logger = Logger(service_name="orders::on-status-change")
+    logger.info("Order status changed", {"key": data.get("key"), "value": data.get("value")})
+    return {"notified": True}
+
+iii.register_function("orders::on-status-change", on_status_change)
+
+iii.register_trigger({
+    "type": "state",
+    "function_id": "orders::on-status-change",
+    "config": {"scope": "orders"},
+})
+
+# ---------------------------------------------------------------------------
+# 6. Subscribe trigger — listen for pubsub messages on a topic
+# ---------------------------------------------------------------------------
+async def on_order_complete(data):
+    logger = Logger(service_name="notifications::on-order-complete")
+    logger.info("Order completed event received", {"order_id": data.get("order_id")})
+    return {"processed": True}
+
+iii.register_function("notifications::on-order-complete", on_order_complete)
+
+iii.register_trigger({
+    "type": "subscribe",
+    "function_id": "notifications::on-order-complete",
+    "config": {"topic": "orders.completed"},
+})
+
+# ---------------------------------------------------------------------------
+# 7. 
Cross-function invocation — one function calling another +# --------------------------------------------------------------------------- +async def create_order(data): + logger = Logger(service_name="orders::create") + + # Synchronous call — blocks until validate returns + validation = await iii.trigger_async({ + "function_id": "orders::validate", + "payload": {"order_id": data.get("order_id"), "items": data.get("items")}, + }) + + if not validation.get("valid"): + return {"error": validation.get("reason")} + + # Fire-and-forget — send a notification without waiting + await iii.trigger_async({ + "function_id": "notifications::on-order-complete", + "payload": {"order_id": data.get("order_id")}, + "action": TriggerAction.Void(), + }) + + # Enqueue — durable async handoff to fulfillment + await iii.trigger_async({ + "function_id": "orders::fulfill", + "payload": {"order_id": data.get("order_id"), "items": data.get("items")}, + "action": TriggerAction.Enqueue({"queue": "fulfillment"}), + }) + + return {"order_id": data.get("order_id"), "status": "accepted"} + +iii.register_function("orders::create", create_order) + +iii.register_trigger({ + "type": "http", + "function_id": "orders::create", + "config": {"api_path": "/orders", "http_method": "POST"}, +}) + +# --------------------------------------------------------------------------- +# 8. 
External HTTP-invoked function (HttpInvocationConfig) +# --------------------------------------------------------------------------- +iii.register_function({ + "id": "external::payment-gateway", + "invocation": { + "url": "https://api.stripe.com/v1/charges", + "method": "POST", + "timeout_ms": 10000, + "auth": { + "type": "bearer", + "token": os.environ.get("STRIPE_API_KEY", ""), + }, + }, +}) + +# Keep the process alive for event processing +async def main(): + while True: + await asyncio.sleep(60) + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/skills/references/functions-and-triggers.rs b/skills/references/functions-and-triggers.rs new file mode 100644 index 000000000..5d1e1574d --- /dev/null +++ b/skills/references/functions-and-triggers.rs @@ -0,0 +1,218 @@ +/** + * Pattern: Functions & Triggers (Rust) + * Comparable to: Core primitives of iii + * + * Demonstrates every fundamental building block in Rust: registering functions + * with the RegisterFunction builder, binding triggers of each built-in type + * (http, queue, cron, state, subscribe), cross-function invocation, and + * fire-and-forget calls. 
+ *
+ * How-to references:
+ *   - Functions & Triggers: https://iii.dev/docs/how-to/use-functions-and-triggers
+ */
+
+use std::time::Duration;
+
+use iii_sdk::{
+    InitOptions, RegisterFunction, TriggerRequest, TriggerAction,
+    register_worker,
+    builtin_triggers::*,
+    IIITrigger,
+};
+use serde_json::json;
+
+// ---------------------------------------------------------------------------
+// Typed request structs — derive JsonSchema for auto-generated request format
+// ---------------------------------------------------------------------------
+
+#[derive(serde::Deserialize, schemars::JsonSchema)]
+struct ValidateOrderInput {
+    order_id: String,
+    items: Vec<serde_json::Value>,
+}
+
+#[derive(serde::Deserialize, schemars::JsonSchema)]
+struct FulfillOrderInput {
+    order_id: String,
+    items: Vec<serde_json::Value>,
+}
+
+#[derive(serde::Deserialize, schemars::JsonSchema)]
+struct CreateOrderInput {
+    order_id: String,
+    items: Vec<serde_json::Value>,
+}
+
+#[derive(serde::Deserialize, schemars::JsonSchema)]
+struct StateChangeEvent {
+    key: String,
+    value: serde_json::Value,
+}
+
+// ---------------------------------------------------------------------------
+// Handlers
+// ---------------------------------------------------------------------------
+
+fn validate_order(input: ValidateOrderInput) -> Result<serde_json::Value, String> {
+    if input.order_id.is_empty() || input.items.is_empty() {
+        return Ok(json!({ "valid": false, "reason": "Missing order_id or items" }));
+    }
+    Ok(json!({ "valid": true, "order_id": input.order_id }))
+}
+
+fn fulfill_order(input: FulfillOrderInput) -> Result<serde_json::Value, String> {
+    Ok(json!({ "fulfilled": true, "order_id": input.order_id }))
+}
+
+fn on_status_change(input: StateChangeEvent) -> Result<serde_json::Value, String> {
+    Ok(json!({ "notified": true, "key": input.key }))
+}
+
+fn on_order_complete(input: serde_json::Value) -> Result<serde_json::Value, String> {
+    Ok(json!({ "processed": true, "event": input }))
+}
+
+fn daily_summary(_input: serde_json::Value) -> Result<serde_json::Value, String> {
+    Ok(json!({ "generated_at": "now" }))
+}
+
+#[tokio::main]
+async fn main() -> Result<(), Box<dyn std::error::Error>> {
+    let url 
= std::env::var("III_ENGINE_URL").unwrap_or("ws://127.0.0.1:49134".into()); + let iii = register_worker(&url, InitOptions::default()); + + // ----------------------------------------------------------------------- + // 1. Register a simple function with the builder API + // ----------------------------------------------------------------------- + iii.register_function( + RegisterFunction::new("orders::validate", validate_order) + .description("Validate an incoming order"), + ); + + // ----------------------------------------------------------------------- + // 2. HTTP trigger — expose a function as a REST endpoint + // ----------------------------------------------------------------------- + iii.register_trigger( + IIITrigger::Http(HttpTriggerConfig::new("/orders/validate").method(HttpMethod::Post)) + .for_function("orders::validate"), + ) + .expect("failed to register http trigger"); + + // ----------------------------------------------------------------------- + // 3. Queue trigger — process items from a named queue + // ----------------------------------------------------------------------- + iii.register_function( + RegisterFunction::new("orders::fulfill", fulfill_order) + .description("Fulfill a validated order"), + ); + + iii.register_trigger( + IIITrigger::Queue(QueueTriggerConfig::new("fulfillment")) + .for_function("orders::fulfill"), + ) + .expect("failed to register queue trigger"); + + // ----------------------------------------------------------------------- + // 4. 
Cron trigger — run a function on a schedule
+    // -----------------------------------------------------------------------
+    iii.register_function(
+        RegisterFunction::new("reports::daily-summary", daily_summary)
+            .description("Generate daily summary report"),
+    );
+
+    iii.register_trigger(
+        IIITrigger::Cron(CronTriggerConfig::new("0 0 9 * * * *"))
+            .for_function("reports::daily-summary"),
+    )
+    .expect("failed to register cron trigger");
+
+    // -----------------------------------------------------------------------
+    // 5. State trigger — react when a state scope/key changes
+    // -----------------------------------------------------------------------
+    iii.register_function(
+        RegisterFunction::new("orders::on-status-change", on_status_change)
+            .description("React to order status changes"),
+    );
+
+    iii.register_trigger(
+        IIITrigger::State(StateTriggerConfig::new().scope("orders"))
+            .for_function("orders::on-status-change"),
+    )
+    .expect("failed to register state trigger");
+
+    // -----------------------------------------------------------------------
+    // 6. Subscribe trigger — listen for pubsub messages on a topic
+    // -----------------------------------------------------------------------
+    iii.register_function(
+        RegisterFunction::new("notifications::on-order-complete", on_order_complete)
+            .description("Process order completion events"),
+    );
+
+    iii.register_trigger(
+        IIITrigger::Subscribe(SubscribeTriggerConfig::new("orders.completed"))
+            .for_function("notifications::on-order-complete"),
+    )
+    .expect("failed to register subscribe trigger");
+
+    // -----------------------------------------------------------------------
+    // 7. 
Cross-function invocation — one function calling another + // ----------------------------------------------------------------------- + let iii_clone = iii.clone(); + iii.register_function(RegisterFunction::new_async( + "orders::create", + move |input: CreateOrderInput| { + let iii = iii_clone.clone(); + async move { + // Synchronous call — blocks until validate returns + let validation = iii + .trigger(TriggerRequest { + function_id: "orders::validate".into(), + payload: json!({ "order_id": input.order_id, "items": input.items }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + if validation["valid"] == false { + return Ok(json!({ "error": validation["reason"] })); + } + + // Fire-and-forget — send a notification without waiting + let _ = iii + .trigger(TriggerRequest { + function_id: "notifications::on-order-complete".into(), + payload: json!({ "order_id": input.order_id }), + action: Some(TriggerAction::Void), + timeout_ms: None, + }) + .await; + + // Enqueue — durable async handoff to fulfillment + iii.trigger(TriggerRequest { + function_id: "orders::fulfill".into(), + payload: json!({ "order_id": input.order_id, "items": input.items }), + action: Some(TriggerAction::Enqueue { + queue: "fulfillment".into(), + }), + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + Ok(json!({ "order_id": input.order_id, "status": "accepted" })) + } + }, + )); + + iii.register_trigger( + IIITrigger::Http(HttpTriggerConfig::new("/orders").method(HttpMethod::Post)) + .for_function("orders::create"), + ) + .expect("failed to register http trigger"); + + // Keep the process alive for event processing + tokio::time::sleep(Duration::from_secs(u64::MAX)).await; + iii.shutdown(); + Ok(()) +} diff --git a/skills/references/http-endpoints.js b/skills/references/http-endpoints.js new file mode 100644 index 000000000..62e698d3d --- /dev/null +++ b/skills/references/http-endpoints.js @@ -0,0 +1,132 @@ +/** + * Pattern: HTTP Endpoints + * 
Comparable to: Express, Fastify, Flask + * + * Exposes RESTful HTTP endpoints backed by iii functions. + * Each handler receives an ApiRequest object and returns + * { status_code, body, headers }. + * + * How-to references: + * - HTTP endpoints: https://iii.dev/docs/how-to/expose-http-endpoint + */ + +import { registerWorker, Logger, TriggerAction } from 'iii-sdk' + +const iii = registerWorker(process.env.III_ENGINE_URL || 'ws://localhost:49134', { + workerName: 'http-endpoints', +}) + +// --------------------------------------------------------------------------- +// POST /users — Create a new user +// ApiRequest: { body, path_params, headers, method } +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'users::create' }, async (req) => { + const logger = new Logger() + const { name, email } = req.body + const id = `usr-${Date.now()}` + + const user = { id, name, email, created_at: new Date().toISOString() } + + await iii.trigger({ + function_id: 'state::set', + payload: { scope: 'users', key: id, value: user }, + }) + + logger.info('User created', { id, email }) + + return { status_code: 201, body: user, headers: { 'Content-Type': 'application/json' } } +}) + +// --------------------------------------------------------------------------- +// GET /users/:id — Retrieve a user by path parameter +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'users::get-by-id' }, async (req) => { + const { id } = req.path_params + + const user = await iii.trigger({ + function_id: 'state::get', + payload: { scope: 'users', key: id }, + }) + + if (!user) { + return { status_code: 404, body: { error: 'User not found' } } + } + + return { status_code: 200, body: user } +}) + +// --------------------------------------------------------------------------- +// GET /users — List all users +// --------------------------------------------------------------------------- 
+iii.registerFunction({ id: 'users::list' }, async () => { + const users = await iii.trigger({ + function_id: 'state::list', + payload: { scope: 'users' }, + }) + + return { status_code: 200, body: users } +}) + +// --------------------------------------------------------------------------- +// PUT /users/:id — Update an existing user +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'users::update' }, async (req) => { + const { id } = req.path_params + const updates = req.body + + const existing = await iii.trigger({ + function_id: 'state::get', + payload: { scope: 'users', key: id }, + }) + + if (!existing) { + return { status_code: 404, body: { error: 'User not found' } } + } + + const ops = Object.entries(updates).map(([path, value]) => ({ + type: 'set', + path, + value, + })) + + ops.push({ type: 'set', path: 'updated_at', value: new Date().toISOString() }) + + await iii.trigger({ + function_id: 'state::update', + payload: { scope: 'users', key: id, ops }, + }) + + return { status_code: 200, body: { id, ...updates } } +}) + +// --------------------------------------------------------------------------- +// DELETE /users/:id — Remove a user +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'users::delete' }, async (req) => { + const { id } = req.path_params + + const existing = await iii.trigger({ + function_id: 'state::get', + payload: { scope: 'users', key: id }, + }) + + if (!existing) { + return { status_code: 404, body: { error: 'User not found' } } + } + + await iii.trigger({ + function_id: 'state::delete', + payload: { scope: 'users', key: id }, + }) + + return { status_code: 204, body: null } +}) + +// --------------------------------------------------------------------------- +// HTTP trigger registrations +// --------------------------------------------------------------------------- +iii.registerTrigger({ type: 'http', function_id: 
'users::create', config: { api_path: '/users', http_method: 'POST' } }) +iii.registerTrigger({ type: 'http', function_id: 'users::get-by-id', config: { api_path: '/users/:id', http_method: 'GET' } }) +iii.registerTrigger({ type: 'http', function_id: 'users::list', config: { api_path: '/users', http_method: 'GET' } }) +iii.registerTrigger({ type: 'http', function_id: 'users::update', config: { api_path: '/users/:id', http_method: 'PUT' } }) +iii.registerTrigger({ type: 'http', function_id: 'users::delete', config: { api_path: '/users/:id', http_method: 'DELETE' } }) diff --git a/skills/references/http-endpoints.py b/skills/references/http-endpoints.py new file mode 100644 index 000000000..86ab91a90 --- /dev/null +++ b/skills/references/http-endpoints.py @@ -0,0 +1,160 @@ +""" +Pattern: HTTP Endpoints +Comparable to: Express, Fastify, Flask + +Exposes RESTful HTTP endpoints backed by iii functions. +Each handler receives an ApiRequest object and returns +{ status_code, body, headers }. + +How-to references: + - HTTP endpoints: https://iii.dev/docs/how-to/expose-http-endpoint +""" + +import asyncio +import os +import time +from datetime import datetime, timezone + +from iii import InitOptions, Logger, TriggerAction, register_worker + +iii = register_worker( + address=os.environ.get("III_ENGINE_URL", "ws://localhost:49134"), + options=InitOptions(worker_name="http-endpoints"), +) + +# --- +# POST /users — Create a new user +# ApiRequest: { body, path_params, headers, method } +# --- + + +async def users_create(req): + logger = Logger() + name = req["body"]["name"] + email = req["body"]["email"] + id = f"usr-{int(time.time() * 1000)}" + + user = {"id": id, "name": name, "email": email, "created_at": datetime.now(timezone.utc).isoformat()} + + await iii.trigger_async({ + "function_id": "state::set", + "payload": {"scope": "users", "key": id, "value": user}, + }) + + logger.info("User created", {"id": id, "email": email}) + + return {"status_code": 201, "body": user, 
"headers": {"Content-Type": "application/json"}} + + +iii.register_function("users::create", users_create) + +# --- +# GET /users/:id — Retrieve a user by path parameter +# --- + + +async def users_get_by_id(req): + id = req["path_params"]["id"] + + user = await iii.trigger_async({ + "function_id": "state::get", + "payload": {"scope": "users", "key": id}, + }) + + if not user: + return {"status_code": 404, "body": {"error": "User not found"}} + + return {"status_code": 200, "body": user} + + +iii.register_function("users::get-by-id", users_get_by_id) + +# --- +# GET /users — List all users +# --- + + +async def users_list(data): + users = await iii.trigger_async({ + "function_id": "state::list", + "payload": {"scope": "users"}, + }) + + return {"status_code": 200, "body": users} + + +iii.register_function("users::list", users_list) + +# --- +# PUT /users/:id — Update an existing user +# --- + + +async def users_update(req): + id = req["path_params"]["id"] + updates = req["body"] + + existing = await iii.trigger_async({ + "function_id": "state::get", + "payload": {"scope": "users", "key": id}, + }) + + if not existing: + return {"status_code": 404, "body": {"error": "User not found"}} + + ops = [{"type": "set", "path": path, "value": value} for path, value in updates.items()] + ops.append({"type": "set", "path": "updated_at", "value": datetime.now(timezone.utc).isoformat()}) + + await iii.trigger_async({ + "function_id": "state::update", + "payload": {"scope": "users", "key": id, "ops": ops}, + }) + + return {"status_code": 200, "body": {"id": id, **updates}} + + +iii.register_function("users::update", users_update) + +# --- +# DELETE /users/:id — Remove a user +# --- + + +async def users_delete(req): + id = req["path_params"]["id"] + + existing = await iii.trigger_async({ + "function_id": "state::get", + "payload": {"scope": "users", "key": id}, + }) + + if not existing: + return {"status_code": 404, "body": {"error": "User not found"}} + + await 
iii.trigger_async({ + "function_id": "state::delete", + "payload": {"scope": "users", "key": id}, + }) + + return {"status_code": 204, "body": None} + + +iii.register_function("users::delete", users_delete) + +# --- +# HTTP trigger registrations +# --- +iii.register_trigger({"type": "http", "function_id": "users::create", "config": {"api_path": "/users", "http_method": "POST"}}) +iii.register_trigger({"type": "http", "function_id": "users::get-by-id", "config": {"api_path": "/users/:id", "http_method": "GET"}}) +iii.register_trigger({"type": "http", "function_id": "users::list", "config": {"api_path": "/users", "http_method": "GET"}}) +iii.register_trigger({"type": "http", "function_id": "users::update", "config": {"api_path": "/users/:id", "http_method": "PUT"}}) +iii.register_trigger({"type": "http", "function_id": "users::delete", "config": {"api_path": "/users/:id", "http_method": "DELETE"}}) + + +async def main(): + while True: + await asyncio.sleep(60) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/skills/references/http-endpoints.rs b/skills/references/http-endpoints.rs new file mode 100644 index 000000000..fb3a95034 --- /dev/null +++ b/skills/references/http-endpoints.rs @@ -0,0 +1,248 @@ +/// Pattern: HTTP Endpoints +/// Comparable to: Express, Fastify, Actix-web +/// +/// Exposes RESTful HTTP endpoints backed by iii functions. +/// Each handler receives an ApiRequest object and returns +/// { status_code, body, headers }. 
+ +use iii_sdk::{ + register_worker, InitOptions, RegisterFunction, TriggerRequest, TriggerAction, + builtin_triggers::*, IIITrigger, Logger, ApiRequest, +}; +use serde_json::json; + +#[derive(serde::Deserialize, schemars::JsonSchema)] +struct CreateUserBody { + name: String, + email: String, +} + +fn main() { + let url = std::env::var("III_ENGINE_URL").unwrap_or("ws://127.0.0.1:49134".into()); + let iii = register_worker(&url, InitOptions::default()); + + // --- + // POST /users - Create a new user + // ApiRequest: { body, path_params, headers, method } + // --- + let iii_clone = iii.clone(); + iii.register_function( + RegisterFunction::new_async("users::create", move |req: ApiRequest| { + let iii = iii_clone.clone(); + async move { + let logger = Logger::new(); + let id = format!("usr-{:x}", std::time::SystemTime::now().duration_since(std::time::UNIX_EPOCH).unwrap().as_nanos()); + + let user = json!({ + "id": id, + "name": req.body.name, + "email": req.body.email, + "created_at": chrono::Utc::now().to_rfc3339(), + }); + + iii.trigger(TriggerRequest { + function_id: "state::set".into(), + payload: json!({ "scope": "users", "key": id, "value": user }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + logger.info("User created", &json!({ "event": "user_created", "id": id })); + + Ok(json!({ + "status_code": 201, + "body": user, + "headers": { "Content-Type": "application/json" }, + })) + } + }) + .description("Create a new user"), + ); + + // --- + // GET /users/:id - Retrieve a user by path parameter + // --- + let iii_clone = iii.clone(); + iii.register_function( + RegisterFunction::new_async("users::get-by-id", move |req: ApiRequest| { + let iii = iii_clone.clone(); + async move { + let id = req.path_params.get("id").cloned().unwrap_or_default(); + + let user = iii + .trigger(TriggerRequest { + function_id: "state::get".into(), + payload: json!({ "scope": "users", "key": id }), + action: None, + timeout_ms: None, + }) + .await 
+                    .map_err(|e| e.to_string())?;
+
+                if user.is_null() {
+                    return Ok(json!({ "status_code": 404, "body": { "error": "User not found" } }));
+                }
+
+                Ok(json!({ "status_code": 200, "body": user }))
+            }
+        })
+        .description("Get user by ID"),
+    );
+
+    // ---
+    // GET /users - List all users
+    // ---
+    let iii_clone = iii.clone();
+    iii.register_function(
+        RegisterFunction::new_async("users::list", move |_: serde_json::Value| {
+            let iii = iii_clone.clone();
+            async move {
+                let users = iii
+                    .trigger(TriggerRequest {
+                        function_id: "state::list".into(),
+                        payload: json!({ "scope": "users" }),
+                        action: None,
+                        timeout_ms: None,
+                    })
+                    .await
+                    .map_err(|e| e.to_string())?;
+
+                Ok(json!({ "status_code": 200, "body": users }))
+            }
+        })
+        .description("List all users"),
+    );
+
+    // ---
+    // PUT /users/:id - Update an existing user
+    // ---
+    let iii_clone = iii.clone();
+    iii.register_function(
+        RegisterFunction::new_async("users::update", move |req: ApiRequest| {
+            let iii = iii_clone.clone();
+            async move {
+                let id = req.path_params.get("id").cloned().unwrap_or_default();
+                let updates = req.body;
+
+                let obj = match updates.as_object() {
+                    Some(o) => o,
+                    None => return Ok(json!({ "status_code": 400, "body": { "error": "Request body must be a JSON object" } })),
+                };
+
+                let existing = iii
+                    .trigger(TriggerRequest {
+                        function_id: "state::get".into(),
+                        payload: json!({ "scope": "users", "key": id }),
+                        action: None,
+                        timeout_ms: None,
+                    })
+                    .await
+                    .map_err(|e| e.to_string())?;
+
+                if existing.is_null() {
+                    return Ok(json!({ "status_code": 404, "body": { "error": "User not found" } }));
+                }
+
+                const IMMUTABLE_FIELDS: &[&str] = &["id", "created_at"];
+                let mut ops: Vec<serde_json::Value> = obj
+                    .iter()
+                    .filter(|(path, _)| !IMMUTABLE_FIELDS.contains(&path.as_str()))
+                    .map(|(path, value)| json!({ "type": "set", "path": path, "value": value }))
+                    .collect();
+
+                ops.push(json!({ "type": "set", "path": "updated_at", "value": chrono::Utc::now().to_rfc3339() }));
+
+                
iii.trigger(TriggerRequest { + function_id: "state::update".into(), + payload: json!({ "scope": "users", "key": id, "ops": ops }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + Ok(json!({ "status_code": 200, "body": { "id": id } })) + } + }) + .description("Update a user"), + ); + + // --- + // DELETE /users/:id - Remove a user + // --- + let iii_clone = iii.clone(); + iii.register_function( + RegisterFunction::new_async("users::delete", move |req: ApiRequest| { + let iii = iii_clone.clone(); + async move { + let id = req.path_params.get("id").cloned().unwrap_or_default(); + + let existing = iii + .trigger(TriggerRequest { + function_id: "state::get".into(), + payload: json!({ "scope": "users", "key": id }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + if existing.is_null() { + return Ok(json!({ "status_code": 404, "body": { "error": "User not found" } })); + } + + iii.trigger(TriggerRequest { + function_id: "state::delete".into(), + payload: json!({ "scope": "users", "key": id }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + Ok(json!({ "status_code": 204, "body": null })) + } + }) + .description("Delete a user"), + ); + + // --- + // HTTP trigger registrations + // --- + iii.register_trigger( + IIITrigger::Http(HttpTriggerConfig::new("/users").method(HttpMethod::Post)) + .for_function("users::create"), + ) + .expect("failed"); + + iii.register_trigger( + IIITrigger::Http(HttpTriggerConfig::new("/users/:id").method(HttpMethod::Get)) + .for_function("users::get-by-id"), + ) + .expect("failed"); + + iii.register_trigger( + IIITrigger::Http(HttpTriggerConfig::new("/users").method(HttpMethod::Get)) + .for_function("users::list"), + ) + .expect("failed"); + + iii.register_trigger( + IIITrigger::Http(HttpTriggerConfig::new("/users/:id").method(HttpMethod::Put)) + .for_function("users::update"), + ) + .expect("failed"); + + iii.register_trigger( + 
IIITrigger::Http(HttpTriggerConfig::new("/users/:id").method(HttpMethod::Delete)) + .for_function("users::delete"), + ) + .expect("failed"); + + tokio::runtime::Runtime::new().unwrap().block_on(async { + tokio::signal::ctrl_c().await.ok(); + }); + iii.shutdown(); +} diff --git a/skills/references/http-invoked-functions.js b/skills/references/http-invoked-functions.js new file mode 100644 index 000000000..da3d7f9ee --- /dev/null +++ b/skills/references/http-invoked-functions.js @@ -0,0 +1,206 @@ +/** + * Pattern: HTTP-Invoked Functions + * + * Registers external HTTP endpoints as iii functions so the engine + * calls them when triggered — no client-side HTTP code needed. + * Combines with cron, state, and queue triggers for reactive integrations. + * + * How-to references: + * - HTTP-invoked functions: https://iii.dev/docs/how-to/use-functions-and-triggers#http-invoked-functions + * - Engine config: https://iii.dev/docs/how-to/configure-engine + * - State management: https://iii.dev/docs/how-to/manage-state + * - Cron: https://iii.dev/docs/how-to/schedule-cron-task + * - Queues: https://iii.dev/docs/how-to/use-queues + * + * Prerequisites: + * - HttpFunctionsModule enabled in iii engine config + * - Env vars: SLACK_WEBHOOK_TOKEN, STRIPE_API_KEY, ORDER_WEBHOOK_SECRET + */ + +import { registerWorker, Logger, TriggerAction } from 'iii-sdk' + +const iii = registerWorker(process.env.III_ENGINE_URL || 'ws://localhost:49134', { + workerName: 'http-invoked-integrations', +}) + +// --------------------------------------------------------------------------- +// Data-driven registration for immutable legacy endpoints +// --------------------------------------------------------------------------- +const legacyBaseUrl = process.env.LEGACY_API_URL || 'https://legacy.internal.example.com' +const legacyEndpoints = [ + { path: '/webhook', id: 'legacy::webhook' }, + { path: '/orders', id: 'legacy::orders' }, +] + +legacyEndpoints.forEach(({ path, id }) => { + iii.registerFunction( + 
{ id, description: `Proxy legacy endpoint ${path}` }, + { + url: `${legacyBaseUrl}${path}`, + method: 'POST', + timeout_ms: 8000, + }, + ) +}) + +// --------------------------------------------------------------------------- +// HTTP-invoked function: Slack webhook (bearer auth) +// --------------------------------------------------------------------------- +iii.registerFunction( + { + id: 'integrations::slack-notify', + description: 'POST notification to Slack webhook', + }, + { + url: 'https://hooks.slack.example.com/services/incoming', + method: 'POST', + timeout_ms: 5000, + headers: { 'Content-Type': 'application/json' }, + auth: { + type: 'bearer', + token_key: 'SLACK_WEBHOOK_TOKEN', + }, + }, +) + +// --------------------------------------------------------------------------- +// HTTP-invoked function: Stripe charges (api_key auth) +// --------------------------------------------------------------------------- +iii.registerFunction( + { + id: 'integrations::stripe-charge', + description: 'Create a charge via Stripe API', + }, + { + url: 'https://api.stripe.example.com/v1/charges', + method: 'POST', + timeout_ms: 10000, + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + auth: { + type: 'api_key', + header_name: 'Authorization', + value_key: 'STRIPE_API_KEY', + }, + }, +) + +// --------------------------------------------------------------------------- +// HTTP-invoked function: Analytics endpoint (no auth) +// --------------------------------------------------------------------------- +iii.registerFunction( + { + id: 'integrations::analytics-track', + description: 'POST event to analytics service', + }, + { + url: 'https://analytics.internal.example.com/events', + method: 'POST', + timeout_ms: 3000, + }, +) + +// --------------------------------------------------------------------------- +// HTTP-invoked function: Order status webhook (hmac auth) +// --------------------------------------------------------------------------- 
+iii.registerFunction( + { + id: 'integrations::order-webhook', + description: 'POST order status change to fulfillment partner', + }, + { + url: 'https://fulfillment.partner.example.com/webhooks/orders', + method: 'POST', + timeout_ms: 5000, + auth: { + type: 'hmac', + secret_key: 'ORDER_WEBHOOK_SECRET', + }, + }, +) + +// --------------------------------------------------------------------------- +// Handler-based function that triggers HTTP-invoked functions +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'orders::process' }, async (data) => { + const logger = new Logger() + + await iii.trigger({ + function_id: 'state::set', + payload: { scope: 'orders', key: data.orderId, value: { ...data, status: 'processing' } }, + }) + + // Charge payment via Stripe (await result) + const chargeResult = await iii.trigger({ + function_id: 'integrations::stripe-charge', + payload: { amount: data.amount, currency: 'usd', source: data.paymentToken }, + }) + + logger.info('Payment charged', { orderId: data.orderId, chargeId: chargeResult.id }) + + await iii.trigger({ + function_id: 'state::set', + payload: { scope: 'orders', key: data.orderId, value: { ...data, status: 'charged' } }, + }) + + // Notify Slack (fire-and-forget) + iii.trigger({ + function_id: 'integrations::slack-notify', + payload: { text: `Order ${data.orderId} charged $${data.amount}` }, + action: TriggerAction.Void(), + }) + + // Track in analytics (fire-and-forget) + iii.trigger({ + function_id: 'integrations::analytics-track', + payload: { event: 'order.charged', properties: { orderId: data.orderId, amount: data.amount } }, + action: TriggerAction.Void(), + }) + + return { orderId: data.orderId, chargeId: chargeResult.id, status: 'charged' } +}) + +// --------------------------------------------------------------------------- +// Trigger: state change → notify fulfillment partner via HTTP-invoked function +// 
--------------------------------------------------------------------------- +iii.registerTrigger({ + type: 'state', + function_id: 'integrations::order-webhook', + config: { scope: 'orders', key: 'status' }, +}) + +// --------------------------------------------------------------------------- +// Trigger: scheduled analytics ping every hour +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'integrations::hourly-heartbeat' }, async () => { + const logger = new Logger() + const workerCount = await iii.trigger({ function_id: 'engine::workers::list', payload: {} }) + + await iii.trigger({ + function_id: 'integrations::analytics-track', + payload: { + event: 'system.heartbeat', + properties: { workers: workerCount.length, timestamp: new Date().toISOString() }, + }, + }) + + logger.info('Hourly heartbeat sent') +}) + +iii.registerTrigger({ + type: 'cron', + function_id: 'integrations::hourly-heartbeat', + config: { expression: '0 0 * * * * *' }, +}) + +// --------------------------------------------------------------------------- +// Trigger: enqueue Stripe charges for reliable delivery with retries +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'orders::enqueue-charge' }, async (data) => { + const result = await iii.trigger({ + function_id: 'integrations::stripe-charge', + payload: { amount: data.amount, currency: 'usd', source: data.paymentToken }, + action: TriggerAction.Enqueue({ queue: 'payments' }), + }) + + return { messageReceiptId: result.messageReceiptId } +}) diff --git a/skills/references/http-invoked-functions.py b/skills/references/http-invoked-functions.py new file mode 100644 index 000000000..1ffa25213 --- /dev/null +++ b/skills/references/http-invoked-functions.py @@ -0,0 +1,216 @@ +""" +Pattern: HTTP-Invoked Functions + +Registers external HTTP endpoints as iii functions so the engine +calls them when triggered — no client-side HTTP 
code needed. +Combines with cron, state, and queue triggers for reactive integrations. + +How-to references: + - HTTP-invoked functions: https://iii.dev/docs/how-to/use-functions-and-triggers#http-invoked-functions + - Engine config: https://iii.dev/docs/how-to/configure-engine + - State management: https://iii.dev/docs/how-to/manage-state + - Cron: https://iii.dev/docs/how-to/schedule-cron-task + - Queues: https://iii.dev/docs/how-to/use-queues + +Prerequisites: + - HttpFunctionsModule enabled in iii engine config + - Env vars: SLACK_WEBHOOK_TOKEN, STRIPE_API_KEY, ORDER_WEBHOOK_SECRET +""" + +import asyncio +import os +from datetime import datetime, timezone + +from iii import InitOptions, Logger, TriggerAction, register_worker + +iii = register_worker( + address=os.environ.get("III_ENGINE_URL", "ws://localhost:49134"), + options=InitOptions(worker_name="http-invoked-integrations"), +) + +# --- +# Data-driven registration for immutable legacy endpoints +# --- +legacy_base_url = os.environ.get("LEGACY_API_URL", "https://legacy.internal.example.com") +legacy_endpoints = [ + {"path": "/webhook", "id": "legacy::webhook"}, + {"path": "/orders", "id": "legacy::orders"}, +] + +for ep in legacy_endpoints: + iii.register_function( + ep["id"], + { + "url": f"{legacy_base_url}{ep['path']}", + "method": "POST", + "timeout_ms": 8000, + }, + ) + +# --- +# HTTP-invoked function: Slack webhook (bearer auth) +# --- +iii.register_function( + "integrations::slack-notify", + { + "url": "https://hooks.slack.example.com/services/incoming", + "method": "POST", + "timeout_ms": 5000, + "headers": {"Content-Type": "application/json"}, + "auth": { + "type": "bearer", + "token_key": "SLACK_WEBHOOK_TOKEN", + }, + }, +) + +# --- +# HTTP-invoked function: Stripe charges (api_key auth) +# --- +iii.register_function( + "integrations::stripe-charge", + { + "url": "https://api.stripe.example.com/v1/charges", + "method": "POST", + "timeout_ms": 10000, + "headers": {"Content-Type": 
"application/x-www-form-urlencoded"}, + "auth": { + "type": "api_key", + "header_name": "Authorization", + "value_key": "STRIPE_API_KEY", + }, + }, +) + +# --- +# HTTP-invoked function: Analytics endpoint (no auth) +# --- +iii.register_function( + "integrations::analytics-track", + { + "url": "https://analytics.internal.example.com/events", + "method": "POST", + "timeout_ms": 3000, + }, +) + +# --- +# HTTP-invoked function: Order status webhook (hmac auth) +# --- +iii.register_function( + "integrations::order-webhook", + { + "url": "https://fulfillment.partner.example.com/webhooks/orders", + "method": "POST", + "timeout_ms": 5000, + "auth": { + "type": "hmac", + "secret_key": "ORDER_WEBHOOK_SECRET", + }, + }, +) + +# --- +# Handler-based function that triggers HTTP-invoked functions +# --- + + +async def orders_process(data): + logger = Logger() + + await iii.trigger_async({ + "function_id": "state::set", + "payload": {"scope": "orders", "key": data["orderId"], "value": {**data, "status": "processing"}}, + }) + + charge_result = await iii.trigger_async({ + "function_id": "integrations::stripe-charge", + "payload": {"amount": data["amount"], "currency": "usd", "source": data["paymentToken"]}, + }) + + logger.info("Payment charged", {"orderId": data["orderId"], "chargeId": charge_result["id"]}) + + await iii.trigger_async({ + "function_id": "state::set", + "payload": {"scope": "orders", "key": data["orderId"], "value": {**data, "status": "charged"}}, + }) + + iii.trigger({ + "function_id": "integrations::slack-notify", + "payload": {"text": f"Order {data['orderId']} charged ${data['amount']}"}, + "action": TriggerAction.Void(), + }) + + iii.trigger({ + "function_id": "integrations::analytics-track", + "payload": {"event": "order.charged", "properties": {"orderId": data["orderId"], "amount": data["amount"]}}, + "action": TriggerAction.Void(), + }) + + return {"orderId": data["orderId"], "chargeId": charge_result["id"], "status": "charged"} + + 
+iii.register_function("orders::process", orders_process) + +# --- +# Trigger: state change -> notify fulfillment partner via HTTP-invoked function +# --- +iii.register_trigger({ + "type": "state", + "function_id": "integrations::order-webhook", + "config": {"scope": "orders", "key": "status"}, +}) + +# --- +# Trigger: scheduled analytics ping every hour +# --- + + +async def hourly_heartbeat(data): + logger = Logger() + worker_count = await iii.trigger_async({"function_id": "engine::workers::list", "payload": {}}) + + await iii.trigger_async({ + "function_id": "integrations::analytics-track", + "payload": { + "event": "system.heartbeat", + "properties": {"workers": len(worker_count), "timestamp": datetime.now(timezone.utc).isoformat()}, + }, + }) + + logger.info("Hourly heartbeat sent") + + +iii.register_function("integrations::hourly-heartbeat", hourly_heartbeat) + +iii.register_trigger({ + "type": "cron", + "function_id": "integrations::hourly-heartbeat", + "config": {"expression": "0 0 * * * * *"}, +}) + +# --- +# Trigger: enqueue Stripe charges for reliable delivery with retries +# --- + + +async def orders_enqueue_charge(data): + result = await iii.trigger_async({ + "function_id": "integrations::stripe-charge", + "payload": {"amount": data["amount"], "currency": "usd", "source": data["paymentToken"]}, + "action": TriggerAction.Enqueue({"queue": "payments"}), + }) + + return {"messageReceiptId": result["messageReceiptId"]} + + +iii.register_function("orders::enqueue-charge", orders_enqueue_charge) + + +async def main(): + while True: + await asyncio.sleep(60) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/skills/references/http-invoked-functions.rs b/skills/references/http-invoked-functions.rs new file mode 100644 index 000000000..3d8ce6192 --- /dev/null +++ b/skills/references/http-invoked-functions.rs @@ -0,0 +1,329 @@ +/// Pattern: HTTP-Invoked Functions +/// Comparable to: AWS Lambda URL invocations, Cloudflare Workers, webhook proxies 
+/// +/// Registers external HTTP endpoints as iii functions so the engine +/// calls them when triggered - no client-side HTTP code needed. +/// Combines with cron, state, and queue triggers for reactive integrations. +/// +/// Prerequisites: +/// - HttpFunctionsModule enabled in iii engine config +/// - Env vars: SLACK_WEBHOOK_TOKEN, STRIPE_API_KEY, ORDER_WEBHOOK_SECRET + +use iii_sdk::{ + register_worker, InitOptions, RegisterFunction, RegisterFunctionMessage, + TriggerRequest, TriggerAction, HttpInvocationConfig, HttpAuthConfig, + builtin_triggers::*, IIITrigger, Logger, + protocol::HttpMethod as ProtoHttpMethod, +}; +use serde_json::json; +use std::collections::HashMap; +use std::time::Duration; + +use serde; +use schemars; + +#[derive(serde::Deserialize, schemars::JsonSchema)] +struct ProcessOrderInput { + #[serde(rename = "orderId")] + order_id: String, + amount: f64, + #[serde(rename = "paymentToken")] + payment_token: String, +} + +#[derive(serde::Deserialize, schemars::JsonSchema)] +struct EnqueueChargeInput { + amount: f64, + #[serde(rename = "paymentToken")] + payment_token: String, +} + +fn main() { + let url = std::env::var("III_ENGINE_URL").unwrap_or("ws://127.0.0.1:49134".into()); + let iii = register_worker(&url, InitOptions::default()); + + // --- + // Data-driven registration for immutable legacy endpoints + // --- + let legacy_base_url = std::env::var("LEGACY_API_URL") + .unwrap_or("https://legacy.internal.example.com".into()); + + let legacy_endpoints = vec![ + ("/webhook", "legacy::webhook"), + ("/orders", "legacy::orders"), + ]; + + for (path, id) in legacy_endpoints { + let mut msg = RegisterFunctionMessage::with_id(id.into()) + .with_description(format!("Proxy legacy endpoint {path}")); + + iii.register_function_with( + msg, + HttpInvocationConfig { + url: format!("{legacy_base_url}{path}"), + method: ProtoHttpMethod::Post, + timeout_ms: Some(8000), + headers: HashMap::new(), + auth: None, + }, + ); + } + + // --- + // HTTP-invoked 
function: Slack webhook (bearer auth) + // --- + iii.register_function_with( + RegisterFunctionMessage::with_id("integrations::slack-notify".into()) + .with_description("POST notification to Slack webhook".into()), + HttpInvocationConfig { + url: "https://hooks.slack.example.com/services/incoming".into(), + method: ProtoHttpMethod::Post, + timeout_ms: Some(5000), + headers: { + let mut h = HashMap::new(); + h.insert("Content-Type".into(), "application/json".into()); + h + }, + auth: Some(HttpAuthConfig::Bearer { + token_key: "SLACK_WEBHOOK_TOKEN".into(), + }), + }, + ); + + // --- + // HTTP-invoked function: Stripe charges (api_key auth) + // --- + iii.register_function_with( + RegisterFunctionMessage::with_id("integrations::stripe-charge".into()) + .with_description("Create a charge via Stripe API".into()), + HttpInvocationConfig { + url: "https://api.stripe.example.com/v1/charges".into(), + method: ProtoHttpMethod::Post, + timeout_ms: Some(10000), + headers: { + let mut h = HashMap::new(); + h.insert("Content-Type".into(), "application/x-www-form-urlencoded".into()); + h + }, + auth: Some(HttpAuthConfig::ApiKey { + header: "Authorization".into(), + value_key: "STRIPE_API_KEY".into(), + }), + }, + ); + + // --- + // HTTP-invoked function: Analytics endpoint (no auth) + // --- + iii.register_function_with( + RegisterFunctionMessage::with_id("integrations::analytics-track".into()) + .with_description("POST event to analytics service".into()), + HttpInvocationConfig { + url: "https://analytics.internal.example.com/events".into(), + method: ProtoHttpMethod::Post, + timeout_ms: Some(3000), + headers: HashMap::new(), + auth: None, + }, + ); + + // --- + // HTTP-invoked function: Order status webhook (hmac auth) + // --- + iii.register_function_with( + RegisterFunctionMessage::with_id("integrations::order-webhook".into()) + .with_description("POST order status change to fulfillment partner".into()), + HttpInvocationConfig { + url: 
"https://fulfillment.partner.example.com/webhooks/orders".into(), + method: ProtoHttpMethod::Post, + timeout_ms: Some(5000), + headers: HashMap::new(), + auth: Some(HttpAuthConfig::Hmac { + secret_key: "ORDER_WEBHOOK_SECRET".into(), + }), + }, + ); + + // --- + // Handler-based function that triggers HTTP-invoked functions + // --- + let iii_clone = iii.clone(); + iii.register_function( + RegisterFunction::new_async("orders::process", move |data: ProcessOrderInput| { + let iii = iii_clone.clone(); + async move { + let logger = Logger::new(); + + iii.trigger(TriggerRequest { + function_id: "state::set".into(), + payload: json!({ + "scope": "orders", + "key": data.order_id, + "value": { + "orderId": data.order_id, + "amount": data.amount, + "status": "processing", + }, + }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + let charge_result = iii + .trigger(TriggerRequest { + function_id: "integrations::stripe-charge".into(), + payload: json!({ + "amount": data.amount, + "currency": "usd", + "source": data.payment_token, + }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + let charge_id = charge_result["id"].as_str().unwrap_or("unknown"); + logger.info("Payment charged", &json!({ "orderId": data.order_id, "chargeId": charge_id })); + + iii.trigger(TriggerRequest { + function_id: "state::set".into(), + payload: json!({ + "scope": "orders", + "key": data.order_id, + "value": { + "orderId": data.order_id, + "amount": data.amount, + "status": "charged", + }, + }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + iii.trigger(TriggerRequest { + function_id: "integrations::slack-notify".into(), + payload: json!({ "text": format!("Order {} charged ${}", data.order_id, data.amount) }), + action: Some(TriggerAction::Void), + timeout_ms: None, + }) + .await + .ok(); + + iii.trigger(TriggerRequest { + function_id: "integrations::analytics-track".into(), + payload: 
json!({ + "event": "order.charged", + "properties": { "orderId": data.order_id, "amount": data.amount }, + }), + action: Some(TriggerAction::Void), + timeout_ms: None, + }) + .await + .ok(); + + Ok(json!({ + "orderId": data.order_id, + "chargeId": charge_id, + "status": "charged", + })) + } + }) + .description("Process an order with payment and notifications"), + ); + + // --- + // Trigger: state change -> notify fulfillment partner via HTTP-invoked function + // --- + iii.register_trigger( + IIITrigger::State(StateTriggerConfig::new().scope("orders").key("status")) + .for_function("integrations::order-webhook"), + ) + .expect("failed"); + + // --- + // Trigger: scheduled analytics ping every hour + // --- + let iii_clone = iii.clone(); + iii.register_function( + RegisterFunction::new_async("integrations::hourly-heartbeat", move |_: serde_json::Value| { + let iii = iii_clone.clone(); + async move { + let logger = Logger::new(); + let worker_count = iii + .trigger(TriggerRequest { + function_id: "engine::workers::list".into(), + payload: json!({}), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + let count = worker_count.as_array().map(|a| a.len()).unwrap_or(0); + + iii.trigger(TriggerRequest { + function_id: "integrations::analytics-track".into(), + payload: json!({ + "event": "system.heartbeat", + "properties": { + "workers": count, + "timestamp": chrono::Utc::now().to_rfc3339(), + }, + }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + logger.info("Hourly heartbeat sent", &json!({})); + Ok(json!(null)) + } + }) + .description("Send hourly analytics heartbeat"), + ); + + iii.register_trigger( + IIITrigger::Cron(CronTriggerConfig::new("0 0 * * * * *")) + .for_function("integrations::hourly-heartbeat"), + ) + .expect("failed"); + + // --- + // Trigger: enqueue Stripe charges for reliable delivery with retries + // --- + let iii_clone = iii.clone(); + iii.register_function( + 
RegisterFunction::new_async("orders::enqueue-charge", move |data: EnqueueChargeInput| { + let iii = iii_clone.clone(); + async move { + let result = iii + .trigger(TriggerRequest { + function_id: "integrations::stripe-charge".into(), + payload: json!({ + "amount": data.amount, + "currency": "usd", + "source": data.payment_token, + }), + action: Some(TriggerAction::Enqueue { queue: "payments".into() }), + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + Ok(json!({ "messageReceiptId": result["messageReceiptId"] })) + } + }) + .description("Enqueue a Stripe charge for reliable processing"), + ); + + tokio::runtime::Runtime::new().unwrap().block_on(async { + tokio::signal::ctrl_c().await.ok(); + }); + iii.shutdown(); +} diff --git a/skills/references/iii-config.yaml b/skills/references/iii-config.yaml new file mode 100644 index 000000000..48f193d2c --- /dev/null +++ b/skills/references/iii-config.yaml @@ -0,0 +1,421 @@ +# Engine WebSocket port for SDK/worker connections. +# Default: 49134 +port: 49134 + +# External binary workers managed via `iii worker add/remove`. +# Each entry maps to a binary in the iii_workers/ directory. +# Config blocks between BEGIN/END markers are auto-managed by the CLI. 
+# workers: +# # === iii:pdfkit BEGIN === +# - class: workers::pdfkit::PdfKitWorker +# config: +# output_dir: ./output +# format: pdf +# # === iii:pdfkit END === +# # === iii:image-processor BEGIN === +# - class: workers::image_processor::ImageProcessorWorker +# config: +# max_width: 2048 +# output_format: webp +# # === iii:image-processor END === +# +# Workers are also tracked in iii.toml: +# [workers] +# pdfkit = "1.0.0" +# image-processor = "2.3.1" +# +# CLI commands: +# iii worker add pdfkit@1.0.0 # Install from registry +# iii worker add # Install all from iii.toml +# iii worker remove pdfkit # Remove binary, manifest, and config +# iii worker list # Show installed workers +# iii worker info pdfkit # Show registry details + +modules: + # REST API module - exposes HTTP endpoints for triggers and the core API surface. + # Functions with HTTP triggers are served through this module. + - class: modules::api::RestApiModule + config: + # TCP port for the HTTP server. Default: 3111 + port: 3111 + # Network interface to bind. Use 0.0.0.0 for all interfaces, 127.0.0.1 for localhost only. + host: 127.0.0.1 + # Maximum time (ms) before an HTTP request times out. Default: 30000 + default_timeout: 30000 + # Maximum concurrent HTTP requests the server will handle. Default: 1024 + concurrency_request_limit: 1024 + # Cross-Origin Resource Sharing configuration for browser clients. + cors: + # Origins allowed to make requests. Use '*' for any origin, or list specific domains. + allowed_origins: + - http://localhost:3113 + - http://127.0.0.1:3113 + # HTTP methods permitted for cross-origin requests. + # Options: GET, POST, PUT, DELETE, PATCH, OPTIONS, HEAD + allowed_methods: + - GET + - POST + - PUT + - DELETE + - OPTIONS + + # Stream module - real-time WebSocket pub/sub for live data streaming to clients. + # Clients connect via WebSocket to receive pushed updates on subscribed channels. + - class: modules::stream::StreamModule + config: + # TCP port for WebSocket connections. 
Default: 3112 + port: ${STREAM_PORT:3112} + # Network interface to bind. Use 0.0.0.0 for all interfaces, 127.0.0.1 for localhost only. + host: 0.0.0.0 + # Function that validates stream connection/subscription requests (e.g., auth checks). + # Set to null or omit to allow all connections. + auth_function: stream.auth + # Storage backend for stream state (subscriptions, message history). + adapter: + # KvStore adapter - file or memory-based local storage. + class: modules::stream::adapters::KvStore + config: + # Storage mode. Options: file_based (persists to disk), in_memory (lost on restart) + store_method: file_based + # Directory path for file_based storage. Required when store_method is file_based. + file_path: ./data/stream_store + # How often (ms) to flush dirty data to disk. Lower = more durable, higher = better performance. + save_interval_ms: 5000 + # Alternative adapters: + # adapter: + # # Redis adapter - distributed storage via Redis server. + # class: modules::stream::adapters::RedisAdapter + # config: + # redis_url: redis://localhost:6379 + # adapter: + # # Bridge adapter - connects to another iii engine instance for distributed streaming. + # class: modules::stream::adapters::Bridge + # config: + # bridge_url: ws://localhost:49134 + + # State module - persistent key-value storage for function state across invocations. + # Functions use ctx.state.get/set to read/write stateful data. + - class: modules::state::StateModule + config: + adapter: + # KvStore adapter - file or memory-based local storage. + class: modules::state::adapters::KvStore + config: + # Storage mode. Options: file_based (persists to disk), in_memory (lost on restart) + store_method: file_based + # Directory path for file_based storage. Required when store_method is file_based. + file_path: ./data/state_store + # How often (ms) to flush dirty data to disk. + save_interval_ms: 5000 + # Alternative adapters: + # adapter: + # # Redis adapter - distributed state via Redis server. 
+ # class: modules::state::adapters::RedisAdapter + # config: + # redis_url: redis://localhost:6379 + # adapter: + # # Bridge adapter - forwards state operations to another iii engine instance. + # class: modules::state::adapters::Bridge + # config: + # bridge_url: ws://localhost:49134 + + # Queue module - background job processing with retries, dead-letter queues, and concurrency control. + # Functions enqueue jobs; subscribers process them asynchronously. + - class: modules::queue::QueueModule + config: + queue_configs: + default: + max_retries: 3 + concurrency: 5 + type: standard + adapter: + # Built-in queue adapter - local job queue with persistence options. + class: modules::queue::BuiltinQueueAdapter + config: + # Maximum delivery attempts before moving to dead-letter queue. Default: 3 + max_attempts: 3 + # Initial delay (ms) before retry. Uses exponential backoff (1000 → 2000 → 4000...). + backoff_ms: 1000 + # Maximum parallel job workers. Higher = more throughput, more resource usage. + concurrency: 10 + # How often (ms) to check for new jobs. Lower = faster processing, higher CPU. + poll_interval_ms: 100 + # Processing order. Options: concurrent (parallel, any order), fifo (sequential, ordered) + mode: concurrent + # Storage mode. Options: file_based (persists jobs to disk), in_memory (lost on restart) + store_method: file_based + # Directory path for file_based storage. Required when store_method is file_based. + file_path: ./data/queue_store + # How often (ms) to flush job state to disk. + save_interval_ms: 5000 + # Alternative adapters: + # adapter: + # # Redis adapter - distributed job queue via Redis. + # class: modules::queue::RedisAdapter + # config: + # redis_url: redis://localhost:6379 + # adapter: + # # Bridge adapter - forwards queue operations to another iii engine instance. + # class: modules::queue::adapters::Bridge + # config: + # bridge_url: ws://localhost:49134 + # adapter: + # # RabbitMQ adapter - enterprise message broker integration. 
+ # class: modules::queue::RabbitMQAdapter + # config: + # amqp_url: amqp://localhost:5672 + # max_attempts: 3 + # # Messages to prefetch per consumer. Higher = more throughput, more memory. + # prefetch_count: 10 + # # RabbitMQ queue mode. Options: standard, quorum (replicated for HA) + # queue_mode: standard + + # PubSub module - in-process event fanout for decoupled function communication. + # Functions publish events; multiple subscribers receive them immediately. + - class: modules::pubsub::PubSubModule + config: + adapter: + # Local adapter - in-process pub/sub, events don't cross engine instances. + class: modules::pubsub::LocalAdapter + # Alternative adapter: + # adapter: + # # Redis adapter - distributed pub/sub across multiple engine instances. + # class: modules::pubsub::RedisAdapter + # config: + # redis_url: redis://localhost:6379 + + # Cron module - time-based job scheduling using cron expressions. + # Functions with cron triggers execute on schedule. + - class: modules::cron::CronModule + config: + adapter: + # KV-based cron adapter - uses local KV store for distributed lock coordination. + # Warning: Local locks only prevent duplicates within same process; multiple engine + # instances may execute the same cron job. Use RedisCronAdapter for true distributed locking. + class: modules::cron::KvCronAdapter + config: + # Lock timeout (ms). Job re-executes if lock holder crashes and lock expires. + lock_ttl_ms: 30000 + # KV index name for storing lock data. + lock_index: cron_locks + # Storage mode. Options: file_based (persists locks to disk), in_memory + store_method: file_based + # Directory path for file_based storage. + file_path: ./data/cron_locks + # How often (ms) to flush lock state to disk. + save_interval_ms: 5000 + # Alternative adapter: + # adapter: + # # Redis cron adapter - true distributed locking across engine instances. 
+ # class: modules::cron::RedisCronAdapter + # config: + # redis_url: redis://localhost:6379 + + # Observability module - OpenTelemetry tracing, metrics, logs, and alerting. + # Provides visibility into function execution, performance, and errors. + - class: modules::observability::OtelModule + config: + # Master switch for all observability features. + enabled: ${OTEL_ENABLED:true} + # Service name in traces/metrics/logs. Used for filtering in observability backends. + service_name: ${OTEL_SERVICE_NAME:iii} + # Service version tag. Useful for correlating deployments with telemetry changes. + service_version: ${SERVICE_VERSION:1.0.0} + # Namespace/environment label (e.g., production, staging, development). + service_namespace: ${SERVICE_NAMESPACE:development} + # Trace export destination. Options: memory (queryable via API), otlp (send to collector), both + exporter: ${OTEL_EXPORTER_TYPE:memory} + # OTLP collector endpoint. Required when exporter is otlp or both. + # Common endpoints: Jaeger (4317), Grafana Tempo (4317), Honeycomb, Datadog. + endpoint: ${OTEL_EXPORTER_OTLP_ENDPOINT:http://localhost:4317} + # Base sampling ratio (0.0-1.0). 1.0 = sample all traces, 0.1 = sample 10%. + # Overridden by advanced sampling rules below when configured. + sampling_ratio: 1.0 + # Advanced sampling - fine-grained control over which traces to keep. + sampling: + # Sampling ratio for traces not matching any rule below. + default: 0.5 + # Inherit sampling decision from parent span to keep distributed traces complete. + parent_based: true + # Sampling rules evaluated in order; first match wins. + # Patterns support wildcards: * (any chars), ? (single char). + rules: + # Sample 80% of API operations for debugging visibility. + - operation: api.* + rate: 0.8 + # Sample 100% of traces from this service when investigating issues. + - service: iii + rate: 1.0 + # Sample only 30% of queue operations to reduce noise while maintaining visibility. 
+ - operation: queue.* + service: iii + rate: 0.3 + # Global rate limit to cap telemetry volume during traffic spikes. + rate_limit: + max_traces_per_second: 100 + # Maximum spans to retain in memory. Used when exporter is memory or both. + # Higher = more history for local debugging, more memory usage. + memory_max_spans: ${OTEL_MEMORY_MAX_SPANS:10000} + # Metrics collection for counters, gauges, histograms. + metrics_enabled: true + # Metrics storage. Options: memory (queryable via API), otlp (send to collector) + metrics_exporter: ${OTEL_METRICS_EXPORTER:memory} + # How long (seconds) to retain metrics before expiration. Default: 3600 (1 hour) + metrics_retention_seconds: 3600 + # Maximum metric data points to store. Prevents unbounded memory growth. + metrics_max_count: 10000 + # Log collection and storage. + logs_enabled: ${OTEL_LOGS_ENABLED:true} + # Log export destination. Options: memory (queryable via API), otlp (send to collector), both + logs_exporter: ${OTEL_LOGS_EXPORTER:memory} + # Maximum log records to retain in memory. + logs_max_count: ${OTEL_LOGS_MAX_COUNT:1000} + # How long (seconds) to retain logs before expiration. Default: 3600 (1 hour) + logs_retention_seconds: ${OTEL_LOGS_RETENTION_SECONDS:3600} + # Batch size for OTLP log export. Larger batches = fewer network calls, higher latency. + logs_batch_size: 100 + # How often (ms) to flush logs to OTLP collector. + logs_flush_interval_ms: 5000 + # Log sampling ratio (0.0-1.0). 1.0 = keep all logs, 0.5 = keep 50%. + logs_sampling_ratio: ${OTEL_LOGS_SAMPLING_RATIO:1.0} + # Also print SDK logs to engine console for local debugging. + logs_console_output: ${OTEL_LOGS_CONSOLE_OUTPUT:true} + # Alert rules - trigger actions when metrics cross thresholds. + alerts: + # Alert when error rate exceeds threshold. + - name: high_error_rate + # Metric name to monitor. Built-in metrics: iii.invocations.*, iii.workers.*, etc. + metric: iii.invocations.error + # Threshold value for comparison. 
+ threshold: 100 + # Comparison operator. Options: > (gt), >= (gte), < (lt), <= (lte), == (eq), != (ne) + operator: '>' + # Time window (seconds) for metric aggregation. + window_seconds: 60 + # Action when alert triggers. + # action: + # # Webhook action - POST alert payload to URL. + # type: webhook + # url: https://example.com/alert + enabled: true + # Minimum seconds between repeated alerts (debounce). + cooldown_seconds: 300 + # Alert when active workers drop below threshold. + - name: low_workers + metric: iii.workers.active + threshold: 1 + operator: '<' + window_seconds: 60 + action: + # Function action - invoke a function to handle the alert (e.g., auto-scaling). + type: function + path: alerts.handle_low_workers + enabled: true + cooldown_seconds: 60 + # Engine console log level. Options: trace, debug, info, warn, error + # trace = most verbose, error = only errors. + level: info + # Alias for level (deprecated, use level instead). + log_level: info + # Console output format. Options: default (human-readable with colors), json (structured) + format: default + + # HTTP Functions module - enables HTTP-invoked functions (outbound HTTP calls from the engine). + # Required for functions registered with HttpInvocationConfig. + # The engine makes the HTTP request on behalf of the function and enforces URL security policies. + - class: modules::http_functions::HttpFunctionsModule + config: + security: + # URL patterns allowed for outbound requests. Use '*' to allow all URLs. + # Examples: 'https://api.example.com/*', 'https://*.trusted.com/*' + url_allowlist: + - '*' + # Block requests to private/internal IP ranges (10.x, 172.16-31.x, 192.168.x, localhost). + # Helps prevent SSRF attacks. Default: true + block_private_ips: true + # Require HTTPS for all outbound requests. Prevents accidental plaintext transmission. 
+ # Default: true + require_https: true + # Local development alternative — relaxes security to allow localhost targets: + # security: + # url_allowlist: + # - '*' + # block_private_ips: false + # require_https: false + + # Exec module - spawns external processes (SDK workers) and watches for file changes. + # Useful for setups where workers need to run alongside the engine on the same host. + # Also useful from frameworks that support this style of operation such as Motia. + # # Rust worker: + # - class: modules::shell::ExecModule + # config: + # watch: + # - workers/**/*.rs + # - src/**/*.rs + # exec: + # - cargo run --build my-rust-worker + # + # # Python worker: + # - class: modules::shell::ExecModule + # config: + # watch: + # - workers/**/*.py + # - src/**/*.py + # exec: + # - uv run ./src/my_python_worker.py + # + # # Node/TypeScript worker: + # - class: modules::shell::ExecModule + # config: + # watch: + # - workers/**/*.ts + # - src/**/*.ts + # - workers/**/*.js + # - src/**/*.js + # exec: + # - pnpm dev:my-node-worker + # + # # Motia worker: + # - class: modules::shell::ExecModule + # config: + # watch: + # - steps/**/*.ts + # - steps/**/*.py + # exec: + # - npx motia-node + + # Bridge Client module - connects this engine to a remote iii instance for cross-instance + # function invocation. Enables distributed architectures and function federation. + # - class: modules::bridge_client::BridgeClientModule + # config: + # # WebSocket URL of the remote iii engine to connect to. + # url: ws://localhost:49134 + # # Unique identifier for this client in the remote engine's registry. + # service_id: bridge-client + # # Human-readable name for logging and observability. + # service_name: bridge-client + # # Functions to expose to the remote engine (remote can call local functions). + # expose: + # # Local function becomes callable from remote as remote_function (or same name if omitted). 
+ # - local_function: logger.info + # remote_function: logger.info + # # Functions to forward to remote engine (local calls invoke remote functions). + # forward: + # # Calling local_function locally will invoke remote_function on the remote engine. + # - local_function: remote.state.get + # remote_function: state::get + # # Maximum time (ms) to wait for remote response. + # timeout_ms: 5000 + + # Telemetry module - anonymous product usage analytics for iii development. + # Helps the team understand usage patterns and prioritize features. + - class: modules::telemetry::TelemetryModule + config: + # Enable/disable anonymous telemetry. Set to false to opt out. + enabled: true + # API key for telemetry backend (leave empty for anonymous tracking). + api_key: '' + # Separate API key for SDK telemetry events. + sdk_api_key: '' + # How often (seconds) to send heartbeat events. Default: 21600 (6 hours) + heartbeat_interval_secs: 21600 diff --git a/skills/references/low-code-automation.js b/skills/references/low-code-automation.js new file mode 100644 index 000000000..854bb56c0 --- /dev/null +++ b/skills/references/low-code-automation.js @@ -0,0 +1,145 @@ +/** + * Pattern: Low-Code/No-Code Workflow Builders + * Comparable to: n8n, Zapier, LangFlow + * + * Demonstrates simple trigger → transform → action chains. + * Each "node" in the automation is a small registered function. + * Automations are chained via named queues, making it easy to + * add/remove/reorder steps. 
+ * + * How-to references: + * - Functions & Triggers: https://iii.dev/docs/how-to/use-functions-and-triggers + * - HTTP endpoints: https://iii.dev/docs/how-to/expose-http-endpoint + * - Queues: https://iii.dev/docs/how-to/use-queues + * - PubSub: https://iii.dev/docs/how-to/use-functions-and-triggers + * - Cron: https://iii.dev/docs/how-to/schedule-cron-task + */ + +import { registerWorker, Logger, TriggerAction } from 'iii-sdk' + +const iii = registerWorker(process.env.III_ENGINE_URL || 'ws://localhost:49134', { + workerName: 'low-code-automation', +}) + +// =================================================================== +// Automation 1: "When a form is submitted → enrich → store → notify" +// (Like a Zapier zap: Typeform → Clearbit → Google Sheets → Slack) +// =================================================================== + +// Node: Webhook trigger (incoming form data) +iii.registerFunction({ id: 'auto::form-webhook' }, async (data) => { + iii.trigger({ + function_id: 'auto::enrich-lead', + payload: { + submission_id: `sub-${Date.now()}`, + email: data.email, + company: data.company, + message: data.message, + received_at: new Date().toISOString(), + }, + action: TriggerAction.Enqueue({ queue: 'automation' }), + }) + return { status: 'accepted' } +}) + +iii.registerTrigger({ + type: 'http', + function_id: 'auto::form-webhook', + config: { api_path: '/webhooks/form', http_method: 'POST' }, +}) + +// Node: Enrich the lead data +iii.registerFunction({ id: 'auto::enrich-lead' }, async (data) => { + const logger = new Logger() + logger.info('Enriching lead', { email: data.email }) + + const enriched = { + ...data, + company_size: 'mid-market', + industry: 'technology', + enriched: true, + } + + iii.trigger({ + function_id: 'auto::store-lead', + payload: enriched, + action: TriggerAction.Enqueue({ queue: 'automation' }), + }) + + return enriched +}) + +// Node: Store in "spreadsheet" (state) +iii.registerFunction({ id: 'auto::store-lead' }, async (data) => 
{ + await iii.trigger({ function_id: 'state::set', payload: { + scope: 'leads', + key: data.submission_id, + value: { _key: data.submission_id, ...data }, + } }) + + iii.trigger({ + function_id: 'auto::notify-team', + payload: data, + action: TriggerAction.Enqueue({ queue: 'automation' }), + }) +}) + +// Node: Send a Slack-like notification +iii.registerFunction({ id: 'auto::notify-team' }, async (data) => { + const logger = new Logger() + logger.info('Notifying team about new lead', { + email: data.email, + company: data.company, + }) + + // In production, this would call a Slack webhook or similar + iii.trigger({ function_id: 'publish', payload: { + topic: 'notifications.internal', + data: { + channel: '#leads', + text: `New lead from ${data.email} at ${data.company} (${data.company_size})`, + }, + }, action: TriggerAction.Void() }) +}) + +// =================================================================== +// Automation 2: "Every morning → pull metrics → format → email digest" +// (Like a Zapier schedule: Schedule → HTTP → Formatter → Gmail) +// =================================================================== + +iii.registerFunction({ id: 'auto::daily-digest' }, async () => { + const logger = new Logger() + + // Pull all leads from the "spreadsheet" + const leads = await iii.trigger({ function_id: 'state::list', payload: { scope: 'leads' } }) + + const today = new Date().toISOString().split('T')[0] + const todayLeads = leads.filter((l) => + l.received_at?.startsWith(today) + ) + + const digest = { + date: today, + total_leads: leads.length, + new_today: todayLeads.length, + top_companies: todayLeads.map((l) => l.company).filter(Boolean), + } + + logger.info('Daily digest generated', digest) + + iii.trigger({ function_id: 'publish', payload: { + topic: 'notifications.internal', + data: { + channel: '#daily-digest', + text: `Daily Report (${today}): ${digest.new_today} new leads, ${digest.total_leads} total`, + }, + }, action: TriggerAction.Void() }) + + 
return digest +}) + +iii.registerTrigger({ + type: 'cron', + function_id: 'auto::daily-digest', + config: { expression: '0 0 8 * * * *' }, // 8 AM daily +}) diff --git a/skills/references/observability.js b/skills/references/observability.js new file mode 100644 index 000000000..73fcf085a --- /dev/null +++ b/skills/references/observability.js @@ -0,0 +1,169 @@ +/** + * Pattern: Observability + * Comparable to: Datadog, Grafana, Honeycomb, OpenTelemetry SDK + * + * iii has built-in OpenTelemetry support for traces, metrics, and logs. + * This file shows how to configure the telemetry pipeline, create custom + * spans and metrics, propagate trace context across function calls, listen + * for log events, and cleanly shut down the exporter. + * + * How-to references: + * - Telemetry & observability: https://iii.dev/docs/advanced/telemetry + */ + +import { registerWorker, Logger, TriggerAction } from 'iii-sdk' + +// --------------------------------------------------------------------------- +// 1. SDK initialization with OpenTelemetry config +// --------------------------------------------------------------------------- +const iii = registerWorker(process.env.III_ENGINE_URL || 'ws://localhost:49134', { + workerName: 'observability', + otel: { + enabled: true, + serviceName: 'my-service', + serviceVersion: '1.2.0', + metricsEnabled: true, + }, +}) + +// --------------------------------------------------------------------------- +// 2. Custom spans — wrap an operation in a named span for tracing +// iii.withSpan(name, options, callback) creates a child span under the +// current trace context. The span is automatically closed when the +// callback completes or throws. 
+// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'orders::process' }, async (data) => { + const logger = new Logger() + + const result = await iii.withSpan('validate-order', { attributes: { orderId: data.order_id } }, async () => { + logger.info('Validating order inside span', { orderId: data.order_id }) + + if (!data.items?.length) { + throw new Error('Empty cart') + } + + return { valid: true, itemCount: data.items.length } + }) + + // Nested spans for sub-operations + const total = await iii.withSpan('calculate-total', {}, async () => { + return data.items.reduce((sum, item) => sum + item.price * item.qty, 0) + }) + + await iii.withSpan('persist-order', { attributes: { total } }, async () => { + await iii.trigger({ + function_id: 'state::set', + payload: { + scope: 'orders', + key: data.order_id, + value: { _key: data.order_id, total, status: 'confirmed' }, + }, + }) + }) + + return { order_id: data.order_id, total, validated: result.valid } +}) + +iii.registerTrigger({ + type: 'http', + function_id: 'orders::process', + config: { api_path: '/orders/process', http_method: 'POST' }, +}) + +// --------------------------------------------------------------------------- +// 3. Custom metrics — counters and histograms via getMeter() +// --------------------------------------------------------------------------- +const meter = iii.getMeter() + +const orderCounter = meter.createCounter('orders.processed', { + description: 'Total number of orders processed', +}) + +const latencyHistogram = meter.createHistogram('orders.latency_ms', { + description: 'Order processing latency in milliseconds', + unit: 'ms', +}) + +iii.registerFunction({ id: 'orders::with-metrics' }, async (data) => { + const start = Date.now() + + // ... 
order processing logic + const result = { order_id: data.order_id, status: 'complete' } + + // Record metrics + orderCounter.add(1, { status: 'success', region: data.region || 'us-east-1' }) + latencyHistogram.record(Date.now() - start, { endpoint: '/orders' }) + + return result +}) + +// --------------------------------------------------------------------------- +// 4. Trace context propagation +// Access the current trace ID, inject traceparent headers for outbound HTTP +// calls, and attach baggage for cross-service context. +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'orders::call-external' }, async (data) => { + const logger = new Logger() + + // Read current trace ID for correlation + const traceId = iii.currentTraceId() + logger.info('Current trace', { traceId }) + + // Build headers with W3C traceparent for downstream services + const headers = {} + iii.injectTraceparent(headers) // adds 'traceparent' header + iii.injectBaggage(headers, { 'user.id': data.user_id }) // adds 'baggage' header + + // Use these headers when calling external services + // e.g. fetch('https://api.partner.com/verify', { headers }) + + return { traceId, propagated: true } +}) + +// --------------------------------------------------------------------------- +// 5. Log listener — subscribe to all log events for external forwarding +// --------------------------------------------------------------------------- +iii.onLog((logEntry) => { + // logEntry shape: { level, message, attributes, timestamp, traceId, spanId } + // Forward to external system (Datadog, Splunk, etc.) + if (logEntry.level === 'error') { + // e.g. externalLogger.error(logEntry.message, logEntry.attributes) + } +}) + +// --------------------------------------------------------------------------- +// 6. Structured logging with trace correlation +// Logger automatically attaches trace/span IDs when otel is enabled. 
+// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'debug::log-demo' }, async (data) => { + const logger = new Logger() + + logger.info('Processing request', { requestId: data.id }) + logger.warn('Slow query detected', { query: data.query, duration_ms: 1200 }) + logger.error('Unexpected state', { expected: 'active', actual: data.status }) + + return { logged: true } +}) + +// --------------------------------------------------------------------------- +// 7. Disable telemetry — useful for local development or testing +// --------------------------------------------------------------------------- +// const iiiNoTelemetry = registerWorker(process.env.III_ENGINE_URL || 'ws://localhost:49134', { +// workerName: 'observability-no-otel', +// otel: { +// enabled: false, +// }, +// }) + +// --------------------------------------------------------------------------- +// 8. Clean shutdown — flush pending spans and metrics on process exit +// --------------------------------------------------------------------------- +process.on('SIGTERM', async () => { + await iii.shutdown_otel() + process.exit(0) +}) + +process.on('SIGINT', async () => { + await iii.shutdown_otel() + process.exit(0) +}) diff --git a/skills/references/observability.py b/skills/references/observability.py new file mode 100644 index 000000000..46a44c4a5 --- /dev/null +++ b/skills/references/observability.py @@ -0,0 +1,169 @@ +""" +Pattern: Observability +Comparable to: Datadog, Grafana, Honeycomb, OpenTelemetry SDK + +iii has built-in OpenTelemetry support for traces, metrics, and logs. +The Python SDK provides get_context() for trace correlation and Logger +for structured logging. with_span and get_meter are JS SDK features +that do not exist in the Python SDK — this file uses what is actually +available. 
+ +How-to references: + - Telemetry & observability: https://iii.dev/docs/advanced/telemetry +""" + +import asyncio +import os +import signal +import time + +from iii import InitOptions, Logger, TriggerAction, register_worker + +# --- +# 1. SDK initialization with OpenTelemetry config +# --- +iii = register_worker( + address=os.environ.get("III_ENGINE_URL", "ws://localhost:49134"), + options=InitOptions( + worker_name="observability", + otel={ + "enabled": True, + "service_name": "my-service", + "service_version": "1.2.0", + "metrics_enabled": True, + }, + ), +) + +# --- +# 2. Structured logging with trace correlation +# Logger automatically attaches trace/span IDs when otel is enabled. +# Use get_context() to read the current trace ID for manual correlation. +# --- + + +async def orders_process(data): + logger = Logger() + + ctx = iii.get_context() + logger.info("Processing order", {"orderId": data["order_id"], "traceId": ctx.get("trace_id")}) + + items = data.get("items") or [] + if not items: + raise Exception("Empty cart") + + item_count = len(items) + logger.info("Validated order", {"orderId": data["order_id"], "itemCount": item_count}) + + total = sum(item["price"] * item["qty"] for item in items) + logger.info("Calculated total", {"orderId": data["order_id"], "total": total}) + + await iii.trigger_async({ + "function_id": "state::set", + "payload": { + "scope": "orders", + "key": data["order_id"], + "value": {"_key": data["order_id"], "total": total, "status": "confirmed"}, + }, + }) + + logger.info("Order persisted", {"orderId": data["order_id"], "total": total}) + return {"order_id": data["order_id"], "total": total, "validated": True} + + +iii.register_function("orders::process", orders_process) + +iii.register_trigger({ + "type": "http", + "function_id": "orders::process", + "config": {"api_path": "/orders/process", "http_method": "POST"}, +}) + +# --- +# 3. Metrics via structured logging +# The Python SDK does not expose get_meter(). 
Record metrics as structured +# log entries — the OTEL log exporter forwards them to your collector. +# --- + + +async def orders_with_metrics(data): + logger = Logger() + start = time.time() + + result = {"order_id": data["order_id"], "status": "complete"} + + elapsed_ms = (time.time() - start) * 1000 + logger.info("metric.orders.processed", { + "status": "success", + "region": data.get("region", "us-east-1"), + "latency_ms": elapsed_ms, + "endpoint": "/orders", + }) + + return result + + +iii.register_function("orders::with-metrics", orders_with_metrics) + +# --- +# 4. Trace context propagation +# Use get_context() to read the current trace ID for correlation with +# external services. +# --- + + +async def call_external(data): + logger = Logger() + + ctx = iii.get_context() + trace_id = ctx.get("trace_id") + logger.info("Current trace", {"traceId": trace_id}) + + logger.info("Trace context available for propagation", { + "traceId": trace_id, + "userId": data.get("user_id"), + }) + + return {"traceId": trace_id, "propagated": True} + + +iii.register_function("orders::call-external", call_external) + +# --- +# 5. Structured logging levels with trace correlation +# --- + + +async def log_demo(data): + logger = Logger() + + logger.info("Processing request", {"requestId": data.get("id")}) + logger.warn("Slow query detected", {"query": data.get("query"), "duration_ms": 1200}) + logger.error("Unexpected state", {"expected": "active", "actual": data.get("status")}) + + return {"logged": True} + + +iii.register_function("debug::log-demo", log_demo) + +# --- +# 6. 
Clean shutdown — flush pending telemetry on process exit
+# ---
+
+_SHUTDOWN = {"requested": False}
+
+def _shutdown(signum, frame):
+    # asyncio.run()'s loop is already running when this sync handler fires, so
+    # run_until_complete() would raise "this event loop is already running"
+    # and the telemetry flush would never happen. Flag main() to flush & exit.
+    _SHUTDOWN["requested"] = True
+
+signal.signal(signal.SIGTERM, _shutdown)
+signal.signal(signal.SIGINT, _shutdown)
+
+
+async def main():
+    while not _SHUTDOWN["requested"]:
+        await asyncio.sleep(1)
+    await iii.shutdown_otel()
+
+if __name__ == "__main__":
+    asyncio.run(main())
diff --git a/skills/references/observability.rs b/skills/references/observability.rs
new file mode 100644
index 000000000..f43357dab
--- /dev/null
+++ b/skills/references/observability.rs
@@ -0,0 +1,279 @@
+/// Pattern: Observability
+/// Comparable to: Datadog, Grafana, Honeycomb, OpenTelemetry SDK
+///
+/// iii has built-in OpenTelemetry support for traces, metrics, and logs.
+/// This file shows how to configure the telemetry pipeline, create custom
+/// spans and metrics, propagate trace context across function calls, listen
+/// for log events, and cleanly shut down the exporter.
+///
+/// Requires the `otel` feature: iii-sdk = { version = "...", features = ["otel"] }
+
+use iii_sdk::{
+    register_worker, InitOptions, RegisterFunction, TriggerRequest, TriggerAction,
+    builtin_triggers::*, IIITrigger, Logger,
+};
+use serde_json::json;
+use std::time::Duration;
+
+use serde;
+use schemars;
+
+#[cfg(feature = "otel")]
+use iii_sdk::{
+    with_span, get_tracer, get_meter, shutdown_otel, init_otel,
+    current_trace_id, inject_traceparent, inject_baggage,
+    OtelConfig, SpanKind,
+};
+
+#[derive(serde::Deserialize, schemars::JsonSchema)]
+struct OrderInput {
+    order_id: String,
+    items: Option<Vec<OrderItem>>,
+    region: Option<String>,
+    user_id: Option<String>,
+}
+
+#[derive(serde::Deserialize, serde::Serialize, schemars::JsonSchema)]
+struct OrderItem {
+    price: f64,
+    qty: i64,
+}
+
+#[derive(serde::Deserialize, schemars::JsonSchema)]
+struct ExternalCallInput {
+    user_id: String,
+}
+
+#[derive(serde::Deserialize, schemars::JsonSchema)]
+struct LogDemoInput {
+    id: String,
+    query: Option<String>,
+    status: Option<String>,
+}
+ +fn main() { + let url = std::env::var("III_ENGINE_URL").unwrap_or("ws://127.0.0.1:49134".into()); + + // --- + // 1. SDK initialization with OpenTelemetry config + // --- + let iii = register_worker( + &url, + InitOptions { + #[cfg(feature = "otel")] + otel: Some(OtelConfig { + enabled: Some(true), + service_name: Some("my-service".into()), + service_version: Some("1.2.0".into()), + metrics_enabled: Some(true), + ..Default::default() + }), + ..Default::default() + }, + ); + + // --- + // 2. Custom spans - wrap an operation in a named span for tracing + // with_span(name, traceparent, kind, callback) creates a child span under + // the current trace context. The span is automatically closed when the + // callback completes or throws. + // --- + let iii_clone = iii.clone(); + iii.register_function( + RegisterFunction::new_async("orders::process", move |data: OrderInput| { + let iii = iii_clone.clone(); + async move { + let logger = Logger::new(); + let items = data.items.unwrap_or_default(); + + #[cfg(feature = "otel")] + let validation = with_span("validate-order", None, Some(SpanKind::Internal), || async { + logger.info("Validating order inside span", &json!({ "orderId": data.order_id })); + + if items.is_empty() { + return Err("Empty cart".into()); + } + + Ok(json!({ "valid": true, "itemCount": items.len() })) + }) + .await + .map_err(|e| e.to_string())?; + + #[cfg(not(feature = "otel"))] + let validation = { + if items.is_empty() { + return Err("Empty cart".into()); + } + json!({ "valid": true, "itemCount": items.len() }) + }; + + let total: f64 = items.iter().map(|i| i.price * i.qty as f64).sum(); + let order_id = data.order_id.clone(); + + #[cfg(feature = "otel")] + with_span("persist-order", None, None, || { + let iii = iii.clone(); + let order_id = order_id.clone(); + async move { + iii.trigger(TriggerRequest { + function_id: "state::set".into(), + payload: json!({ + "scope": "orders", + "key": order_id, + "value": { "_key": order_id, "total": total, 
"status": "confirmed" }, + }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| -> Box { e.to_string().into() })?; + Ok(()) + } + }) + .await + .map_err(|e| e.to_string())?; + + #[cfg(not(feature = "otel"))] + iii.trigger(TriggerRequest { + function_id: "state::set".into(), + payload: json!({ + "scope": "orders", + "key": order_id, + "value": { "_key": order_id, "total": total, "status": "confirmed" }, + }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + Ok(json!({ + "order_id": order_id, + "total": total, + "validated": validation["valid"], + })) + } + }) + .description("Process an order with tracing spans"), + ); + + iii.register_trigger( + IIITrigger::Http(HttpTriggerConfig::new("/orders/process").method(HttpMethod::Post)) + .for_function("orders::process"), + ) + .expect("failed"); + + // --- + // 3. Custom metrics - counters and histograms via get_meter() + // --- + #[cfg(feature = "otel")] + let order_counter = { + let meter = get_meter(); + meter + .u64_counter("orders.processed") + .with_description("Total number of orders processed") + .build() + }; + + #[cfg(feature = "otel")] + let latency_histogram = { + let meter = get_meter(); + meter + .f64_histogram("orders.latency_ms") + .with_description("Order processing latency in milliseconds") + .with_unit("ms") + .build() + }; + + iii.register_function( + RegisterFunction::new("orders::with-metrics", { + #[cfg(feature = "otel")] + let order_counter = order_counter.clone(); + #[cfg(feature = "otel")] + let latency_histogram = latency_histogram.clone(); + move |data: OrderInput| -> Result { + let start = std::time::Instant::now(); + + let result = json!({ "order_id": data.order_id, "status": "complete" }); + + #[cfg(feature = "otel")] + { + use opentelemetry::KeyValue; + let region = data.region.unwrap_or("us-east-1".into()); + order_counter.add(1, &[ + KeyValue::new("status", "success"), + KeyValue::new("region", region), + ]); + 
latency_histogram.record(start.elapsed().as_millis() as f64, &[ + KeyValue::new("endpoint", "/orders"), + ]); + } + + Ok(result) + } + }) + .description("Process order with custom metrics"), + ); + + // --- + // 4. Trace context propagation + // Access the current trace ID, inject traceparent headers for outbound HTTP + // calls, and attach baggage for cross-service context. + // --- + iii.register_function( + RegisterFunction::new("orders::call-external", move |data: ExternalCallInput| -> Result { + let logger = Logger::new(); + + #[cfg(feature = "otel")] + { + let trace_id = current_trace_id(); + logger.info("Current trace", &json!({ "traceId": trace_id })); + + let mut headers = std::collections::HashMap::new(); + if let Some(tp) = inject_traceparent() { + headers.insert("traceparent".to_string(), tp); + } + if let Some(bg) = inject_baggage() { + headers.insert("baggage".to_string(), bg); + } + + Ok(json!({ "traceId": trace_id, "propagated": true })) + } + + #[cfg(not(feature = "otel"))] + { + logger.info("Trace propagation requires otel feature", &json!({})); + Ok(json!({ "traceId": null, "propagated": false })) + } + }) + .description("Demonstrate trace context propagation"), + ); + + // --- + // 5. Structured logging with trace correlation + // Logger automatically attaches trace/span IDs when otel is enabled. + // --- + iii.register_function( + RegisterFunction::new("debug::log-demo", |data: LogDemoInput| -> Result { + let logger = Logger::new(); + + logger.info("Processing request", &json!({ "requestId": data.id })); + logger.warn("Slow query detected", &json!({ "query": data.query, "duration_ms": 1200 })); + logger.error("Unexpected state", &json!({ "expected": "active", "actual": data.status })); + + Ok(json!({ "logged": true })) + }) + .description("Demonstrate structured logging"), + ); + + // --- + // 6. 
Clean shutdown - flush pending spans and metrics on process exit + // --- + tokio::runtime::Runtime::new().unwrap().block_on(async { + tokio::signal::ctrl_c().await.ok(); + + #[cfg(feature = "otel")] + shutdown_otel().await; + }); + iii.shutdown(); +} diff --git a/skills/references/queue-processing.js b/skills/references/queue-processing.js new file mode 100644 index 000000000..ff8ed2b52 --- /dev/null +++ b/skills/references/queue-processing.js @@ -0,0 +1,207 @@ +/** + * Pattern: Queue Processing + * Comparable to: BullMQ, Celery, SQS + * + * Enqueue work for durable, retryable async processing. + * Standard queues process concurrently; FIFO queues preserve order. + * + * Retry / backoff is configured in iii-config.yaml under queue_configs: + * queue_configs: + * - name: payment + * max_retries: 3 + * backoff_ms: 1000 + * backoff_multiplier: 2 + * - name: email + * fifo: true + * max_retries: 5 + * backoff_ms: 500 + * + * How-to references: + * - Queues: https://iii.dev/docs/how-to/use-queues + */ + +import { registerWorker, Logger, TriggerAction } from 'iii-sdk' + +const iii = registerWorker(process.env.III_ENGINE_URL || 'ws://localhost:49134', { + workerName: 'queue-processing', +}) + +// --------------------------------------------------------------------------- +// Enqueue work — standard queue (concurrent processing) +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'payments::submit' }, async (data) => { + const logger = new Logger() + + try { + const result = await iii.trigger({ + function_id: 'payments::process', + payload: { + orderId: data.orderId, + amount: data.amount, + currency: data.currency || 'usd', + method: data.paymentMethod, + }, + action: TriggerAction.Enqueue({ queue: 'payment' }), + }) + + logger.info('Payment enqueued', { + orderId: data.orderId, + messageReceiptId: result.messageReceiptId, + }) + + return { status: 'queued', messageReceiptId: result.messageReceiptId } + } catch 
(err) { + logger.error('Failed to enqueue payment', { orderId: data.orderId, error: err.message }) + throw err + } +}) + +// --------------------------------------------------------------------------- +// Process payment — handler that runs from the queue +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'payments::process' }, async (data) => { + const logger = new Logger() + logger.info('Processing payment', { orderId: data.orderId, amount: data.amount }) + + // Simulate payment gateway call + const chargeId = `ch-${Date.now()}` + + await iii.trigger({ + function_id: 'state::set', + payload: { + scope: 'payments', + key: data.orderId, + value: { + orderId: data.orderId, + chargeId, + amount: data.amount, + currency: data.currency, + status: 'captured', + processed_at: new Date().toISOString(), + }, + }, + }) + + // Fire-and-forget notification + iii.trigger({ + function_id: 'notifications::send', + payload: { type: 'payment_captured', orderId: data.orderId, chargeId }, + action: TriggerAction.Void(), + }) + + logger.info('Payment captured', { orderId: data.orderId, chargeId }) + return { chargeId, status: 'captured' } +}) + +// --------------------------------------------------------------------------- +// Enqueue work — FIFO queue (ordered processing) +// FIFO queues guarantee messages are processed in the order they arrive. +// Configure fifo: true in iii-config.yaml queue_configs. 
+// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'emails::enqueue' }, async (data) => { + const logger = new Logger() + + const result = await iii.trigger({ + function_id: 'emails::send', + payload: { + to: data.to, + subject: data.subject, + body: data.body, + template: data.template, + }, + action: TriggerAction.Enqueue({ queue: 'email' }), + }) + + logger.info('Email enqueued (FIFO)', { + to: data.to, + messageReceiptId: result.messageReceiptId, + }) + + return { status: 'queued', messageReceiptId: result.messageReceiptId } +}) + +// --------------------------------------------------------------------------- +// Process email — FIFO handler preserves send order +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'emails::send' }, async (data) => { + const logger = new Logger() + logger.info('Sending email', { to: data.to, subject: data.subject }) + + // Simulate email sending + const messageId = `msg-${Date.now()}` + + await iii.trigger({ + function_id: 'state::set', + payload: { + scope: 'email-log', + key: messageId, + value: { + messageId, + to: data.to, + subject: data.subject, + status: 'sent', + sent_at: new Date().toISOString(), + }, + }, + }) + + logger.info('Email sent', { messageId, to: data.to }) + return { messageId, status: 'sent' } +}) + +// --------------------------------------------------------------------------- +// Receipt capture — checking enqueue acknowledgement +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'orders::place' }, async (data) => { + const logger = new Logger() + + // Enqueue payment + const paymentReceipt = await iii.trigger({ + function_id: 'payments::process', + payload: { orderId: data.orderId, amount: data.total, currency: 'usd', method: data.method }, + action: TriggerAction.Enqueue({ queue: 'payment' }), + }) + + // Enqueue confirmation email + 
const emailReceipt = await iii.trigger({ + function_id: 'emails::send', + payload: { to: data.email, subject: 'Order confirmed', body: `Order ${data.orderId}` }, + action: TriggerAction.Enqueue({ queue: 'email' }), + }) + + logger.info('Order placed', { + orderId: data.orderId, + paymentReceipt: paymentReceipt.messageReceiptId, + emailReceipt: emailReceipt.messageReceiptId, + }) + + // Store receipts for tracking + await iii.trigger({ + function_id: 'state::set', + payload: { + scope: 'orders', + key: data.orderId, + value: { + orderId: data.orderId, + status: 'pending', + paymentReceiptId: paymentReceipt.messageReceiptId, + emailReceiptId: emailReceipt.messageReceiptId, + }, + }, + }) + + return { + orderId: data.orderId, + paymentReceiptId: paymentReceipt.messageReceiptId, + emailReceiptId: emailReceipt.messageReceiptId, + } +}) + +// --------------------------------------------------------------------------- +// HTTP trigger to accept orders +// --------------------------------------------------------------------------- +iii.registerTrigger({ + type: 'http', + function_id: 'orders::place', + config: { api_path: '/orders', http_method: 'POST' }, +}) diff --git a/skills/references/queue-processing.py b/skills/references/queue-processing.py new file mode 100644 index 000000000..e308f7fa8 --- /dev/null +++ b/skills/references/queue-processing.py @@ -0,0 +1,235 @@ +""" +Pattern: Queue Processing +Comparable to: BullMQ, Celery, SQS + +Enqueue work for durable, retryable async processing. +Standard queues process concurrently; FIFO queues preserve order. 
+ +Retry / backoff is configured in iii-config.yaml under queue_configs: + queue_configs: + - name: payment + max_retries: 3 + backoff_ms: 1000 + backoff_multiplier: 2 + - name: email + fifo: true + max_retries: 5 + backoff_ms: 500 + +How-to references: + - Queues: https://iii.dev/docs/how-to/use-queues +""" + +import asyncio +import os +import time +from datetime import datetime, timezone + +from iii import InitOptions, Logger, TriggerAction, register_worker + +iii = register_worker( + address=os.environ.get("III_ENGINE_URL", "ws://localhost:49134"), + options=InitOptions(worker_name="queue-processing"), +) + +# --- +# Enqueue work — standard queue (concurrent processing) +# --- + + +async def payments_submit(data): + logger = Logger() + + try: + result = await iii.trigger_async({ + "function_id": "payments::process", + "payload": { + "orderId": data["orderId"], + "amount": data["amount"], + "currency": data.get("currency", "usd"), + "method": data["paymentMethod"], + }, + "action": TriggerAction.Enqueue({"queue": "payment"}), + }) + + logger.info("Payment enqueued", { + "orderId": data["orderId"], + "messageReceiptId": result["messageReceiptId"], + }) + + return {"status": "queued", "messageReceiptId": result["messageReceiptId"]} + except Exception as err: + logger.error("Failed to enqueue payment", {"orderId": data["orderId"], "error": str(err)}) + raise + + +iii.register_function("payments::submit", payments_submit) + +# --- +# Process payment — handler that runs from the queue +# --- + + +async def payments_process(data): + logger = Logger() + logger.info("Processing payment", {"orderId": data["orderId"], "amount": data["amount"]}) + + charge_id = f"ch-{int(time.time() * 1000)}" + + await iii.trigger_async({ + "function_id": "state::set", + "payload": { + "scope": "payments", + "key": data["orderId"], + "value": { + "orderId": data["orderId"], + "chargeId": charge_id, + "amount": data["amount"], + "currency": data["currency"], + "status": "captured", + 
"processed_at": datetime.now(timezone.utc).isoformat(), + }, + }, + }) + + iii.trigger({ + "function_id": "notifications::send", + "payload": {"type": "payment_captured", "orderId": data["orderId"], "chargeId": charge_id}, + "action": TriggerAction.Void(), + }) + + logger.info("Payment captured", {"orderId": data["orderId"], "chargeId": charge_id}) + return {"chargeId": charge_id, "status": "captured"} + + +iii.register_function("payments::process", payments_process) + +# --- +# Enqueue work — FIFO queue (ordered processing) +# FIFO queues guarantee messages are processed in the order they arrive. +# Configure fifo: true in iii-config.yaml queue_configs. +# --- + + +async def emails_enqueue(data): + logger = Logger() + + result = await iii.trigger_async({ + "function_id": "emails::send", + "payload": { + "to": data["to"], + "subject": data["subject"], + "body": data["body"], + "template": data.get("template"), + }, + "action": TriggerAction.Enqueue({"queue": "email"}), + }) + + logger.info("Email enqueued (FIFO)", { + "to": data["to"], + "messageReceiptId": result["messageReceiptId"], + }) + + return {"status": "queued", "messageReceiptId": result["messageReceiptId"]} + + +iii.register_function("emails::enqueue", emails_enqueue) + +# --- +# Process email — FIFO handler preserves send order +# --- + + +async def emails_send(data): + logger = Logger() + logger.info("Sending email", {"to": data["to"], "subject": data["subject"]}) + + message_id = f"msg-{int(time.time() * 1000)}" + + await iii.trigger_async({ + "function_id": "state::set", + "payload": { + "scope": "email-log", + "key": message_id, + "value": { + "messageId": message_id, + "to": data["to"], + "subject": data["subject"], + "status": "sent", + "sent_at": datetime.now(timezone.utc).isoformat(), + }, + }, + }) + + logger.info("Email sent", {"messageId": message_id, "to": data["to"]}) + return {"messageId": message_id, "status": "sent"} + + +iii.register_function("emails::send", emails_send) + +# --- +# 
Receipt capture — checking enqueue acknowledgement +# --- + + +async def orders_place(data): + logger = Logger() + + payment_receipt = await iii.trigger_async({ + "function_id": "payments::process", + "payload": {"orderId": data["orderId"], "amount": data["total"], "currency": "usd", "method": data["method"]}, + "action": TriggerAction.Enqueue({"queue": "payment"}), + }) + + email_receipt = await iii.trigger_async({ + "function_id": "emails::send", + "payload": {"to": data["email"], "subject": "Order confirmed", "body": f"Order {data['orderId']}"}, + "action": TriggerAction.Enqueue({"queue": "email"}), + }) + + logger.info("Order placed", { + "orderId": data["orderId"], + "paymentReceipt": payment_receipt["messageReceiptId"], + "emailReceipt": email_receipt["messageReceiptId"], + }) + + await iii.trigger_async({ + "function_id": "state::set", + "payload": { + "scope": "orders", + "key": data["orderId"], + "value": { + "orderId": data["orderId"], + "status": "pending", + "paymentReceiptId": payment_receipt["messageReceiptId"], + "emailReceiptId": email_receipt["messageReceiptId"], + }, + }, + }) + + return { + "orderId": data["orderId"], + "paymentReceiptId": payment_receipt["messageReceiptId"], + "emailReceiptId": email_receipt["messageReceiptId"], + } + + +iii.register_function("orders::place", orders_place) + +# --- +# HTTP trigger to accept orders +# --- +iii.register_trigger({ + "type": "http", + "function_id": "orders::place", + "config": {"api_path": "/orders", "http_method": "POST"}, +}) + + +async def main(): + while True: + await asyncio.sleep(60) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/skills/references/queue-processing.rs b/skills/references/queue-processing.rs new file mode 100644 index 000000000..644e9dc3b --- /dev/null +++ b/skills/references/queue-processing.rs @@ -0,0 +1,324 @@ +/// Pattern: Queue Processing +/// Comparable to: BullMQ, Celery, SQS +/// +/// Enqueue work for durable, retryable async processing. 
+
+/// Standard queues process concurrently; FIFO queues preserve order.
+///
+/// Retry / backoff is configured in iii-config.yaml under queue_configs:
+///   queue_configs:
+///     - name: payment
+///       max_retries: 3
+///       backoff_ms: 1000
+///       backoff_multiplier: 2
+///     - name: email
+///       fifo: true
+///       max_retries: 5
+///       backoff_ms: 500
+
+use iii_sdk::{
+    register_worker, InitOptions, RegisterFunction, TriggerRequest, TriggerAction,
+    builtin_triggers::*, IIITrigger, Logger,
+};
+use serde_json::json;
+use std::time::Duration;
+
+use serde;
+use schemars;
+
+#[derive(serde::Deserialize, schemars::JsonSchema)]
+struct SubmitPaymentInput {
+    #[serde(rename = "orderId")]
+    order_id: String,
+    amount: f64,
+    currency: Option<String>,
+    #[serde(rename = "paymentMethod")]
+    payment_method: String,
+}
+
+#[derive(serde::Deserialize, schemars::JsonSchema)]
+struct ProcessPaymentInput {
+    #[serde(rename = "orderId")]
+    order_id: String,
+    amount: f64,
+    currency: String,
+    method: Option<String>,
+}
+
+#[derive(serde::Deserialize, schemars::JsonSchema)]
+struct EnqueueEmailInput {
+    to: String,
+    subject: String,
+    body: String,
+    template: Option<String>,
+}
+
+#[derive(serde::Deserialize, schemars::JsonSchema)]
+struct SendEmailInput {
+    to: String,
+    subject: String,
+    body: Option<String>,
+    template: Option<String>,
+}
+
+#[derive(serde::Deserialize, schemars::JsonSchema)]
+struct PlaceOrderInput {
+    #[serde(rename = "orderId")]
+    order_id: String,
+    total: f64,
+    method: Option<String>,
+    email: String,
+}
+
+fn main() {
+    let url = std::env::var("III_ENGINE_URL").unwrap_or("ws://127.0.0.1:49134".into());
+    let iii = register_worker(&url, InitOptions::default());
+
+    // ---
+    // Enqueue work - standard queue (concurrent processing)
+    // ---
+    let iii_clone = iii.clone();
+    iii.register_function(
+        RegisterFunction::new_async("payments::submit", move |data: SubmitPaymentInput| {
+            let iii = iii_clone.clone();
+            async move {
+                let logger = Logger::new();
+
+                let result = iii
+                    .trigger(TriggerRequest {
+                        function_id: "payments::process".into(),
+                        payload: json!({
+                            "orderId": data.order_id,
+                            "amount": data.amount,
+                            "currency": data.currency.unwrap_or("usd".into()),
+                            "method": data.payment_method,
+                        }),
+                        action: Some(TriggerAction::Enqueue { queue: "payment".into() }),
+                        timeout_ms: None,
+                    })
+                    .await
+                    .map_err(|e| e.to_string())?;
+
+                logger.info("Payment enqueued", &json!({
+                    "orderId": data.order_id,
+                    "messageReceiptId": result["messageReceiptId"],
+                }));
+
+                Ok(json!({ "status": "queued", "messageReceiptId": result["messageReceiptId"] }))
+            }
+        })
+        .description("Submit a payment for queued processing"),
+    );
+
+    // ---
+    // Process payment - handler that runs from the queue
+    // ---
+    let iii_clone = iii.clone();
+    iii.register_function(
+        RegisterFunction::new_async("payments::process", move |data: ProcessPaymentInput| {
+            let iii = iii_clone.clone();
+            async move {
+                let logger = Logger::new();
+                logger.info("Processing payment", &json!({ "orderId": data.order_id, "amount": data.amount }));
+
+                let charge_id = format!("ch-{}", chrono::Utc::now().timestamp_millis());
+
+                iii.trigger(TriggerRequest {
+                    function_id: "state::set".into(),
+                    payload: json!({
+                        "scope": "payments",
+                        "key": data.order_id,
+                        "value": {
+                            "orderId": data.order_id,
+                            "chargeId": charge_id,
+                            "amount": data.amount,
+                            "currency": data.currency,
+                            "status": "captured",
+                            "processed_at": chrono::Utc::now().to_rfc3339(),
+                        },
+                    }),
+                    action: None,
+                    timeout_ms: None,
+                })
+                .await
+                .map_err(|e| e.to_string())?;
+
+                iii.trigger(TriggerRequest {
+                    function_id: "notifications::send".into(),
+                    payload: json!({ "type": "payment_captured", "orderId": data.order_id, "chargeId": charge_id }),
+                    action: Some(TriggerAction::Void),
+                    timeout_ms: None,
+                })
+                .await
+                .ok();
+
+                logger.info("Payment captured", &json!({ "orderId": data.order_id, "chargeId": charge_id }));
+                Ok(json!({ "chargeId": charge_id, "status": "captured" }))
+            }
+        })
+        .description("Process a payment from the queue"),
+    );
+
+    // ---
+ // Enqueue work - FIFO queue (ordered processing) + // FIFO queues guarantee messages are processed in the order they arrive. + // Configure fifo: true in iii-config.yaml queue_configs. + // --- + let iii_clone = iii.clone(); + iii.register_function( + RegisterFunction::new_async("emails::enqueue", move |data: EnqueueEmailInput| { + let iii = iii_clone.clone(); + async move { + let logger = Logger::new(); + + let result = iii + .trigger(TriggerRequest { + function_id: "emails::send".into(), + payload: json!({ + "to": data.to, + "subject": data.subject, + "body": data.body, + "template": data.template, + }), + action: Some(TriggerAction::Enqueue { queue: "email".into() }), + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + logger.info("Email enqueued (FIFO)", &json!({ + "to": data.to, + "messageReceiptId": result["messageReceiptId"], + })); + + Ok(json!({ "status": "queued", "messageReceiptId": result["messageReceiptId"] })) + } + }) + .description("Enqueue an email for FIFO delivery"), + ); + + // --- + // Process email - FIFO handler preserves send order + // --- + let iii_clone = iii.clone(); + iii.register_function( + RegisterFunction::new_async("emails::send", move |data: SendEmailInput| { + let iii = iii_clone.clone(); + async move { + let logger = Logger::new(); + logger.info("Sending email", &json!({ "to": data.to, "subject": data.subject })); + + let message_id = format!("msg-{}", chrono::Utc::now().timestamp_millis()); + + iii.trigger(TriggerRequest { + function_id: "state::set".into(), + payload: json!({ + "scope": "email-log", + "key": message_id, + "value": { + "messageId": message_id, + "to": data.to, + "subject": data.subject, + "status": "sent", + "sent_at": chrono::Utc::now().to_rfc3339(), + }, + }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + logger.info("Email sent", &json!({ "messageId": message_id, "to": data.to })); + Ok(json!({ "messageId": message_id, "status": "sent" })) + } + }) 
+ .description("Send an email from the FIFO queue"), + ); + + // --- + // Receipt capture - checking enqueue acknowledgement + // --- + let iii_clone = iii.clone(); + iii.register_function( + RegisterFunction::new_async("orders::place", move |data: PlaceOrderInput| { + let iii = iii_clone.clone(); + async move { + let logger = Logger::new(); + + let payment_receipt = iii + .trigger(TriggerRequest { + function_id: "payments::process".into(), + payload: json!({ + "orderId": data.order_id, + "amount": data.total, + "currency": "usd", + "method": data.method, + }), + action: Some(TriggerAction::Enqueue { queue: "payment".into() }), + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + let email_receipt = iii + .trigger(TriggerRequest { + function_id: "emails::send".into(), + payload: json!({ + "to": data.email, + "subject": "Order confirmed", + "body": format!("Order {}", data.order_id), + }), + action: Some(TriggerAction::Enqueue { queue: "email".into() }), + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + logger.info("Order placed", &json!({ + "orderId": data.order_id, + "paymentReceipt": payment_receipt["messageReceiptId"], + "emailReceipt": email_receipt["messageReceiptId"], + })); + + iii.trigger(TriggerRequest { + function_id: "state::set".into(), + payload: json!({ + "scope": "orders", + "key": data.order_id, + "value": { + "orderId": data.order_id, + "status": "pending", + "paymentReceiptId": payment_receipt["messageReceiptId"], + "emailReceiptId": email_receipt["messageReceiptId"], + }, + }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + Ok(json!({ + "orderId": data.order_id, + "paymentReceiptId": payment_receipt["messageReceiptId"], + "emailReceiptId": email_receipt["messageReceiptId"], + })) + } + }) + .description("Place an order with queued payment and email"), + ); + + // --- + // HTTP trigger to accept orders + // --- + iii.register_trigger( + 
IIITrigger::Http(HttpTriggerConfig::new("/orders").method(HttpMethod::Post)) + .for_function("orders::place"), + ) + .expect("failed"); + + tokio::runtime::Runtime::new().unwrap().block_on(async { + tokio::signal::ctrl_c().await.ok(); + }); + iii.shutdown(); +} diff --git a/skills/references/reactive-backend.js b/skills/references/reactive-backend.js new file mode 100644 index 000000000..f31d61125 --- /dev/null +++ b/skills/references/reactive-backend.js @@ -0,0 +1,177 @@ +/** + * Pattern: Reactive Backend + * Comparable to: Convex, Firebase, Supabase, Appwrite + * + * Demonstrates a real-time todo app backend where state changes + * automatically trigger side effects (notifications, metrics) and + * clients receive live updates via streams. + * + * How-to references: + * - State management: https://iii.dev/docs/how-to/manage-state + * - State reactions: https://iii.dev/docs/how-to/react-to-state-changes + * - Streams: https://iii.dev/docs/how-to/stream-realtime-data + * - HTTP endpoints: https://iii.dev/docs/how-to/expose-http-endpoint + */ + +import { registerWorker, Logger, TriggerAction } from 'iii-sdk' + +const iii = registerWorker(process.env.III_ENGINE_URL || 'ws://localhost:49134', { + workerName: 'reactive-backend', +}) + +// --------------------------------------------------------------------------- +// CRUD — HTTP endpoints that write to state (the "database") +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'todos::create' }, async (data) => { + const id = `todo-${Date.now()}` + const todo = { + _key: id, + id, + title: data.title, + completed: false, + created_at: new Date().toISOString(), + } + + await iii.trigger({ + function_id: 'state::set', + payload: { scope: 'todos', key: id, value: todo }, + }) + + return todo +}) + +iii.registerFunction({ id: 'todos::toggle' }, async (data) => { + const todo = await iii.trigger({ + function_id: 'state::get', + payload: { scope: 'todos', key: data.id }, 
+ }) + + if (!todo) throw new Error(`Todo ${data.id} not found`) + + await iii.trigger({ + function_id: 'state::update', + payload: { + scope: 'todos', + key: data.id, + ops: [ + { type: 'set', path: 'completed', value: !todo.completed }, + { type: 'set', path: 'updated_at', value: new Date().toISOString() }, + ], + }, + }) + + return { id: data.id, completed: !todo.completed } +}) + +iii.registerFunction({ id: 'todos::list' }, async () => { + return await iii.trigger({ + function_id: 'state::list', + payload: { scope: 'todos' }, + }) +}) + +iii.registerFunction({ id: 'todos::delete' }, async (data) => { + await iii.trigger({ + function_id: 'state::delete', + payload: { scope: 'todos', key: data.id }, + }) + return { deleted: data.id } +}) + +// HTTP triggers +iii.registerTrigger({ type: 'http', function_id: 'todos::create', config: { api_path: '/todos', http_method: 'POST' } }) +iii.registerTrigger({ type: 'http', function_id: 'todos::list', config: { api_path: '/todos', http_method: 'GET' } }) +iii.registerTrigger({ type: 'http', function_id: 'todos::toggle', config: { api_path: '/todos/toggle', http_method: 'POST' } }) +iii.registerTrigger({ type: 'http', function_id: 'todos::delete', config: { api_path: '/todos/delete', http_method: 'POST' } }) + +// --------------------------------------------------------------------------- +// Reactive side effect — push changes to connected clients via stream +// Fires automatically whenever ANY todo in the 'todos' scope changes. +// Clients connect via: ws://localhost:3112/stream/todos-live/all +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'todos::on-change' }, async (event) => { + const { new_value, old_value, key } = event + const logger = new Logger() + + const action = !old_value ? 'created' : !new_value ? 
'deleted' : 'updated' + logger.info('Todo changed', { key, action }) + + // Push the change to all connected clients + iii.trigger({ + function_id: 'stream::send', + payload: { + stream_name: 'todos-live', + group_id: 'all', + id: `change-${Date.now()}`, + event_type: 'todo_changed', + data: { action, key, todo: new_value }, + }, + action: TriggerAction.Void(), + }) + + return { action, key } +}) + +iii.registerTrigger({ + type: 'state', + function_id: 'todos::on-change', + config: { scope: 'todos' }, +}) + +// --------------------------------------------------------------------------- +// Reactive side effect — update aggregate metrics on any change +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'todos::update-metrics' }, async (event) => { + const { new_value, old_value } = event + + const ops = [] + + // New todo created + if (new_value && !old_value) { + ops.push({ type: 'increment', path: 'total', by: 1 }) + } + + // Todo deleted + if (!new_value && old_value) { + ops.push({ type: 'increment', path: 'total', by: -1 }) + if (old_value.completed) { + ops.push({ type: 'increment', path: 'completed', by: -1 }) + } + } + + // Todo toggled + if (new_value && old_value && new_value.completed !== old_value.completed) { + ops.push({ + type: 'increment', + path: 'completed', + by: new_value.completed ? 
1 : -1, + }) + } + + if (ops.length > 0) { + await iii.trigger({ + function_id: 'state::update', + payload: { scope: 'todo-metrics', key: 'global', ops }, + }) + } +}) + +iii.registerTrigger({ + type: 'state', + function_id: 'todos::update-metrics', + config: { scope: 'todos' }, +}) + +// Expose metrics via HTTP +iii.registerFunction({ id: 'todos::get-metrics' }, async () => { + return await iii.trigger({ + function_id: 'state::get', + payload: { scope: 'todo-metrics', key: 'global' }, + }) +}) + +iii.registerTrigger({ + type: 'http', + function_id: 'todos::get-metrics', + config: { api_path: '/todos/metrics', http_method: 'GET' }, +}) diff --git a/skills/references/realtime-streams.js b/skills/references/realtime-streams.js new file mode 100644 index 000000000..ef61a49f1 --- /dev/null +++ b/skills/references/realtime-streams.js @@ -0,0 +1,250 @@ +/** + * Pattern: Realtime Streams + * Comparable to: Socket.io, Pusher, Firebase Realtime + * + * Push live data to connected WebSocket clients. + * Clients connect at: ws://host:3112/stream/{stream_name}/{group_id} + * + * Built-in stream operations: stream::set, stream::get, stream::list, + * stream::delete, stream::send. Use createStream for custom adapters. 
+ * + * How-to references: + * - Realtime streams: https://iii.dev/docs/how-to/stream-realtime-data + */ + +import { registerWorker, Logger, TriggerAction } from 'iii-sdk' + +const iii = registerWorker(process.env.III_ENGINE_URL || 'ws://localhost:49134', { + workerName: 'realtime-streams', +}) + +// --------------------------------------------------------------------------- +// stream::set — Persist an item in a stream group +// Payload: { stream_name, group_id, item_id, data } +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'chat::post-message' }, async (data) => { + const logger = new Logger() + const messageId = `msg-${Date.now()}` + + await iii.trigger({ + function_id: 'stream::set', + payload: { + stream_name: 'chat', + group_id: data.room, + item_id: messageId, + data: { + sender: data.sender, + text: data.text, + timestamp: new Date().toISOString(), + }, + }, + }) + + logger.info('Message stored in stream', { room: data.room, messageId }) + return { messageId } +}) + +// --------------------------------------------------------------------------- +// stream::get — Retrieve a single item from a stream group +// Payload: { stream_name, group_id, item_id } +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'chat::get-message' }, async (data) => { + const message = await iii.trigger({ + function_id: 'stream::get', + payload: { + stream_name: 'chat', + group_id: data.room, + item_id: data.messageId, + }, + }) + + if (!message) { + return { error: 'Message not found' } + } + + return message +}) + +// --------------------------------------------------------------------------- +// stream::list — List all items in a stream group +// Payload: { stream_name, group_id } +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'chat::list-messages' }, async (data) => { + const messages = await 
iii.trigger({ + function_id: 'stream::list', + payload: { + stream_name: 'chat', + group_id: data.room, + }, + }) + + return { room: data.room, messages: messages || [] } +}) + +// --------------------------------------------------------------------------- +// stream::delete — Remove an item from a stream group +// Payload: { stream_name, group_id, item_id } +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'chat::delete-message' }, async (data) => { + await iii.trigger({ + function_id: 'stream::delete', + payload: { + stream_name: 'chat', + group_id: data.room, + item_id: data.messageId, + }, + }) + + return { deleted: data.messageId } +}) + +// --------------------------------------------------------------------------- +// stream::send — Push a live event to all connected clients +// Clients on ws://host:3112/stream/chat/{room} receive this instantly. +// Use TriggerAction.Void() for fire-and-forget delivery. +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'chat::broadcast' }, async (data) => { + const logger = new Logger() + const eventId = `evt-${Date.now()}` + + // Store the message + await iii.trigger({ + function_id: 'stream::set', + payload: { + stream_name: 'chat', + group_id: data.room, + item_id: eventId, + data: { + sender: data.sender, + text: data.text, + timestamp: new Date().toISOString(), + }, + }, + }) + + // Push live event to connected WebSocket clients (fire-and-forget) + iii.trigger({ + function_id: 'stream::send', + payload: { + stream_name: 'chat', + group_id: data.room, + id: eventId, + event_type: 'new_message', + data: { + sender: data.sender, + text: data.text, + timestamp: new Date().toISOString(), + }, + }, + action: TriggerAction.Void(), + }) + + logger.info('Message broadcast', { room: data.room, eventId }) + return { eventId } +}) + +// --------------------------------------------------------------------------- +// 
createStream — Custom stream adapter with get/set/delete/list/listGroups +// Useful for integrating external data sources as stream backends. +// --------------------------------------------------------------------------- +iii.createStream('presence', { + get: async ({ group_id, item_id }) => { + return await iii.trigger({ + function_id: 'state::get', + payload: { scope: `presence::${group_id}`, key: item_id }, + }) + }, + set: async ({ group_id, item_id, data }) => { + await iii.trigger({ + function_id: 'state::set', + payload: { + scope: `presence::${group_id}`, + key: item_id, + value: { ...data, updated_at: new Date().toISOString() }, + }, + }) + }, + delete: async ({ group_id, item_id }) => { + await iii.trigger({ + function_id: 'state::delete', + payload: { scope: `presence::${group_id}`, key: item_id }, + }) + }, + list: async ({ group_id }) => { + return await iii.trigger({ + function_id: 'state::list', + payload: { scope: `presence::${group_id}` }, + }) + }, + listGroups: async () => { + const registry = await iii.trigger({ + function_id: 'state::get', + payload: { scope: 'presence-registry', key: 'groups' }, + }) + return registry?.groups || [] + }, +}) + +// --------------------------------------------------------------------------- +// Presence tracking — user joins/leaves +// Clients connect at: ws://host:3112/stream/presence/{room} +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'presence::join' }, async (data) => { + await iii.trigger({ + function_id: 'stream::set', + payload: { + stream_name: 'presence', + group_id: data.room, + item_id: data.userId, + data: { userId: data.userId, name: data.name, status: 'online' }, + }, + }) + + // Notify all connected clients + iii.trigger({ + function_id: 'stream::send', + payload: { + stream_name: 'presence', + group_id: data.room, + id: `join-${Date.now()}`, + event_type: 'user_joined', + data: { userId: data.userId, name: data.name }, + }, + action: 
TriggerAction.Void(), + }) + + return { joined: data.room } +}) + +iii.registerFunction({ id: 'presence::leave' }, async (data) => { + await iii.trigger({ + function_id: 'stream::delete', + payload: { + stream_name: 'presence', + group_id: data.room, + item_id: data.userId, + }, + }) + + iii.trigger({ + function_id: 'stream::send', + payload: { + stream_name: 'presence', + group_id: data.room, + id: `leave-${Date.now()}`, + event_type: 'user_left', + data: { userId: data.userId }, + }, + action: TriggerAction.Void(), + }) + + return { left: data.room } +}) + +// --------------------------------------------------------------------------- +// HTTP triggers +// --------------------------------------------------------------------------- +iii.registerTrigger({ type: 'http', function_id: 'chat::broadcast', config: { api_path: '/chat/send', http_method: 'POST' } }) +iii.registerTrigger({ type: 'http', function_id: 'chat::list-messages', config: { api_path: '/chat/:room/messages', http_method: 'GET' } }) +iii.registerTrigger({ type: 'http', function_id: 'presence::join', config: { api_path: '/presence/join', http_method: 'POST' } }) +iii.registerTrigger({ type: 'http', function_id: 'presence::leave', config: { api_path: '/presence/leave', http_method: 'POST' } }) diff --git a/skills/references/realtime-streams.py b/skills/references/realtime-streams.py new file mode 100644 index 000000000..2ab9afb46 --- /dev/null +++ b/skills/references/realtime-streams.py @@ -0,0 +1,253 @@ +""" +Pattern: Realtime Streams +Comparable to: Socket.io, Pusher, Firebase Realtime + +Push live data to connected WebSocket clients. +Clients connect at: ws://host:3112/stream/{stream_name}/{group_id} + +Built-in stream operations: stream::set, stream::get, stream::list, +stream::delete, stream::send. + +Note: The Python SDK does not support createStream for custom adapters. +Use the built-in stream operations and state-backed presence instead. 
+ +How-to references: + - Realtime streams: https://iii.dev/docs/how-to/stream-realtime-data +""" + +import asyncio +import os +import uuid +from datetime import datetime, timezone + +from iii import InitOptions, Logger, TriggerAction, register_worker + +iii = register_worker( + address=os.environ.get("III_ENGINE_URL", "ws://localhost:49134"), + options=InitOptions(worker_name="realtime-streams"), +) + +# --- +# stream::set — Persist an item in a stream group +# Payload: { stream_name, group_id, item_id, data } +# --- + + +async def post_message(data): + logger = Logger() + message_id = f"msg-{uuid.uuid4().hex}" + + await iii.trigger_async({ + "function_id": "stream::set", + "payload": { + "stream_name": "chat", + "group_id": data["room"], + "item_id": message_id, + "data": { + "sender": data["sender"], + "text": data["text"], + "timestamp": datetime.now(timezone.utc).isoformat(), + }, + }, + }) + + logger.info("Message stored in stream", {"room": data["room"], "messageId": message_id}) + return {"messageId": message_id} + + +iii.register_function("chat::post-message", post_message) + +# --- +# stream::get — Retrieve a single item from a stream group +# Payload: { stream_name, group_id, item_id } +# --- + + +async def get_message(data): + message = await iii.trigger_async({ + "function_id": "stream::get", + "payload": { + "stream_name": "chat", + "group_id": data["room"], + "item_id": data["messageId"], + }, + }) + + if not message: + return {"error": "Message not found"} + + return message + + +iii.register_function("chat::get-message", get_message) + +# --- +# stream::list — List all items in a stream group +# Payload: { stream_name, group_id } +# --- + + +async def list_messages(data): + messages = await iii.trigger_async({ + "function_id": "stream::list", + "payload": { + "stream_name": "chat", + "group_id": data["room"], + }, + }) + + return {"room": data["room"], "messages": messages or []} + + +iii.register_function("chat::list-messages", list_messages) + +# 
--- +# stream::delete — Remove an item from a stream group +# Payload: { stream_name, group_id, item_id } +# --- + + +async def delete_message(data): + await iii.trigger_async({ + "function_id": "stream::delete", + "payload": { + "stream_name": "chat", + "group_id": data["room"], + "item_id": data["messageId"], + }, + }) + + return {"deleted": data["messageId"]} + + +iii.register_function("chat::delete-message", delete_message) + +# --- +# stream::send — Push a live event to all connected clients +# Clients on ws://host:3112/stream/chat/{room} receive this instantly. +# Use TriggerAction.Void() for fire-and-forget delivery. +# --- + + +async def broadcast(data): + logger = Logger() + event_id = f"evt-{uuid.uuid4().hex}" + + await iii.trigger_async({ + "function_id": "stream::set", + "payload": { + "stream_name": "chat", + "group_id": data["room"], + "item_id": event_id, + "data": { + "sender": data["sender"], + "text": data["text"], + "timestamp": datetime.now(timezone.utc).isoformat(), + }, + }, + }) + + iii.trigger({ + "function_id": "stream::send", + "payload": { + "stream_name": "chat", + "group_id": data["room"], + "id": event_id, + "event_type": "new_message", + "data": { + "sender": data["sender"], + "text": data["text"], + "timestamp": datetime.now(timezone.utc).isoformat(), + }, + }, + "action": TriggerAction.Void(), + }) + + logger.info("Message broadcast", {"room": data["room"], "eventId": event_id}) + return {"eventId": event_id} + + +iii.register_function("chat::broadcast", broadcast) + +# --- +# Presence tracking — user joins/leaves +# Uses state-backed storage since Python SDK lacks createStream. 
+# Clients connect at: ws://host:3112/stream/presence/{room} +# --- + + +async def presence_join(data): + await iii.trigger_async({ + "function_id": "state::set", + "payload": { + "scope": f"presence::{data['room']}", + "key": data["userId"], + "value": { + "userId": data["userId"], + "name": data["name"], + "status": "online", + "updated_at": datetime.now(timezone.utc).isoformat(), + }, + }, + }) + + iii.trigger({ + "function_id": "stream::send", + "payload": { + "stream_name": "presence", + "group_id": data["room"], + "id": f"join-{uuid.uuid4().hex}", + "event_type": "user_joined", + "data": {"userId": data["userId"], "name": data["name"]}, + }, + "action": TriggerAction.Void(), + }) + + return {"joined": data["room"]} + + +iii.register_function("presence::join", presence_join) + + +async def presence_leave(data): + await iii.trigger_async({ + "function_id": "state::delete", + "payload": { + "scope": f"presence::{data['room']}", + "key": data["userId"], + }, + }) + + iii.trigger({ + "function_id": "stream::send", + "payload": { + "stream_name": "presence", + "group_id": data["room"], + "id": f"leave-{uuid.uuid4().hex}", + "event_type": "user_left", + "data": {"userId": data["userId"]}, + }, + "action": TriggerAction.Void(), + }) + + return {"left": data["room"]} + + +iii.register_function("presence::leave", presence_leave) + +# --- +# HTTP triggers +# --- +iii.register_trigger({"type": "http", "function_id": "chat::broadcast", "config": {"api_path": "/chat/send", "http_method": "POST"}}) +iii.register_trigger({"type": "http", "function_id": "chat::list-messages", "config": {"api_path": "/chat/:room/messages", "http_method": "GET"}}) +iii.register_trigger({"type": "http", "function_id": "presence::join", "config": {"api_path": "/presence/join", "http_method": "POST"}}) +iii.register_trigger({"type": "http", "function_id": "presence::leave", "config": {"api_path": "/presence/leave", "http_method": "POST"}}) + + +async def main(): + while True: + await 
asyncio.sleep(60) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/skills/references/realtime-streams.rs b/skills/references/realtime-streams.rs new file mode 100644 index 000000000..78fc4edd7 --- /dev/null +++ b/skills/references/realtime-streams.rs @@ -0,0 +1,368 @@ +/// Pattern: Realtime Streams +/// Comparable to: Socket.io, Pusher, Firebase Realtime +/// +/// Push live data to connected WebSocket clients. +/// Clients connect at: ws://host:3112/stream/{stream_name}/{group_id} +/// +/// Built-in stream operations: stream::set, stream::get, stream::list, +/// stream::delete, stream::send. + +use iii_sdk::{ + register_worker, InitOptions, RegisterFunction, TriggerRequest, TriggerAction, + builtin_triggers::*, IIITrigger, Logger, +}; +use serde_json::json; +use std::time::Duration; + +use serde; +use schemars; + +#[derive(serde::Deserialize, schemars::JsonSchema)] +struct PostMessageInput { + room: String, + sender: String, + text: String, +} + +#[derive(serde::Deserialize, schemars::JsonSchema)] +struct GetMessageInput { + room: String, + #[serde(rename = "messageId")] + message_id: String, +} + +#[derive(serde::Deserialize, schemars::JsonSchema)] +struct ListMessagesInput { + room: String, +} + +#[derive(serde::Deserialize, schemars::JsonSchema)] +struct DeleteMessageInput { + room: String, + #[serde(rename = "messageId")] + message_id: String, +} + +#[derive(serde::Deserialize, schemars::JsonSchema)] +struct BroadcastInput { + room: String, + sender: String, + text: String, +} + +#[derive(serde::Deserialize, schemars::JsonSchema)] +struct PresenceInput { + room: String, + #[serde(rename = "userId")] + user_id: String, + name: Option, +} + +fn main() { + let url = std::env::var("III_ENGINE_URL").unwrap_or("ws://127.0.0.1:49134".into()); + let iii = register_worker(&url, InitOptions::default()); + + // --- + // stream::set - Persist an item in a stream group + // Payload: { stream_name, group_id, item_id, data } + // --- + let iii_clone = 
iii.clone(); + iii.register_function( + RegisterFunction::new_async("chat::post-message", move |data: PostMessageInput| { + let iii = iii_clone.clone(); + async move { + let logger = Logger::new(); + let message_id = format!("msg-{}", chrono::Utc::now().timestamp_millis()); + + iii.trigger(TriggerRequest { + function_id: "stream::set".into(), + payload: json!({ + "stream_name": "chat", + "group_id": data.room, + "item_id": message_id, + "data": { + "sender": data.sender, + "text": data.text, + "timestamp": chrono::Utc::now().to_rfc3339(), + }, + }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + logger.info("Message stored in stream", &json!({ "room": data.room, "messageId": message_id })); + Ok(json!({ "messageId": message_id })) + } + }) + .description("Post a chat message to a room stream"), + ); + + // --- + // stream::get - Retrieve a single item from a stream group + // Payload: { stream_name, group_id, item_id } + // --- + let iii_clone = iii.clone(); + iii.register_function( + RegisterFunction::new_async("chat::get-message", move |data: GetMessageInput| { + let iii = iii_clone.clone(); + async move { + let message = iii + .trigger(TriggerRequest { + function_id: "stream::get".into(), + payload: json!({ + "stream_name": "chat", + "group_id": data.room, + "item_id": data.message_id, + }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + if message.is_null() { + return Ok(json!({ "error": "Message not found" })); + } + + Ok(message) + } + }) + .description("Get a single chat message"), + ); + + // --- + // stream::list - List all items in a stream group + // Payload: { stream_name, group_id } + // --- + let iii_clone = iii.clone(); + iii.register_function( + RegisterFunction::new_async("chat::list-messages", move |data: ListMessagesInput| { + let iii = iii_clone.clone(); + async move { + let messages = iii + .trigger(TriggerRequest { + function_id: "stream::list".into(), + payload: 
json!({ + "stream_name": "chat", + "group_id": data.room, + }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + let arr = messages.as_array().cloned().unwrap_or_default(); + Ok(json!({ "room": data.room, "messages": arr })) + } + }) + .description("List all messages in a chat room"), + ); + + // --- + // stream::delete - Remove an item from a stream group + // Payload: { stream_name, group_id, item_id } + // --- + let iii_clone = iii.clone(); + iii.register_function( + RegisterFunction::new_async("chat::delete-message", move |data: DeleteMessageInput| { + let iii = iii_clone.clone(); + async move { + iii.trigger(TriggerRequest { + function_id: "stream::delete".into(), + payload: json!({ + "stream_name": "chat", + "group_id": data.room, + "item_id": data.message_id, + }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + Ok(json!({ "deleted": data.message_id })) + } + }) + .description("Delete a chat message"), + ); + + // --- + // stream::send - Push a live event to all connected clients + // Clients on ws://host:3112/stream/chat/{room} receive this instantly. + // Use TriggerAction::Void for fire-and-forget delivery. 
+ // --- + let iii_clone = iii.clone(); + iii.register_function( + RegisterFunction::new_async("chat::broadcast", move |data: BroadcastInput| { + let iii = iii_clone.clone(); + async move { + let logger = Logger::new(); + let event_id = format!("evt-{}", chrono::Utc::now().timestamp_millis()); + let timestamp = chrono::Utc::now().to_rfc3339(); + + iii.trigger(TriggerRequest { + function_id: "stream::set".into(), + payload: json!({ + "stream_name": "chat", + "group_id": data.room, + "item_id": event_id, + "data": { + "sender": data.sender, + "text": data.text, + "timestamp": timestamp, + }, + }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + iii.trigger(TriggerRequest { + function_id: "stream::send".into(), + payload: json!({ + "stream_name": "chat", + "group_id": data.room, + "id": event_id, + "event_type": "new_message", + "data": { + "sender": data.sender, + "text": data.text, + "timestamp": timestamp, + }, + }), + action: Some(TriggerAction::Void), + timeout_ms: None, + }) + .await + .ok(); + + logger.info("Message broadcast", &json!({ "room": data.room, "eventId": event_id })); + Ok(json!({ "eventId": event_id })) + } + }) + .description("Broadcast a message to all connected clients"), + ); + + // --- + // Presence tracking - user joins/leaves + // Clients connect at: ws://host:3112/stream/presence/{room} + // --- + let iii_clone = iii.clone(); + iii.register_function( + RegisterFunction::new_async("presence::join", move |data: PresenceInput| { + let iii = iii_clone.clone(); + async move { + iii.trigger(TriggerRequest { + function_id: "stream::set".into(), + payload: json!({ + "stream_name": "presence", + "group_id": data.room, + "item_id": data.user_id, + "data": { + "userId": data.user_id, + "name": data.name, + "status": "online", + }, + }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + iii.trigger(TriggerRequest { + function_id: "stream::send".into(), + payload: json!({ + 
"stream_name": "presence", + "group_id": data.room, + "id": format!("join-{}", chrono::Utc::now().timestamp_millis()), + "event_type": "user_joined", + "data": { "userId": data.user_id, "name": data.name }, + }), + action: Some(TriggerAction::Void), + timeout_ms: None, + }) + .await + .ok(); + + Ok(json!({ "joined": data.room })) + } + }) + .description("User joins a presence room"), + ); + + let iii_clone = iii.clone(); + iii.register_function( + RegisterFunction::new_async("presence::leave", move |data: PresenceInput| { + let iii = iii_clone.clone(); + async move { + iii.trigger(TriggerRequest { + function_id: "stream::delete".into(), + payload: json!({ + "stream_name": "presence", + "group_id": data.room, + "item_id": data.user_id, + }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + iii.trigger(TriggerRequest { + function_id: "stream::send".into(), + payload: json!({ + "stream_name": "presence", + "group_id": data.room, + "id": format!("leave-{}", chrono::Utc::now().timestamp_millis()), + "event_type": "user_left", + "data": { "userId": data.user_id }, + }), + action: Some(TriggerAction::Void), + timeout_ms: None, + }) + .await + .ok(); + + Ok(json!({ "left": data.room })) + } + }) + .description("User leaves a presence room"), + ); + + // --- + // HTTP triggers + // --- + iii.register_trigger( + IIITrigger::Http(HttpTriggerConfig::new("/chat/send").method(HttpMethod::Post)) + .for_function("chat::broadcast"), + ) + .expect("failed"); + + iii.register_trigger( + IIITrigger::Http(HttpTriggerConfig::new("/chat/:room/messages").method(HttpMethod::Get)) + .for_function("chat::list-messages"), + ) + .expect("failed"); + + iii.register_trigger( + IIITrigger::Http(HttpTriggerConfig::new("/presence/join").method(HttpMethod::Post)) + .for_function("presence::join"), + ) + .expect("failed"); + + iii.register_trigger( + IIITrigger::Http(HttpTriggerConfig::new("/presence/leave").method(HttpMethod::Post)) + 
.for_function("presence::leave"), + ) + .expect("failed"); + + tokio::runtime::Runtime::new().unwrap().block_on(async { + tokio::signal::ctrl_c().await.ok(); + }); + iii.shutdown(); +} diff --git a/skills/references/state-management.js b/skills/references/state-management.js new file mode 100644 index 000000000..df82bf452 --- /dev/null +++ b/skills/references/state-management.js @@ -0,0 +1,172 @@ +/** + * Pattern: State Management + * Comparable to: Redis, DynamoDB, Memcached + * + * Persistent key-value state scoped by namespace. Supports set, get, + * list, delete, and partial update operations. + * + * How-to references: + * - State management: https://iii.dev/docs/how-to/manage-state + */ + +import { registerWorker, Logger, TriggerAction } from 'iii-sdk' + +const iii = registerWorker(process.env.III_ENGINE_URL || 'ws://localhost:49134', { + workerName: 'state-management', +}) + +// --------------------------------------------------------------------------- +// state::set — Store a value under a scoped key +// Payload: { scope, key, value } +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'products::create' }, async (data) => { + const id = `prod-${Date.now()}` + const product = { + id, + name: data.name, + price: data.price, + category: data.category, + stock: data.stock || 0, + created_at: new Date().toISOString(), + } + + await iii.trigger({ + function_id: 'state::set', + payload: { scope: 'products', key: id, value: product }, + }) + + return product +}) + +// --------------------------------------------------------------------------- +// state::get — Retrieve a value by scope and key +// Payload: { scope, key } +// Returns null if the key does not exist — always guard for null. 
iii.registerFunction({ id: 'products::get' }, async (data) => {
  const product = await iii.trigger({
    function_id: 'state::get',
    payload: { scope: 'products', key: data.id },
  })

  // Null guard — state::get returns null for missing keys
  if (!product) {
    return { error: 'Product not found', id: data.id }
  }

  return product
})

// ---------------------------------------------------------------------------
// state::list — Retrieve all values in a scope
// Payload: { scope }
// Returns an array of all stored values.
// ---------------------------------------------------------------------------
iii.registerFunction({ id: 'products::list-all' }, async () => {
  const products = await iii.trigger({
    function_id: 'state::list',
    payload: { scope: 'products' },
  })

  const all = products || []
  return { count: all.length, products: all }
})

// ---------------------------------------------------------------------------
// state::delete — Remove a key from a scope
// Payload: { scope, key }
// ---------------------------------------------------------------------------
iii.registerFunction({ id: 'products::remove' }, async (data) => {
  // Existence check first so callers get a clear error for unknown ids.
  const existing = await iii.trigger({
    function_id: 'state::get',
    payload: { scope: 'products', key: data.id },
  })

  if (!existing) {
    return { error: 'Product not found', id: data.id }
  }

  await iii.trigger({
    function_id: 'state::delete',
    payload: { scope: 'products', key: data.id },
  })

  return { deleted: data.id }
})

// ---------------------------------------------------------------------------
// state::update — Partial merge using ops array
// Payload: { scope, key, ops }
// ops: [{ type: 'set', path, value }]
// Use update instead of get-then-set for atomic partial changes.
// ---------------------------------------------------------------------------
iii.registerFunction({ id: 'products::update-price' }, async (data) => {
  const existing = await iii.trigger({
    function_id: 'state::get',
    payload: { scope: 'products', key: data.id },
  })

  if (!existing) {
    return { error: 'Product not found', id: data.id }
  }

  await iii.trigger({
    function_id: 'state::update',
    payload: {
      scope: 'products',
      key: data.id,
      ops: [
        { type: 'set', path: 'price', value: data.newPrice },
        { type: 'set', path: 'updated_at', value: new Date().toISOString() },
      ],
    },
  })

  return { id: data.id, price: data.newPrice }
})

// ---------------------------------------------------------------------------
// Combining operations — inventory adjustment with update
// ---------------------------------------------------------------------------
iii.registerFunction({ id: 'products::adjust-stock' }, async (data) => {
  const logger = new Logger()

  const product = await iii.trigger({
    function_id: 'state::get',
    payload: { scope: 'products', key: data.id },
  })

  if (!product) {
    return { error: 'Product not found', id: data.id }
  }

  const newStock = product.stock + data.adjustment

  // Reject adjustments that would drive inventory negative.
  if (newStock < 0) {
    return { error: 'Insufficient stock', current: product.stock, requested: data.adjustment }
  }

  await iii.trigger({
    function_id: 'state::update',
    payload: {
      scope: 'products',
      key: data.id,
      ops: [
        { type: 'set', path: 'stock', value: newStock },
        { type: 'set', path: 'last_stock_change', value: new Date().toISOString() },
      ],
    },
  })

  logger.info('Stock adjusted', { id: data.id, from: product.stock, to: newStock })
  return { id: data.id, previousStock: product.stock, newStock }
})

// ---------------------------------------------------------------------------
// HTTP triggers
// ---------------------------------------------------------------------------
function_id: 'products::create', config: { api_path: '/products', http_method: 'POST' } }) +iii.registerTrigger({ type: 'http', function_id: 'products::get', config: { api_path: '/products/:id', http_method: 'GET' } }) +iii.registerTrigger({ type: 'http', function_id: 'products::list-all', config: { api_path: '/products', http_method: 'GET' } }) +iii.registerTrigger({ type: 'http', function_id: 'products::remove', config: { api_path: '/products/:id', http_method: 'DELETE' } }) +iii.registerTrigger({ type: 'http', function_id: 'products::update-price', config: { api_path: '/products/:id/price', http_method: 'PUT' } }) +iii.registerTrigger({ type: 'http', function_id: 'products::adjust-stock', config: { api_path: '/products/:id/stock', http_method: 'POST' } }) diff --git a/skills/references/state-management.py b/skills/references/state-management.py new file mode 100644 index 000000000..c1ffc5704 --- /dev/null +++ b/skills/references/state-management.py @@ -0,0 +1,206 @@ +""" +Pattern: State Management +Comparable to: Redis, DynamoDB, Memcached + +Persistent key-value state scoped by namespace. Supports set, get, +list, delete, and partial update operations. 
+ +How-to references: + - State management: https://iii.dev/docs/how-to/manage-state +""" + +import asyncio +import os +import time +from datetime import datetime, timezone + +from iii import InitOptions, Logger, TriggerAction, register_worker + +iii = register_worker( + address=os.environ.get("III_ENGINE_URL", "ws://localhost:49134"), + options=InitOptions(worker_name="state-management"), +) + +# --- +# state::set — Store a value under a scoped key +# Payload: { scope, key, value } +# --- + + +async def products_create(data): + id = f"prod-{int(time.time() * 1000)}" + product = { + "id": id, + "name": data["name"], + "price": data["price"], + "category": data["category"], + "stock": data.get("stock", 0), + "created_at": datetime.now(timezone.utc).isoformat(), + } + + await iii.trigger_async({ + "function_id": "state::set", + "payload": {"scope": "products", "key": id, "value": product}, + }) + + return product + + +iii.register_function("products::create", products_create) + +# --- +# state::get — Retrieve a value by scope and key +# Payload: { scope, key } +# Returns None if the key does not exist — always guard for None. +# --- + + +async def products_get(data): + product = await iii.trigger_async({ + "function_id": "state::get", + "payload": {"scope": "products", "key": data["id"]}, + }) + + if not product: + return {"error": "Product not found", "id": data["id"]} + + return product + + +iii.register_function("products::get", products_get) + +# --- +# state::list — Retrieve all values in a scope +# Payload: { scope } +# Returns an array of all stored values. 
+# --- + + +async def products_list_all(data): + products = await iii.trigger_async({ + "function_id": "state::list", + "payload": {"scope": "products"}, + }) + + products = products or [] + return {"count": len(products), "products": products} + + +iii.register_function("products::list-all", products_list_all) + +# --- +# state::delete — Remove a key from a scope +# Payload: { scope, key } +# --- + + +async def products_remove(data): + existing = await iii.trigger_async({ + "function_id": "state::get", + "payload": {"scope": "products", "key": data["id"]}, + }) + + if not existing: + return {"error": "Product not found", "id": data["id"]} + + await iii.trigger_async({ + "function_id": "state::delete", + "payload": {"scope": "products", "key": data["id"]}, + }) + + return {"deleted": data["id"]} + + +iii.register_function("products::remove", products_remove) + +# --- +# state::update — Partial merge using ops array +# Payload: { scope, key, ops } +# ops: [{ type: "set", path, value }] +# Use update instead of get-then-set for atomic partial changes. 
+# --- + + +async def products_update_price(data): + existing = await iii.trigger_async({ + "function_id": "state::get", + "payload": {"scope": "products", "key": data["id"]}, + }) + + if not existing: + return {"error": "Product not found", "id": data["id"]} + + await iii.trigger_async({ + "function_id": "state::update", + "payload": { + "scope": "products", + "key": data["id"], + "ops": [ + {"type": "set", "path": "price", "value": data["newPrice"]}, + {"type": "set", "path": "updated_at", "value": datetime.now(timezone.utc).isoformat()}, + ], + }, + }) + + return {"id": data["id"], "price": data["newPrice"]} + + +iii.register_function("products::update-price", products_update_price) + +# --- +# Combining operations — inventory adjustment with update +# --- + + +async def products_adjust_stock(data): + logger = Logger() + + product = await iii.trigger_async({ + "function_id": "state::get", + "payload": {"scope": "products", "key": data["id"]}, + }) + + if not product: + return {"error": "Product not found", "id": data["id"]} + + new_stock = product["stock"] + data["adjustment"] + + if new_stock < 0: + return {"error": "Insufficient stock", "current": product["stock"], "requested": data["adjustment"]} + + await iii.trigger_async({ + "function_id": "state::update", + "payload": { + "scope": "products", + "key": data["id"], + "ops": [ + {"type": "set", "path": "stock", "value": new_stock}, + {"type": "set", "path": "last_stock_change", "value": datetime.now(timezone.utc).isoformat()}, + ], + }, + }) + + logger.info("Stock adjusted", {"id": data["id"], "from": product["stock"], "to": new_stock}) + return {"id": data["id"], "previousStock": product["stock"], "newStock": new_stock} + + +iii.register_function("products::adjust-stock", products_adjust_stock) + +# --- +# HTTP triggers +# --- +iii.register_trigger({"type": "http", "function_id": "products::create", "config": {"api_path": "/products", "http_method": "POST"}}) +iii.register_trigger({"type": "http", 
"function_id": "products::get", "config": {"api_path": "/products/:id", "http_method": "GET"}}) +iii.register_trigger({"type": "http", "function_id": "products::list-all", "config": {"api_path": "/products", "http_method": "GET"}}) +iii.register_trigger({"type": "http", "function_id": "products::remove", "config": {"api_path": "/products/:id", "http_method": "DELETE"}}) +iii.register_trigger({"type": "http", "function_id": "products::update-price", "config": {"api_path": "/products/:id/price", "http_method": "PUT"}}) +iii.register_trigger({"type": "http", "function_id": "products::adjust-stock", "config": {"api_path": "/products/:id/stock", "http_method": "POST"}}) + + +async def main(): + while True: + await asyncio.sleep(60) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/skills/references/state-management.rs b/skills/references/state-management.rs new file mode 100644 index 000000000..472a666af --- /dev/null +++ b/skills/references/state-management.rs @@ -0,0 +1,325 @@ +/// Pattern: State Management +/// Comparable to: Redis, DynamoDB, Memcached +/// +/// Persistent key-value state scoped by namespace. Supports set, get, +/// list, delete, and partial update operations. 
use iii_sdk::{
    register_worker, InitOptions, RegisterFunction, TriggerRequest, TriggerAction,
    builtin_triggers::*, IIITrigger, Logger,
};
use serde_json::json;

/// Input for products::create.
/// NOTE(review): the extracted source showed a bare `Option` for `stock`;
/// restored here as `Option<i64>` to match `unwrap_or(0)` and the i64 stock
/// arithmetic in products::adjust-stock — confirm against the original file.
#[derive(serde::Deserialize, schemars::JsonSchema)]
struct CreateProductInput {
    name: String,
    price: f64,
    category: String,
    stock: Option<i64>,
}

/// Input for products::get.
#[derive(serde::Deserialize, schemars::JsonSchema)]
struct GetProductInput {
    id: String,
}

/// Input for products::remove.
#[derive(serde::Deserialize, schemars::JsonSchema)]
struct RemoveProductInput {
    id: String,
}

/// Input for products::update-price.
#[derive(serde::Deserialize, schemars::JsonSchema)]
struct UpdatePriceInput {
    id: String,
    #[serde(rename = "newPrice")]
    new_price: f64,
}

/// Input for products::adjust-stock.
#[derive(serde::Deserialize, schemars::JsonSchema)]
struct AdjustStockInput {
    id: String,
    adjustment: i64,
}

fn main() {
    let url = std::env::var("III_ENGINE_URL").unwrap_or("ws://127.0.0.1:49134".into());
    let iii = register_worker(&url, InitOptions::default());

    // ---
    // state::set - Store a value under a scoped key
    // Payload: { scope, key, value }
    // ---
    let iii_clone = iii.clone();
    iii.register_function(
        RegisterFunction::new_async("products::create", move |data: CreateProductInput| {
            let engine = iii_clone.clone();
            async move {
                // Millisecond timestamp doubles as a unique-enough id for example code.
                let id = format!("prod-{}", chrono::Utc::now().timestamp_millis());
                let product = json!({
                    "id": id,
                    "name": data.name,
                    "price": data.price,
                    "category": data.category,
                    "stock": data.stock.unwrap_or(0),
                    "created_at": chrono::Utc::now().to_rfc3339(),
                });

                engine
                    .trigger(TriggerRequest {
                        function_id: "state::set".into(),
                        payload: json!({ "scope": "products", "key": id, "value": product }),
                        action: None,
                        timeout_ms: None,
                    })
                    .await
                    .map_err(|e| e.to_string())?;

                Ok(product)
            }
        })
        .description("Create a new product"),
    );

    // ---
    // state::get - Retrieve a value by scope and key
    // Payload: { scope, key }
    // Returns null if the key does not exist - always guard for null.
    // ---
    let iii_clone = iii.clone();
    iii.register_function(
        RegisterFunction::new_async("products::get", move |data: GetProductInput| {
            let engine = iii_clone.clone();
            async move {
                let product = engine
                    .trigger(TriggerRequest {
                        function_id: "state::get".into(),
                        payload: json!({ "scope": "products", "key": data.id }),
                        action: None,
                        timeout_ms: None,
                    })
                    .await
                    .map_err(|e| e.to_string())?;

                if product.is_null() {
                    return Ok(json!({ "error": "Product not found", "id": data.id }));
                }

                Ok(product)
            }
        })
        .description("Get a product by ID"),
    );

    // ---
    // state::list - Retrieve all values in a scope
    // Payload: { scope }
    // Returns an array of all stored values.
    // ---
    let iii_clone = iii.clone();
    iii.register_function(
        RegisterFunction::new_async("products::list-all", move |_: serde_json::Value| {
            let engine = iii_clone.clone();
            async move {
                let products = engine
                    .trigger(TriggerRequest {
                        function_id: "state::list".into(),
                        payload: json!({ "scope": "products" }),
                        action: None,
                        timeout_ms: None,
                    })
                    .await
                    .map_err(|e| e.to_string())?;

                // Treat a non-array response as an empty scope.
                let arr = products.as_array().cloned().unwrap_or_default();
                Ok(json!({ "count": arr.len(), "products": arr }))
            }
        })
        .description("List all products"),
    );

    // ---
    // state::delete - Remove a key from a scope
    // Payload: { scope, key }
    // ---
    let iii_clone = iii.clone();
    iii.register_function(
        RegisterFunction::new_async("products::remove", move |data: RemoveProductInput| {
            let engine = iii_clone.clone();
            async move {
                // Existence check first so callers get a clear error for unknown ids.
                let existing = engine
                    .trigger(TriggerRequest {
                        function_id: "state::get".into(),
                        payload: json!({ "scope": "products", "key": data.id }),
                        action: None,
                        timeout_ms: None,
                    })
                    .await
                    .map_err(|e| e.to_string())?;

                if existing.is_null() {
                    return Ok(json!({ "error": "Product not found", "id": data.id }));
                }

                engine
                    .trigger(TriggerRequest {
                        function_id: "state::delete".into(),
                        payload: json!({ "scope": "products", "key": data.id }),
                        action: None,
                        timeout_ms: None,
                    })
                    .await
                    .map_err(|e| e.to_string())?;

                Ok(json!({ "deleted": data.id }))
            }
        })
        .description("Remove a product by ID"),
    );
"products", "key": data.id }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + Ok(json!({ "deleted": data.id })) + } + }) + .description("Remove a product by ID"), + ); + + // --- + // state::update - Partial merge using ops array + // Payload: { scope, key, ops } + // ops: [{ type: "set", path, value }] + // Use update instead of get-then-set for atomic partial changes. + // --- + let iii_clone = iii.clone(); + iii.register_function( + RegisterFunction::new_async("products::update-price", move |data: UpdatePriceInput| { + let iii = iii_clone.clone(); + async move { + let existing = iii + .trigger(TriggerRequest { + function_id: "state::get".into(), + payload: json!({ "scope": "products", "key": data.id }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + if existing.is_null() { + return Ok(json!({ "error": "Product not found", "id": data.id })); + } + + iii.trigger(TriggerRequest { + function_id: "state::update".into(), + payload: json!({ + "scope": "products", + "key": data.id, + "ops": [ + { "type": "set", "path": "price", "value": data.new_price }, + { "type": "set", "path": "updated_at", "value": chrono::Utc::now().to_rfc3339() }, + ], + }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + Ok(json!({ "id": data.id, "price": data.new_price })) + } + }) + .description("Update product price"), + ); + + // --- + // Combining operations - inventory adjustment with update + // --- + let iii_clone = iii.clone(); + iii.register_function( + RegisterFunction::new_async("products::adjust-stock", move |data: AdjustStockInput| { + let iii = iii_clone.clone(); + async move { + let logger = Logger::new(); + + let product = iii + .trigger(TriggerRequest { + function_id: "state::get".into(), + payload: json!({ "scope": "products", "key": data.id }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + if product.is_null() { + return 
Ok(json!({ "error": "Product not found", "id": data.id })); + } + + let current_stock = product["stock"].as_i64().unwrap_or(0); + let new_stock = current_stock + data.adjustment; + + if new_stock < 0 { + return Ok(json!({ + "error": "Insufficient stock", + "current": current_stock, + "requested": data.adjustment, + })); + } + + iii.trigger(TriggerRequest { + function_id: "state::update".into(), + payload: json!({ + "scope": "products", + "key": data.id, + "ops": [ + { "type": "set", "path": "stock", "value": new_stock }, + { "type": "set", "path": "last_stock_change", "value": chrono::Utc::now().to_rfc3339() }, + ], + }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + logger.info("Stock adjusted", &json!({ "id": data.id, "from": current_stock, "to": new_stock })); + Ok(json!({ "id": data.id, "previousStock": current_stock, "newStock": new_stock })) + } + }) + .description("Adjust product stock"), + ); + + // --- + // HTTP triggers + // --- + iii.register_trigger( + IIITrigger::Http(HttpTriggerConfig::new("/products").method(HttpMethod::Post)) + .for_function("products::create"), + ) + .expect("failed"); + + iii.register_trigger( + IIITrigger::Http(HttpTriggerConfig::new("/products/:id").method(HttpMethod::Get)) + .for_function("products::get"), + ) + .expect("failed"); + + iii.register_trigger( + IIITrigger::Http(HttpTriggerConfig::new("/products").method(HttpMethod::Get)) + .for_function("products::list-all"), + ) + .expect("failed"); + + iii.register_trigger( + IIITrigger::Http(HttpTriggerConfig::new("/products/:id").method(HttpMethod::Delete)) + .for_function("products::remove"), + ) + .expect("failed"); + + iii.register_trigger( + IIITrigger::Http(HttpTriggerConfig::new("/products/:id/price").method(HttpMethod::Put)) + .for_function("products::update-price"), + ) + .expect("failed"); + + iii.register_trigger( + IIITrigger::Http(HttpTriggerConfig::new("/products/:id/stock").method(HttpMethod::Post)) + 
.for_function("products::adjust-stock"), + ) + .expect("failed"); + + tokio::runtime::Runtime::new().unwrap().block_on(async { + tokio::signal::ctrl_c().await.ok(); + }); + iii.shutdown(); +} diff --git a/skills/references/state-reactions.js b/skills/references/state-reactions.js new file mode 100644 index 000000000..05b9b5cf2 --- /dev/null +++ b/skills/references/state-reactions.js @@ -0,0 +1,158 @@ +/** + * Pattern: State Reactions + * Comparable to: Firebase onSnapshot, Convex mutations + * + * Register functions that fire automatically when state changes + * in a given scope. Optionally filter with a condition function + * that returns a boolean. + * + * How-to references: + * - State reactions: https://iii.dev/docs/how-to/react-to-state-changes + */ + +import { registerWorker, Logger, TriggerAction } from 'iii-sdk' + +const iii = registerWorker(process.env.III_ENGINE_URL || 'ws://localhost:49134', { + workerName: 'state-reactions', +}) + +// --------------------------------------------------------------------------- +// Basic state reaction — fires on ANY change in the 'orders' scope +// The handler receives: { new_value, old_value, key, event_type } +// event_type: 'set' | 'update' | 'delete' +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'reactions::order-audit-log' }, async (event) => { + const logger = new Logger() + const { new_value, old_value, key, event_type } = event + + const action = !old_value ? 'created' : !new_value ? 
'deleted' : 'updated' + logger.info('Order changed', { key, action, event_type }) + + // Persist audit entry + const auditId = `audit-${Date.now()}` + await iii.trigger({ + function_id: 'state::set', + payload: { + scope: 'order-audit', + key: auditId, + value: { + auditId, + orderKey: key, + action, + event_type, + before: old_value, + after: new_value, + timestamp: new Date().toISOString(), + }, + }, + }) + + return { auditId, action } +}) + +iii.registerTrigger({ + type: 'state', + function_id: 'reactions::order-audit-log', + config: { scope: 'orders' }, +}) + +// --------------------------------------------------------------------------- +// Conditional reaction — only fires when condition function returns true +// The condition function receives the same event and must return a boolean. +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'reactions::high-value-alert-condition' }, async (event) => { + const { new_value } = event + + // Only react when an order total exceeds $1000 + return new_value && new_value.total > 1000 +}) + +iii.registerFunction({ id: 'reactions::high-value-alert' }, async (event) => { + const logger = new Logger() + const { new_value, key } = event + + logger.info('High-value order detected', { key, total: new_value.total }) + + // Enqueue alert for reliable delivery + iii.trigger({ + function_id: 'alerts::notify-manager', + payload: { + type: 'high-value-order', + orderId: key, + total: new_value.total, + customer: new_value.customer, + }, + action: TriggerAction.Enqueue({ queue: 'alerts' }), + }) + + return { alerted: true, orderId: key } +}) + +iii.registerTrigger({ + type: 'state', + function_id: 'reactions::high-value-alert', + config: { + scope: 'orders', + condition_function_id: 'reactions::high-value-alert-condition', + }, +}) + +// --------------------------------------------------------------------------- +// Multiple independent reactions to the same scope +// Each 
trigger registers a separate function on the same scope. +// All registered reactions fire independently on every matching change. +// --------------------------------------------------------------------------- + +// Reaction 1: Update aggregate metrics +iii.registerFunction({ id: 'reactions::order-metrics' }, async (event) => { + const { new_value, old_value } = event + + const ops = [] + + if (new_value && !old_value) { + ops.push({ type: 'increment', path: 'total_orders', by: 1 }) + ops.push({ type: 'increment', path: 'total_revenue', by: new_value.total || 0 }) + } + + if (!new_value && old_value) { + ops.push({ type: 'increment', path: 'total_orders', by: -1 }) + ops.push({ type: 'increment', path: 'total_revenue', by: -(old_value.total || 0) }) + } + + if (ops.length > 0) { + await iii.trigger({ + function_id: 'state::update', + payload: { scope: 'order-metrics', key: 'global', ops }, + }) + } +}) + +iii.registerTrigger({ + type: 'state', + function_id: 'reactions::order-metrics', + config: { scope: 'orders' }, +}) + +// Reaction 2: Push live update to connected clients +iii.registerFunction({ id: 'reactions::order-live-feed' }, async (event) => { + const { new_value, old_value, key } = event + const action = !old_value ? 'created' : !new_value ? 
'deleted' : 'updated' + + iii.trigger({ + function_id: 'stream::send', + payload: { + stream_name: 'orders-live', + group_id: 'dashboard', + id: `evt-${Date.now()}`, + event_type: 'order_changed', + data: { action, key, order: new_value }, + }, + action: TriggerAction.Void(), + }) +}) + +iii.registerTrigger({ + type: 'state', + function_id: 'reactions::order-live-feed', + config: { scope: 'orders' }, +}) diff --git a/skills/references/state-reactions.py b/skills/references/state-reactions.py new file mode 100644 index 000000000..e5f0c6c3f --- /dev/null +++ b/skills/references/state-reactions.py @@ -0,0 +1,200 @@ +""" +Pattern: State Reactions +Comparable to: Firebase onSnapshot, Convex mutations + +Register functions that fire automatically when state changes +in a given scope. Optionally filter with a condition function +that returns a boolean. + +How-to references: + - State reactions: https://iii.dev/docs/how-to/react-to-state-changes +""" + +import asyncio +import os +import time +from datetime import datetime, timezone + +from iii import InitOptions, Logger, TriggerAction, register_worker + +iii = register_worker( + address=os.environ.get("III_ENGINE_URL", "ws://localhost:49134"), + options=InitOptions(worker_name="state-reactions"), +) + +# --- +# Basic state reaction — fires on ANY change in the 'orders' scope +# The handler receives: { new_value, old_value, key, event_type } +# event_type: 'set' | 'update' | 'delete' +# --- + + +async def order_audit_log(event): + logger = Logger() + new_value = event.get("new_value") + old_value = event.get("old_value") + key = event.get("key") + event_type = event.get("event_type") + + if not old_value: + action = "created" + elif not new_value: + action = "deleted" + else: + action = "updated" + + logger.info("Order changed", {"key": key, "action": action, "event_type": event_type}) + + audit_id = f"audit-{int(time.time() * 1000)}" + await iii.trigger_async({ + "function_id": "state::set", + "payload": { + "scope": 
"order-audit", + "key": audit_id, + "value": { + "auditId": audit_id, + "orderKey": key, + "action": action, + "event_type": event_type, + "before": old_value, + "after": new_value, + "timestamp": datetime.now(timezone.utc).isoformat(), + }, + }, + }) + + return {"auditId": audit_id, "action": action} + + +iii.register_function("reactions::order-audit-log", order_audit_log) + +iii.register_trigger({ + "type": "state", + "function_id": "reactions::order-audit-log", + "config": {"scope": "orders"}, +}) + +# --- +# Conditional reaction — only fires when condition function returns true +# The condition function receives the same event and must return a boolean. +# --- + + +async def high_value_alert_condition(event): + new_value = event.get("new_value") + return bool(new_value and new_value.get("total", 0) > 1000) + + +iii.register_function("reactions::high-value-alert-condition", high_value_alert_condition) + + +async def high_value_alert(event): + logger = Logger() + new_value = event.get("new_value") + key = event.get("key") + + logger.info("High-value order detected", {"key": key, "total": new_value["total"]}) + + iii.trigger({ + "function_id": "alerts::notify-manager", + "payload": { + "type": "high-value-order", + "orderId": key, + "total": new_value["total"], + "customer": new_value.get("customer"), + }, + "action": TriggerAction.Enqueue({"queue": "alerts"}), + }) + + return {"alerted": True, "orderId": key} + + +iii.register_function("reactions::high-value-alert", high_value_alert) + +iii.register_trigger({ + "type": "state", + "function_id": "reactions::high-value-alert", + "config": { + "scope": "orders", + "condition_function_id": "reactions::high-value-alert-condition", + }, +}) + +# --- +# Multiple independent reactions to the same scope +# Each trigger registers a separate function on the same scope. +# All registered reactions fire independently on every matching change. 
+# --- + + +async def order_metrics(event): + new_value = event.get("new_value") + old_value = event.get("old_value") + + ops = [] + + if new_value and not old_value: + ops.append({"type": "increment", "path": "total_orders", "by": 1}) + ops.append({"type": "increment", "path": "total_revenue", "by": new_value.get("total", 0)}) + + if not new_value and old_value: + ops.append({"type": "increment", "path": "total_orders", "by": -1}) + ops.append({"type": "increment", "path": "total_revenue", "by": -(old_value.get("total", 0))}) + + if ops: + await iii.trigger_async({ + "function_id": "state::update", + "payload": {"scope": "order-metrics", "key": "global", "ops": ops}, + }) + + +iii.register_function("reactions::order-metrics", order_metrics) + +iii.register_trigger({ + "type": "state", + "function_id": "reactions::order-metrics", + "config": {"scope": "orders"}, +}) + + +async def order_live_feed(event): + new_value = event.get("new_value") + old_value = event.get("old_value") + key = event.get("key") + + if not old_value: + action = "created" + elif not new_value: + action = "deleted" + else: + action = "updated" + + iii.trigger({ + "function_id": "stream::send", + "payload": { + "stream_name": "orders-live", + "group_id": "dashboard", + "id": f"evt-{int(time.time() * 1000)}", + "event_type": "order_changed", + "data": {"action": action, "key": key, "order": new_value}, + }, + "action": TriggerAction.Void(), + }) + + +iii.register_function("reactions::order-live-feed", order_live_feed) + +iii.register_trigger({ + "type": "state", + "function_id": "reactions::order-live-feed", + "config": {"scope": "orders"}, +}) + + +async def main(): + while True: + await asyncio.sleep(60) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/skills/references/state-reactions.rs b/skills/references/state-reactions.rs new file mode 100644 index 000000000..87342347b --- /dev/null +++ b/skills/references/state-reactions.rs @@ -0,0 +1,237 @@ +/// Pattern: State Reactions 
+/// Comparable to: Firebase onSnapshot, Convex mutations
+///
+/// Register functions that fire automatically when state changes
+/// in a given scope. Optionally filter with a condition function
+/// that returns a boolean.
+
+use iii_sdk::{
+    register_worker, InitOptions, RegisterFunction, TriggerRequest, TriggerAction,
+    builtin_triggers::*, IIITrigger, Logger,
+};
+use serde_json::json;
+use std::time::Duration;
+
+use serde;
+use schemars;
+
+#[derive(serde::Deserialize, schemars::JsonSchema)]
+struct StateEvent {
+    new_value: Option<serde_json::Value>,
+    old_value: Option<serde_json::Value>,
+    key: String,
+    event_type: String,
+}
+
+fn main() {
+    let url = std::env::var("III_ENGINE_URL").unwrap_or("ws://127.0.0.1:49134".into());
+    let iii = register_worker(&url, InitOptions::default());
+
+    // ---
+    // Basic state reaction - fires on ANY change in the 'orders' scope
+    // The handler receives: { new_value, old_value, key, event_type }
+    // event_type: "set" | "update" | "delete"
+    // ---
+    let iii_clone = iii.clone();
+    iii.register_function(
+        RegisterFunction::new_async("reactions::order-audit-log", move |event: StateEvent| {
+            let iii = iii_clone.clone();
+            async move {
+                let logger = Logger::new();
+                let action = if event.old_value.is_none() {
+                    "created"
+                } else if event.new_value.is_none() {
+                    "deleted"
+                } else {
+                    "updated"
+                };
+
+                logger.info("Order changed", &json!({ "key": event.key, "action": action, "event_type": event.event_type }));
+
+                let audit_id = format!("audit-{}", chrono::Utc::now().timestamp_millis());
+                iii.trigger(TriggerRequest {
+                    function_id: "state::set".into(),
+                    payload: json!({
+                        "scope": "order-audit",
+                        "key": audit_id,
+                        "value": {
+                            "auditId": audit_id,
+                            "orderKey": event.key,
+                            "action": action,
+                            "event_type": event.event_type,
+                            "before": event.old_value,
+                            "after": event.new_value,
+                            "timestamp": chrono::Utc::now().to_rfc3339(),
+                        },
+                    }),
+                    action: None,
+                    timeout_ms: None,
+                })
+                .await
+                .map_err(|e| e.to_string())?;
+
+                Ok(json!({ "auditId":
audit_id, "action": action }))
+            }
+        })
+        .description("Audit log for order changes"),
+    );
+
+    iii.register_trigger(
+        IIITrigger::State(StateTriggerConfig::new().scope("orders"))
+            .for_function("reactions::order-audit-log"),
+    )
+    .expect("failed");
+
+    // ---
+    // Conditional reaction - only fires when condition function returns true
+    // The condition function receives the same event and must return a boolean.
+    // ---
+    iii.register_function(
+        RegisterFunction::new("reactions::high-value-alert-condition", |event: StateEvent| -> Result<serde_json::Value, String> {
+            let is_high_value = event
+                .new_value
+                .as_ref()
+                .and_then(|v| v["total"].as_f64())
+                .map(|total| total > 1000.0)
+                .unwrap_or(false);
+            Ok(json!(is_high_value))
+        })
+        .description("Condition: order total exceeds $1000"),
+    );
+
+    let iii_clone = iii.clone();
+    iii.register_function(
+        RegisterFunction::new_async("reactions::high-value-alert", move |event: StateEvent| {
+            let iii = iii_clone.clone();
+            async move {
+                let logger = Logger::new();
+                let total = event.new_value.as_ref().and_then(|v| v["total"].as_f64()).unwrap_or(0.0);
+                let customer = event.new_value.as_ref().and_then(|v| v["customer"].clone().into());
+
+                logger.info("High-value order detected", &json!({ "key": event.key, "total": total }));
+
+                iii.trigger(TriggerRequest {
+                    function_id: "alerts::notify-manager".into(),
+                    payload: json!({
+                        "type": "high-value-order",
+                        "orderId": event.key,
+                        "total": total,
+                        "customer": customer,
+                    }),
+                    action: Some(TriggerAction::Enqueue { queue: "alerts".into() }),
+                    timeout_ms: None,
+                })
+                .await
+                .map_err(|e| e.to_string())?;
+
+                Ok(json!({ "alerted": true, "orderId": event.key }))
+            }
+        })
+        .description("Alert on high-value orders"),
+    );
+
+    iii.register_trigger(
+        IIITrigger::State(
+            StateTriggerConfig::new()
+                .scope("orders")
+                .condition("reactions::high-value-alert-condition"),
+        )
+        .for_function("reactions::high-value-alert"),
+    )
+    .expect("failed");
+
+    // ---
+    // Multiple independent reactions
to the same scope + // Each trigger registers a separate function on the same scope. + // All registered reactions fire independently on every matching change. + // --- + + // Reaction 1: Update aggregate metrics + let iii_clone = iii.clone(); + iii.register_function( + RegisterFunction::new_async("reactions::order-metrics", move |event: StateEvent| { + let iii = iii_clone.clone(); + async move { + let mut ops = Vec::new(); + + if event.new_value.is_some() && event.old_value.is_none() { + let total = event.new_value.as_ref().and_then(|v| v["total"].as_f64()).unwrap_or(0.0); + ops.push(json!({ "type": "increment", "path": "total_orders", "by": 1 })); + ops.push(json!({ "type": "increment", "path": "total_revenue", "by": total })); + } + + if event.new_value.is_none() && event.old_value.is_some() { + let total = event.old_value.as_ref().and_then(|v| v["total"].as_f64()).unwrap_or(0.0); + ops.push(json!({ "type": "increment", "path": "total_orders", "by": -1 })); + ops.push(json!({ "type": "increment", "path": "total_revenue", "by": -total })); + } + + if !ops.is_empty() { + iii.trigger(TriggerRequest { + function_id: "state::update".into(), + payload: json!({ "scope": "order-metrics", "key": "global", "ops": ops }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + } + + Ok(json!(null)) + } + }) + .description("Update aggregate order metrics"), + ); + + iii.register_trigger( + IIITrigger::State(StateTriggerConfig::new().scope("orders")) + .for_function("reactions::order-metrics"), + ) + .expect("failed"); + + // Reaction 2: Push live update to connected clients + let iii_clone = iii.clone(); + iii.register_function( + RegisterFunction::new_async("reactions::order-live-feed", move |event: StateEvent| { + let iii = iii_clone.clone(); + async move { + let action = if event.old_value.is_none() { + "created" + } else if event.new_value.is_none() { + "deleted" + } else { + "updated" + }; + + iii.trigger(TriggerRequest { + function_id: 
"stream::send".into(), + payload: json!({ + "stream_name": "orders-live", + "group_id": "dashboard", + "id": format!("evt-{}", chrono::Utc::now().timestamp_millis()), + "event_type": "order_changed", + "data": { "action": action, "key": event.key, "order": event.new_value }, + }), + action: Some(TriggerAction::Void), + timeout_ms: None, + }) + .await + .ok(); + + Ok(json!(null)) + } + }) + .description("Push order changes to live feed"), + ); + + iii.register_trigger( + IIITrigger::State(StateTriggerConfig::new().scope("orders")) + .for_function("reactions::order-live-feed"), + ) + .expect("failed"); + + tokio::runtime::Runtime::new().unwrap().block_on(async { + tokio::signal::ctrl_c().await.ok(); + }); + iii.shutdown(); +} diff --git a/skills/references/trigger-actions.js b/skills/references/trigger-actions.js new file mode 100644 index 000000000..2e10f6629 --- /dev/null +++ b/skills/references/trigger-actions.js @@ -0,0 +1,157 @@ +/** + * Pattern: Trigger Actions (Invocation Modes) + * Comparable to: Synchronous calls, async queues, fire-and-forget messaging + * + * Every iii.trigger() call can specify an invocation mode via the `action` + * parameter. There are exactly three modes: + * 1. Synchronous (default) — blocks until the target returns a result. + * 2. Fire-and-forget (TriggerAction.Void()) — returns null immediately. + * 3. Enqueue (TriggerAction.Enqueue({ queue })) — durably enqueues and + * returns { messageReceiptId }. + * + * This file shows each mode in isolation and then combines all three in a + * realistic checkout workflow. 
+ * + * How-to references: + * - Trigger actions: https://iii.dev/docs/how-to/trigger-actions + */ + +import { registerWorker, Logger, TriggerAction } from 'iii-sdk' + +const iii = registerWorker(process.env.III_ENGINE_URL || 'ws://localhost:49134', { + workerName: 'trigger-actions', +}) + +// --------------------------------------------------------------------------- +// Helper functions used by the examples below +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'checkout::validate-cart' }, async (data) => { + const logger = new Logger() + logger.info('Validating cart', { cartId: data.cart_id }) + + if (!data.items?.length) { + return { valid: false, reason: 'Cart is empty' } + } + + const total = data.items.reduce((sum, i) => sum + i.price * i.qty, 0) + return { valid: true, cart_id: data.cart_id, total } +}) + +iii.registerFunction({ id: 'checkout::charge-payment' }, async (data) => { + const logger = new Logger() + logger.info('Charging payment', { cart_id: data.cart_id, total: data.total }) + // Simulate payment processing + return { charged: true, transaction_id: `txn_${Date.now()}` } +}) + +iii.registerFunction({ id: 'checkout::send-confirmation' }, async (data) => { + const logger = new Logger() + logger.info('Sending order confirmation email', { email: data.email }) + return { sent: true } +}) + +// --------------------------------------------------------------------------- +// Mode 1 — Synchronous (default) +// Blocks until the target function returns. The result is the function's +// return value. Use this when the caller needs the result to continue. 
+// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'examples::sync-call' }, async (data) => { + const logger = new Logger() + + // No action parameter — defaults to synchronous + const result = await iii.trigger({ + function_id: 'checkout::validate-cart', + payload: { cart_id: data.cart_id, items: data.items }, + }) + + logger.info('Sync result received', { valid: result.valid, total: result.total }) + return result +}) + +// --------------------------------------------------------------------------- +// Mode 2 — Fire-and-forget (TriggerAction.Void()) +// Returns null immediately. The target function runs asynchronously and its +// return value is discarded. Use for side-effects like logging, notifications, +// or analytics where the caller does not need to wait. +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'examples::void-call' }, async (data) => { + const logger = new Logger() + + // TriggerAction.Void() — returns null, does not wait + iii.trigger({ + function_id: 'checkout::send-confirmation', + payload: { email: data.email, order_id: data.order_id }, + action: TriggerAction.Void(), + }) + + logger.info('Confirmation dispatched (fire-and-forget)') + return { dispatched: true } +}) + +// --------------------------------------------------------------------------- +// Mode 3 — Enqueue (TriggerAction.Enqueue({ queue })) +// Durably enqueues the payload onto a named queue. Returns immediately with +// { messageReceiptId }. The target function processes the message when a +// worker picks it up. Use for work that must survive crashes and be retried. 
+// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'examples::enqueue-call' }, async (data) => { + const logger = new Logger() + + const receipt = await iii.trigger({ + function_id: 'checkout::charge-payment', + payload: { cart_id: data.cart_id, total: data.total }, + action: TriggerAction.Enqueue({ queue: 'payments' }), + }) + + logger.info('Payment enqueued', { messageReceiptId: receipt.messageReceiptId }) + return receipt +}) + +// --------------------------------------------------------------------------- +// Realistic workflow — Checkout combining all three modes +// 1. Validate cart (sync) — need the result to decide whether to proceed +// 2. Charge payment (enqueue) — durable, retryable, must not be lost +// 3. Send email (void) — best-effort notification, don't block +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'checkout::process' }, async (data) => { + const logger = new Logger() + + // Step 1: synchronous validation — we need the total to charge + const validation = await iii.trigger({ + function_id: 'checkout::validate-cart', + payload: { cart_id: data.cart_id, items: data.items }, + }) + + if (!validation.valid) { + return { error: validation.reason } + } + + // Step 2: enqueue payment — durable async, survives crashes + const receipt = await iii.trigger({ + function_id: 'checkout::charge-payment', + payload: { cart_id: data.cart_id, total: validation.total }, + action: TriggerAction.Enqueue({ queue: 'payments' }), + }) + + logger.info('Payment queued', { receiptId: receipt.messageReceiptId }) + + // Step 3: fire-and-forget email — don't block the checkout response + iii.trigger({ + function_id: 'checkout::send-confirmation', + payload: { email: data.email, order_id: data.cart_id }, + action: TriggerAction.Void(), + }) + + return { + status: 'accepted', + cart_id: data.cart_id, + total: validation.total, + payment_receipt: 
receipt.messageReceiptId, + } +}) + +iii.registerTrigger({ + type: 'http', + function_id: 'checkout::process', + config: { api_path: '/checkout', http_method: 'POST' }, +}) diff --git a/skills/references/trigger-actions.py b/skills/references/trigger-actions.py new file mode 100644 index 000000000..ff4e52c86 --- /dev/null +++ b/skills/references/trigger-actions.py @@ -0,0 +1,190 @@ +""" +Pattern: Trigger Actions (Invocation Modes) +Comparable to: Synchronous calls, async queues, fire-and-forget messaging + +Every iii.trigger() call can specify an invocation mode via the `action` +parameter. There are exactly three modes: + 1. Synchronous (default) — blocks until the target returns a result. + 2. Fire-and-forget (TriggerAction.Void()) — returns None immediately. + 3. Enqueue (TriggerAction.Enqueue({ queue })) — durably enqueues and + returns { messageReceiptId }. + +This file shows each mode in isolation and then combines all three in a +realistic checkout workflow. + +How-to references: + - Trigger actions: https://iii.dev/docs/how-to/trigger-actions +""" + +import asyncio +import os +import time + +from iii import InitOptions, Logger, TriggerAction, register_worker + +iii = register_worker( + address=os.environ.get("III_ENGINE_URL", "ws://localhost:49134"), + options=InitOptions(worker_name="trigger-actions"), +) + +# --- +# Helper functions used by the examples below +# --- + + +async def validate_cart(data): + logger = Logger() + logger.info("Validating cart", {"cartId": data.get("cart_id")}) + + items = data.get("items") or [] + if not items: + return {"valid": False, "reason": "Cart is empty"} + + total = sum(i["price"] * i["qty"] for i in items) + return {"valid": True, "cart_id": data["cart_id"], "total": total} + + +iii.register_function("checkout::validate-cart", validate_cart) + + +async def charge_payment(data): + logger = Logger() + logger.info("Charging payment", {"cart_id": data["cart_id"], "total": data["total"]}) + return {"charged": True, 
"transaction_id": f"txn_{int(time.time() * 1000)}"} + + +iii.register_function("checkout::charge-payment", charge_payment) + + +async def send_confirmation(data): + logger = Logger() + logger.info("Sending order confirmation email", {"email": data["email"]}) + return {"sent": True} + + +iii.register_function("checkout::send-confirmation", send_confirmation) + +# --- +# Mode 1 — Synchronous (default) +# Blocks until the target function returns. The result is the function's +# return value. Use this when the caller needs the result to continue. +# --- + + +async def sync_call(data): + logger = Logger() + + result = await iii.trigger_async({ + "function_id": "checkout::validate-cart", + "payload": {"cart_id": data["cart_id"], "items": data["items"]}, + }) + + logger.info("Sync result received", {"valid": result["valid"], "total": result.get("total")}) + return result + + +iii.register_function("examples::sync-call", sync_call) + +# --- +# Mode 2 — Fire-and-forget (TriggerAction.Void()) +# Returns None immediately. The target function runs asynchronously and its +# return value is discarded. Use for side-effects like logging, notifications, +# or analytics where the caller does not need to wait. +# --- + + +async def void_call(data): + logger = Logger() + + iii.trigger({ + "function_id": "checkout::send-confirmation", + "payload": {"email": data["email"], "order_id": data["order_id"]}, + "action": TriggerAction.Void(), + }) + + logger.info("Confirmation dispatched (fire-and-forget)") + return {"dispatched": True} + + +iii.register_function("examples::void-call", void_call) + +# --- +# Mode 3 — Enqueue (TriggerAction.Enqueue({ queue })) +# Durably enqueues the payload onto a named queue. Returns immediately with +# { messageReceiptId }. The target function processes the message when a +# worker picks it up. Use for work that must survive crashes and be retried. 
+# --- + + +async def enqueue_call(data): + logger = Logger() + + receipt = await iii.trigger_async({ + "function_id": "checkout::charge-payment", + "payload": {"cart_id": data["cart_id"], "total": data["total"]}, + "action": TriggerAction.Enqueue({"queue": "payments"}), + }) + + logger.info("Payment enqueued", {"messageReceiptId": receipt["messageReceiptId"]}) + return receipt + + +iii.register_function("examples::enqueue-call", enqueue_call) + +# --- +# Realistic workflow — Checkout combining all three modes +# 1. Validate cart (sync) — need the result to decide whether to proceed +# 2. Charge payment (enqueue) — durable, retryable, must not be lost +# 3. Send email (void) — best-effort notification, don't block +# --- + + +async def checkout_process(data): + logger = Logger() + + validation = await iii.trigger_async({ + "function_id": "checkout::validate-cart", + "payload": {"cart_id": data["cart_id"], "items": data["items"]}, + }) + + if not validation["valid"]: + return {"error": validation["reason"]} + + receipt = await iii.trigger_async({ + "function_id": "checkout::charge-payment", + "payload": {"cart_id": data["cart_id"], "total": validation["total"]}, + "action": TriggerAction.Enqueue({"queue": "payments"}), + }) + + logger.info("Payment queued", {"receiptId": receipt["messageReceiptId"]}) + + iii.trigger({ + "function_id": "checkout::send-confirmation", + "payload": {"email": data["email"], "order_id": data["cart_id"]}, + "action": TriggerAction.Void(), + }) + + return { + "status": "accepted", + "cart_id": data["cart_id"], + "total": validation["total"], + "payment_receipt": receipt["messageReceiptId"], + } + + +iii.register_function("checkout::process", checkout_process) + +iii.register_trigger({ + "type": "http", + "function_id": "checkout::process", + "config": {"api_path": "/checkout", "http_method": "POST"}, +}) + + +async def main(): + while True: + await asyncio.sleep(60) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git 
a/skills/references/trigger-actions.rs b/skills/references/trigger-actions.rs
new file mode 100644
index 000000000..567a258a8
--- /dev/null
+++ b/skills/references/trigger-actions.rs
@@ -0,0 +1,273 @@
+/// Pattern: Trigger Actions (Invocation Modes)
+/// Comparable to: Synchronous calls, async queues, fire-and-forget messaging
+///
+/// Every iii.trigger() call can specify an invocation mode via the `action`
+/// parameter. There are exactly three modes:
+///   1. Synchronous (default) - blocks until the target returns a result.
+///   2. Fire-and-forget (TriggerAction::Void) - returns null immediately.
+///   3. Enqueue (TriggerAction::Enqueue { queue }) - durably enqueues and
+///      returns { messageReceiptId }.
+///
+/// This file shows each mode in isolation and then combines all three in a
+/// realistic checkout workflow.
+
+use iii_sdk::{
+    register_worker, InitOptions, RegisterFunction, TriggerRequest, TriggerAction,
+    builtin_triggers::*, IIITrigger, Logger,
+};
+use serde_json::json;
+use std::time::Duration;
+
+use serde;
+use schemars;
+
+#[derive(serde::Deserialize, schemars::JsonSchema)]
+struct CartInput {
+    cart_id: String,
+    items: Option<Vec<CartItem>>,
+}
+
+#[derive(serde::Deserialize, serde::Serialize, schemars::JsonSchema)]
+struct CartItem {
+    price: f64,
+    qty: i64,
+}
+
+#[derive(serde::Deserialize, schemars::JsonSchema)]
+struct ChargeInput {
+    cart_id: String,
+    total: f64,
+}
+
+#[derive(serde::Deserialize, schemars::JsonSchema)]
+struct ConfirmationInput {
+    email: String,
+    order_id: String,
+}
+
+#[derive(serde::Deserialize, schemars::JsonSchema)]
+struct SyncCallInput {
+    cart_id: String,
+    items: Vec<CartItem>,
+}
+
+#[derive(serde::Deserialize, schemars::JsonSchema)]
+struct VoidCallInput {
+    email: String,
+    order_id: String,
+}
+
+#[derive(serde::Deserialize, schemars::JsonSchema)]
+struct EnqueueCallInput {
+    cart_id: String,
+    total: f64,
+}
+
+#[derive(serde::Deserialize, schemars::JsonSchema)]
+struct CheckoutInput {
+    cart_id: String,
+    items: Vec<CartItem>,
+
email: String,
+}
+
+fn main() {
+    let url = std::env::var("III_ENGINE_URL").unwrap_or("ws://127.0.0.1:49134".into());
+    let iii = register_worker(&url, InitOptions::default());
+
+    // ---
+    // Helper functions used by the examples below
+    // ---
+    iii.register_function(
+        RegisterFunction::new("checkout::validate-cart", |data: CartInput| -> Result<serde_json::Value, String> {
+            let logger = Logger::new();
+            logger.info("Validating cart", &json!({ "cartId": data.cart_id }));
+
+            let items = data.items.unwrap_or_default();
+            if items.is_empty() {
+                return Ok(json!({ "valid": false, "reason": "Cart is empty" }));
+            }
+
+            let total: f64 = items.iter().map(|i| i.price * i.qty as f64).sum();
+            Ok(json!({ "valid": true, "cart_id": data.cart_id, "total": total }))
+        })
+        .description("Validate a shopping cart"),
+    );
+
+    iii.register_function(
+        RegisterFunction::new("checkout::charge-payment", |data: ChargeInput| -> Result<serde_json::Value, String> {
+            let logger = Logger::new();
+            logger.info("Charging payment", &json!({ "cart_id": data.cart_id, "total": data.total }));
+            Ok(json!({ "charged": true, "transaction_id": format!("txn_{}", chrono::Utc::now().timestamp_millis()) }))
+        })
+        .description("Charge payment for cart"),
+    );
+
+    iii.register_function(
+        RegisterFunction::new("checkout::send-confirmation", |data: ConfirmationInput| -> Result<serde_json::Value, String> {
+            let logger = Logger::new();
+            logger.info("Sending order confirmation email", &json!({ "email": data.email }));
+            Ok(json!({ "sent": true }))
+        })
+        .description("Send order confirmation email"),
+    );
+
+    // ---
+    // Mode 1 - Synchronous (default)
+    // Blocks until the target function returns. The result is the function's
+    // return value. Use this when the caller needs the result to continue.
+ // --- + let iii_clone = iii.clone(); + iii.register_function( + RegisterFunction::new_async("examples::sync-call", move |data: SyncCallInput| { + let iii = iii_clone.clone(); + async move { + let logger = Logger::new(); + + let result = iii + .trigger(TriggerRequest { + function_id: "checkout::validate-cart".into(), + payload: json!({ "cart_id": data.cart_id, "items": data.items }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + logger.info("Sync result received", &json!({ "valid": result["valid"], "total": result["total"] })); + Ok(result) + } + }) + .description("Example: synchronous trigger call"), + ); + + // --- + // Mode 2 - Fire-and-forget (TriggerAction::Void) + // Returns null immediately. The target function runs asynchronously and its + // return value is discarded. Use for side-effects like logging, notifications, + // or analytics where the caller does not need to wait. + // --- + let iii_clone = iii.clone(); + iii.register_function( + RegisterFunction::new_async("examples::void-call", move |data: VoidCallInput| { + let iii = iii_clone.clone(); + async move { + let logger = Logger::new(); + + iii.trigger(TriggerRequest { + function_id: "checkout::send-confirmation".into(), + payload: json!({ "email": data.email, "order_id": data.order_id }), + action: Some(TriggerAction::Void), + timeout_ms: None, + }) + .await + .ok(); + + logger.info("Confirmation dispatched (fire-and-forget)", &json!({})); + Ok(json!({ "dispatched": true })) + } + }) + .description("Example: fire-and-forget trigger call"), + ); + + // --- + // Mode 3 - Enqueue (TriggerAction::Enqueue { queue }) + // Durably enqueues the payload onto a named queue. Returns immediately with + // { messageReceiptId }. The target function processes the message when a + // worker picks it up. Use for work that must survive crashes and be retried. 
+ // --- + let iii_clone = iii.clone(); + iii.register_function( + RegisterFunction::new_async("examples::enqueue-call", move |data: EnqueueCallInput| { + let iii = iii_clone.clone(); + async move { + let logger = Logger::new(); + + let receipt = iii + .trigger(TriggerRequest { + function_id: "checkout::charge-payment".into(), + payload: json!({ "cart_id": data.cart_id, "total": data.total }), + action: Some(TriggerAction::Enqueue { queue: "payments".into() }), + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + logger.info("Payment enqueued", &json!({ "messageReceiptId": receipt["messageReceiptId"] })); + Ok(receipt) + } + }) + .description("Example: enqueue trigger call"), + ); + + // --- + // Realistic workflow - Checkout combining all three modes + // 1. Validate cart (sync) - need the result to decide whether to proceed + // 2. Charge payment (enqueue) - durable, retryable, must not be lost + // 3. Send email (void) - best-effort notification, don't block + // --- + let iii_clone = iii.clone(); + iii.register_function( + RegisterFunction::new_async("checkout::process", move |data: CheckoutInput| { + let iii = iii_clone.clone(); + async move { + let logger = Logger::new(); + + let validation = iii + .trigger(TriggerRequest { + function_id: "checkout::validate-cart".into(), + payload: json!({ "cart_id": data.cart_id, "items": data.items }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + if validation["valid"] != true { + return Ok(json!({ "error": validation["reason"] })); + } + + let total = validation["total"].as_f64().unwrap_or(0.0); + + let receipt = iii + .trigger(TriggerRequest { + function_id: "checkout::charge-payment".into(), + payload: json!({ "cart_id": data.cart_id, "total": total }), + action: Some(TriggerAction::Enqueue { queue: "payments".into() }), + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + logger.info("Payment queued", &json!({ "receiptId": 
receipt["messageReceiptId"] })); + + iii.trigger(TriggerRequest { + function_id: "checkout::send-confirmation".into(), + payload: json!({ "email": data.email, "order_id": data.cart_id }), + action: Some(TriggerAction::Void), + timeout_ms: None, + }) + .await + .ok(); + + Ok(json!({ + "status": "accepted", + "cart_id": data.cart_id, + "total": total, + "payment_receipt": receipt["messageReceiptId"], + })) + } + }) + .description("Full checkout workflow combining sync, enqueue, and void"), + ); + + iii.register_trigger( + IIITrigger::Http(HttpTriggerConfig::new("/checkout").method(HttpMethod::Post)) + .for_function("checkout::process"), + ) + .expect("failed"); + + tokio::runtime::Runtime::new().unwrap().block_on(async { + tokio::signal::ctrl_c().await.ok(); + }); + iii.shutdown(); +} diff --git a/skills/references/trigger-conditions.js b/skills/references/trigger-conditions.js new file mode 100644 index 000000000..0fa899a86 --- /dev/null +++ b/skills/references/trigger-conditions.js @@ -0,0 +1,147 @@ +/** + * Pattern: Trigger Conditions + * Comparable to: Event filters, guard clauses, conditional routing + * + * A trigger condition is a regular function that returns a boolean. When + * attached to a trigger via condition_function_id, the engine calls the + * condition first — if it returns true the handler runs, otherwise the + * event is silently skipped. The condition receives the same event data + * as the handler. + * + * How-to references: + * - Trigger conditions: https://iii.dev/docs/how-to/use-trigger-conditions + */ + +import { registerWorker, Logger, TriggerAction } from 'iii-sdk' + +const iii = registerWorker(process.env.III_ENGINE_URL || 'ws://localhost:49134', { + workerName: 'trigger-conditions', +}) + +// --------------------------------------------------------------------------- +// Example 1 — State trigger with a high-value order condition +// Only fires the handler when the order total exceeds $500. 
+// --------------------------------------------------------------------------- + +// Condition function — returns true/false +iii.registerFunction({ id: 'conditions::is-high-value' }, async (data) => { + // data is the same event the handler would receive + return data.value?.total > 500 +}) + +// Handler function — only runs when the condition passes +iii.registerFunction({ id: 'orders::flag-high-value' }, async (data) => { + const logger = new Logger() + logger.info('High-value order detected', { key: data.key, total: data.value.total }) + + await iii.trigger({ + function_id: 'state::update', + payload: { + scope: 'orders', + key: data.key, + ops: [{ type: 'set', path: 'flagged', value: true }], + }, + }) + + return { flagged: true, order_id: data.key } +}) + +// Bind the trigger with condition_function_id +iii.registerTrigger({ + type: 'state', + function_id: 'orders::flag-high-value', + config: { + scope: 'orders', + condition_function_id: 'conditions::is-high-value', + }, +}) + +// --------------------------------------------------------------------------- +// Example 2 — HTTP trigger with request validation condition +// Rejects requests missing a required API key header. 
+// --------------------------------------------------------------------------- + +iii.registerFunction({ id: 'conditions::has-api-key' }, async (data) => { + const apiKey = data.headers?.['x-api-key'] + return typeof apiKey === 'string' && apiKey.length > 0 +}) + +iii.registerFunction({ id: 'api::protected-endpoint' }, async (data) => { + const logger = new Logger() + logger.info('Authenticated request', { path: data.path }) + return { message: 'Access granted', user: data.headers['x-api-key'] } +}) + +iii.registerTrigger({ + type: 'http', + function_id: 'api::protected-endpoint', + config: { + api_path: '/api/protected', + http_method: 'GET', + condition_function_id: 'conditions::has-api-key', + }, +}) + +// --------------------------------------------------------------------------- +// Example 3 — Queue trigger with event type filter condition +// Only processes messages whose `event_type` is "order.placed". +// --------------------------------------------------------------------------- + +iii.registerFunction({ id: 'conditions::is-order-placed' }, async (data) => { + return data.event_type === 'order.placed' +}) + +iii.registerFunction({ id: 'orders::on-placed' }, async (data) => { + const logger = new Logger() + logger.info('Processing order.placed event', { orderId: data.order_id }) + + // Kick off fulfillment + await iii.trigger({ + function_id: 'orders::fulfill', + payload: { order_id: data.order_id }, + action: TriggerAction.Enqueue({ queue: 'fulfillment' }), + }) + + return { processed: true, order_id: data.order_id } +}) + +iii.registerFunction({ id: 'orders::fulfill' }, async (data) => { + const logger = new Logger() + logger.info('Fulfilling order', { orderId: data.order_id }) + return { fulfilled: true } +}) + +iii.registerTrigger({ + type: 'queue', + function_id: 'orders::on-placed', + config: { + queue: 'order-events', + condition_function_id: 'conditions::is-order-placed', + }, +}) + +// 
--------------------------------------------------------------------------- +// Example 4 — Condition with shared data +// The condition and handler receive identical event data, so a condition can +// enrich or validate any field the handler will use. +// --------------------------------------------------------------------------- + +iii.registerFunction({ id: 'conditions::is-weekday' }, async (data) => { + const day = new Date().getUTCDay() + return day >= 1 && day <= 5 // Monday–Friday (UTC, matching the Python/Rust variants) +}) + +iii.registerFunction({ id: 'reports::weekday-digest' }, async () => { + const logger = new Logger() + logger.info('Running weekday digest') + return { generated: true } +}) + +iii.registerTrigger({ + type: 'cron', + function_id: 'reports::weekday-digest', + config: { + expression: '0 0 8 * * * *', // 7-field (sec min hour dom month dow year): daily at 08:00; condition limits to weekdays + condition_function_id: 'conditions::is-weekday', + }, +}) diff --git a/skills/references/trigger-conditions.py b/skills/references/trigger-conditions.py new file mode 100644 index 000000000..3983b266d --- /dev/null +++ b/skills/references/trigger-conditions.py @@ -0,0 +1,185 @@ +""" +Pattern: Trigger Conditions +Comparable to: Event filters, guard clauses, conditional routing + +A trigger condition is a regular function that returns a boolean. When +attached to a trigger via condition_function_id, the engine calls the +condition first — if it returns true the handler runs, otherwise the +event is silently skipped. The condition receives the same event data +as the handler.
+ +How-to references: + - Trigger conditions: https://iii.dev/docs/how-to/use-trigger-conditions +""" + +import asyncio +import os +from datetime import datetime, timezone + +from iii import InitOptions, Logger, TriggerAction, register_worker + +iii = register_worker( + address=os.environ.get("III_ENGINE_URL", "ws://localhost:49134"), + options=InitOptions(worker_name="trigger-conditions"), +) + +# --- +# Example 1 — State trigger with a high-value order condition +# Only fires the handler when the order total exceeds $500. +# --- + + +async def is_high_value(data): + value = data.get("value") or {} + return value.get("total", 0) > 500 + + +iii.register_function("conditions::is-high-value", is_high_value) + + +async def flag_high_value(data): + logger = Logger() + logger.info("High-value order detected", {"key": data["key"], "total": data["value"]["total"]}) + + await iii.trigger_async({ + "function_id": "state::update", + "payload": { + "scope": "orders", + "key": data["key"], + "ops": [{"type": "set", "path": "flagged", "value": True}], + }, + }) + + return {"flagged": True, "order_id": data["key"]} + + +iii.register_function("orders::flag-high-value", flag_high_value) + +iii.register_trigger({ + "type": "state", + "function_id": "orders::flag-high-value", + "config": { + "scope": "orders", + "condition_function_id": "conditions::is-high-value", + }, +}) + +# --- +# Example 2 — HTTP trigger with request validation condition +# Rejects requests missing a required API key header. 
+# --- + + +async def has_api_key(data): + headers = data.get("headers") or {} + api_key = headers.get("x-api-key") + return isinstance(api_key, str) and len(api_key) > 0 + + +iii.register_function("conditions::has-api-key", has_api_key) + + +async def protected_endpoint(data): + logger = Logger() + logger.info("Authenticated request", {"path": data.get("path")}) + return {"message": "Access granted", "user": data["headers"]["x-api-key"]} + + +iii.register_function("api::protected-endpoint", protected_endpoint) + +iii.register_trigger({ + "type": "http", + "function_id": "api::protected-endpoint", + "config": { + "api_path": "/api/protected", + "http_method": "GET", + "condition_function_id": "conditions::has-api-key", + }, +}) + +# --- +# Example 3 — Queue trigger with event type filter condition +# Only processes messages whose `event_type` is "order.placed". +# --- + + +async def is_order_placed(data): + return data.get("event_type") == "order.placed" + + +iii.register_function("conditions::is-order-placed", is_order_placed) + + +async def on_placed(data): + logger = Logger() + logger.info("Processing order.placed event", {"orderId": data["order_id"]}) + + await iii.trigger_async({ + "function_id": "orders::fulfill", + "payload": {"order_id": data["order_id"]}, + "action": TriggerAction.Enqueue({"queue": "fulfillment"}), + }) + + return {"processed": True, "order_id": data["order_id"]} + + +iii.register_function("orders::on-placed", on_placed) + + +async def fulfill(data): + logger = Logger() + logger.info("Fulfilling order", {"orderId": data["order_id"]}) + return {"fulfilled": True} + + +iii.register_function("orders::fulfill", fulfill) + +iii.register_trigger({ + "type": "queue", + "function_id": "orders::on-placed", + "config": { + "queue": "order-events", + "condition_function_id": "conditions::is-order-placed", + }, +}) + +# --- +# Example 4 — Condition with shared data +# The condition and handler receive identical event data, so a condition can +# enrich 
or validate any field the handler will use. +# --- + + +async def is_weekday(data): + day = datetime.now(timezone.utc).weekday() + return day < 5 + + +iii.register_function("conditions::is-weekday", is_weekday) + + +async def weekday_digest(data): + logger = Logger() + logger.info("Running weekday digest") + return {"generated": True} + + +iii.register_function("reports::weekday-digest", weekday_digest) + +iii.register_trigger({ + "type": "cron", + "function_id": "reports::weekday-digest", + "config": { + "expression": "0 0 8 * * * *", + "condition_function_id": "conditions::is-weekday", + }, +}) + + +async def main(): + while True: + await asyncio.sleep(60) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/skills/references/trigger-conditions.rs b/skills/references/trigger-conditions.rs new file mode 100644 index 000000000..a0352c2c3 --- /dev/null +++ b/skills/references/trigger-conditions.rs @@ -0,0 +1,224 @@ +/// Pattern: Trigger Conditions +/// Comparable to: Event filters, guard clauses, conditional routing +/// +/// A trigger condition is a regular function that returns a boolean. When +/// attached to a trigger via condition_function_id, the engine calls the +/// condition first - if it returns true the handler runs, otherwise the +/// event is silently skipped. The condition receives the same event data +/// as the handler.
+ +use iii_sdk::{ + register_worker, InitOptions, RegisterFunction, TriggerRequest, TriggerAction, + builtin_triggers::*, IIITrigger, Logger, +}; +use serde_json::json; +use chrono::Datelike; + +#[derive(serde::Deserialize, schemars::JsonSchema)] +struct StateChangeEvent { + new_value: Option, + old_value: Option, + key: String, +} + +#[derive(serde::Deserialize, schemars::JsonSchema)] +struct HttpRequestEvent { + headers: Option>, + path: Option, +} + +#[derive(serde::Deserialize, schemars::JsonSchema)] +struct QueueEvent { + event_type: Option, + order_id: Option, +} + +fn main() { + let url = std::env::var("III_ENGINE_URL").unwrap_or("ws://127.0.0.1:49134".into()); + let iii = register_worker(&url, InitOptions::default()); + + // --- + // Example 1 - State trigger with a high-value order condition + // Only fires the handler when the order total exceeds $500. + // --- + + iii.register_function( + RegisterFunction::new("conditions::is-high-value", |data: StateChangeEvent| -> Result { + let is_high = data + .new_value + .as_ref() + .and_then(|v| v["total"].as_f64()) + .map(|total| total > 500.0) + .unwrap_or(false); + Ok(json!(is_high)) + }) + .description("Condition: order total exceeds $500"), + ); + + let iii_clone = iii.clone(); + iii.register_function( + RegisterFunction::new_async("orders::flag-high-value", move |data: StateChangeEvent| { + let iii = iii_clone.clone(); + async move { + let logger = Logger::new(); + let total = data.new_value.as_ref().and_then(|v| v["total"].as_f64()).unwrap_or(0.0); + logger.info("High-value order detected", &json!({ "key": data.key, "total": total })); + + iii.trigger(TriggerRequest { + function_id: "state::update".into(), + payload: json!({ + "scope": "orders", + "key": data.key, + "ops": [{ "type": "set", "path": "flagged", "value": true }], + }), + action: None, + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + Ok(json!({ "flagged": true, "order_id": data.key })) + } + }) + .description("Flag 
high-value orders"), + ); + + iii.register_trigger( + IIITrigger::State( + StateTriggerConfig::new() + .scope("orders") + .condition("conditions::is-high-value"), + ) + .for_function("orders::flag-high-value"), + ) + .expect("failed"); + + // --- + // Example 2 - HTTP trigger with request validation condition + // Rejects requests missing a required API key header. + // --- + + iii.register_function( + RegisterFunction::new("conditions::has-api-key", |data: HttpRequestEvent| -> Result { + let has_key = data + .headers + .as_ref() + .and_then(|h| h.get("x-api-key")) + .map(|k| !k.is_empty()) + .unwrap_or(false); + Ok(json!(has_key)) + }) + .description("Condition: request has x-api-key header"), + ); + + iii.register_function( + RegisterFunction::new("api::protected-endpoint", |data: HttpRequestEvent| -> Result { + let logger = Logger::new(); + logger.info("Authenticated request", &json!({ "path": data.path })); + Ok(json!({ "message": "Access granted" })) + }) + .description("Protected API endpoint"), + ); + + iii.register_trigger( + IIITrigger::Http( + HttpTriggerConfig::new("/api/protected") + .method(HttpMethod::Get) + .condition("conditions::has-api-key"), + ) + .for_function("api::protected-endpoint"), + ) + .expect("failed"); + + // --- + // Example 3 - Queue trigger with event type filter condition + // Only processes messages whose `event_type` is "order.placed". 
+ // --- + + iii.register_function( + RegisterFunction::new("conditions::is-order-placed", |data: QueueEvent| -> Result { + let is_placed = data.event_type.as_deref() == Some("order.placed"); + Ok(json!(is_placed)) + }) + .description("Condition: event_type is order.placed"), + ); + + let iii_clone = iii.clone(); + iii.register_function( + RegisterFunction::new_async("orders::on-placed", move |data: QueueEvent| { + let iii = iii_clone.clone(); + async move { + let logger = Logger::new(); + let order_id = data.order_id.clone().unwrap_or_default(); + logger.info("Processing order.placed event", &json!({ "orderId": order_id })); + + iii.trigger(TriggerRequest { + function_id: "orders::fulfill".into(), + payload: json!({ "order_id": order_id }), + action: Some(TriggerAction::Enqueue { queue: "fulfillment".into() }), + timeout_ms: None, + }) + .await + .map_err(|e| e.to_string())?; + + Ok(json!({ "processed": true, "order_id": order_id })) + } + }) + .description("Handle order.placed events"), + ); + + iii.register_function( + RegisterFunction::new("orders::fulfill", |data: serde_json::Value| -> Result { + let logger = Logger::new(); + logger.info("Fulfilling order", &json!({ "orderId": data["order_id"] })); + Ok(json!({ "fulfilled": true })) + }) + .description("Fulfill an order"), + ); + + iii.register_trigger( + IIITrigger::Queue( + QueueTriggerConfig::new("order-events") + .condition("conditions::is-order-placed"), + ) + .for_function("orders::on-placed"), + ) + .expect("failed"); + + // --- + // Example 4 - Condition with shared data + // The condition and handler receive identical event data, so a condition can + // enrich or validate any field the handler will use. 
+ // --- + + iii.register_function( + RegisterFunction::new("conditions::is-weekday", |_: serde_json::Value| -> Result { + let day = chrono::Utc::now().weekday().num_days_from_monday(); + Ok(json!(day < 5)) + }) + .description("Condition: current day is a weekday"), + ); + + iii.register_function( + RegisterFunction::new("reports::weekday-digest", |_: serde_json::Value| -> Result { + let logger = Logger::new(); + logger.info("Running weekday digest", &json!({})); + Ok(json!({ "generated": true })) + }) + .description("Generate weekday digest report"), + ); + + iii.register_trigger( + IIITrigger::Cron( + CronTriggerConfig::new("0 0 8 * * * *") + .condition("conditions::is-weekday"), + ) + .for_function("reports::weekday-digest"), + ) + .expect("failed"); + + tokio::runtime::Runtime::new().unwrap().block_on(async { + tokio::signal::ctrl_c().await.ok(); + }); + iii.shutdown(); +} diff --git a/skills/references/workflow-orchestration.js b/skills/references/workflow-orchestration.js new file mode 100644 index 000000000..2d642a07b --- /dev/null +++ b/skills/references/workflow-orchestration.js @@ -0,0 +1,232 @@ +/** + * Pattern: Workflow Orchestration & Durable Execution + * Comparable to: Temporal, Airflow, Inngest + * + * Demonstrates a durable order-fulfillment pipeline with retries, + * step tracking via state, scheduled cleanup, and DLQ handling. + * Each step is its own function chained via named queues.
+ * + * How-to references: + * - Queues & retries: https://iii.dev/docs/how-to/use-queues + * - DLQ handling: https://iii.dev/docs/how-to/dead-letter-queues + * - Cron scheduling: https://iii.dev/docs/how-to/schedule-cron-task + * - State management: https://iii.dev/docs/how-to/manage-state + * - Streams: https://iii.dev/docs/how-to/stream-realtime-data + */ + +import { registerWorker, Logger, TriggerAction } from 'iii-sdk' + +const iii = registerWorker(process.env.III_ENGINE_URL || 'ws://localhost:49134', { + workerName: 'workflow-orchestration', +}) + +// Queue durability (iii-config.yaml): +// queue_configs: +// order-validate: { max_retries: 2, backoff_ms: 1000, type: standard } +// order-payment: +// { max_retries: 5, backoff_ms: 2000, type: fifo, message_group_field: order_id } +// order-ship: { max_retries: 3, backoff_ms: 1000, type: standard } +// adapter: +// class: modules::queue::BuiltinQueueAdapter +// Failed jobs are routed to the DLQ after retries are exhausted. + +// --------------------------------------------------------------------------- +// Helper — update workflow state and emit a stream event +// --------------------------------------------------------------------------- +function trackStep(orderId, step, status, detail = {}) { + iii.trigger({ + function_id: 'state::update', + payload: { + scope: 'orders', + key: orderId, + ops: [ + { type: 'set', path: 'current_step', value: step }, + { type: 'set', path: 'status', value: status }, + { type: 'set', path: `steps.${step}`, value: { status, ...detail, at: new Date().toISOString() } }, + ], + }, + action: TriggerAction.Void(), + }) + + iii.trigger({ + function_id: 'stream::send', + payload: { + stream_name: 'order-progress', + group_id: orderId, + id: `${step}-${Date.now()}`, + event_type: 'step_update', + data: { step, status, ...detail }, + }, + action: TriggerAction.Void(), + }) +} + +// --------------------------------------------------------------------------- +// Step 1 — Validate order +// 
--------------------------------------------------------------------------- +iii.registerFunction({ id: 'orders::validate' }, async (data) => { + const logger = new Logger() + logger.info('Validating order', { orderId: data.order_id }) + + trackStep(data.order_id, 'validate', 'running') + + const isValid = data.items?.length > 0 && data.total > 0 + if (!isValid) throw new Error('Invalid order: missing items or total') + + trackStep(data.order_id, 'validate', 'complete') + + await iii.trigger({ + function_id: 'orders::charge-payment', + payload: data, + action: TriggerAction.Enqueue({ queue: 'order-payment' }), + }) + + return { valid: true } +}) + +// --------------------------------------------------------------------------- +// Step 2 — Charge payment (with retries for transient failures) +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'orders::charge-payment' }, async (data) => { + const logger = new Logger() + logger.info('Charging payment', { orderId: data.order_id, total: data.total }) + + const snapshot = await iii.trigger({ + function_id: 'state::get', + payload: { scope: 'orders', key: data.order_id }, + }) + const paymentAttempt = Number(snapshot?.attempts?.payment ?? 0) + 1 + await iii.trigger({ + function_id: 'state::update', + payload: { + scope: 'orders', + key: data.order_id, + ops: [{ type: 'set', path: 'attempts.payment', value: paymentAttempt }], + }, + }) + + trackStep(data.order_id, 'payment', 'running') + const shouldFailForDemo = + data.force_payment_failure || paymentAttempt <= Number(data.fail_until_payment_attempt ?? 
0) + if (shouldFailForDemo) { + trackStep(data.order_id, 'payment', 'retrying', { paymentAttempt }) + throw new Error(`Payment provider unavailable on attempt ${paymentAttempt}`) + } + + // Simulate payment call + const paymentResult = { transaction_id: `txn-${Date.now()}`, charged: data.total } + + trackStep(data.order_id, 'payment', 'complete', paymentResult) + + await iii.trigger({ + function_id: 'orders::ship', + payload: { ...data, ...paymentResult }, + action: TriggerAction.Enqueue({ queue: 'order-ship' }), + }) + + return paymentResult +}) + +// --------------------------------------------------------------------------- +// Step 3 — Ship order +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'orders::ship' }, async (data) => { + const logger = new Logger() + logger.info('Shipping order', { orderId: data.order_id }) + + trackStep(data.order_id, 'shipping', 'running') + + const shipment = { tracking_number: `TRACK-${Date.now()}`, carrier: 'ups' } + + trackStep(data.order_id, 'shipping', 'fulfilled', shipment) + + // Broadcast completion + iii.trigger({ + function_id: 'publish', + payload: { topic: 'order.fulfilled', data: { order_id: data.order_id, ...shipment } }, + action: TriggerAction.Void(), + }) + + return shipment +}) + +// --------------------------------------------------------------------------- +// Cron — clean up stale orders every hour +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'orders::cleanup-stale' }, async () => { + const logger = new Logger() + const orders = await iii.trigger({ + function_id: 'state::list', + payload: { scope: 'orders' }, + }) + + let cleaned = 0 + const ONE_DAY = 24 * 60 * 60 * 1000 + + for (const order of orders) { + const stepTime = order.steps?.[order.current_step]?.at + if (stepTime && Date.now() - new Date(stepTime).getTime() > ONE_DAY) { + await iii.trigger({ + function_id: 'state::update', + 
payload: { + scope: 'orders', + key: order._key, + ops: [{ type: 'set', path: 'status', value: 'stale' }], + }, + }) + cleaned++ + } + } + + logger.info('Cleaned stale orders', { cleaned }) + return { cleaned } +}) + +iii.registerTrigger({ + type: 'cron', + function_id: 'orders::cleanup-stale', + config: { expression: '0 0 * * * * *' }, // every hour +}) + +// --------------------------------------------------------------------------- +// HTTP — create a new order (entry point) +// --------------------------------------------------------------------------- +iii.registerFunction({ id: 'orders::create' }, async (data) => { + const order_id = `ord-${Date.now()}` + const force_payment_failure = Boolean(data.force_payment_failure) + const fail_until_payment_attempt = Number(data.fail_until_payment_attempt ?? 0) + + await iii.trigger({ + function_id: 'state::set', + payload: { + scope: 'orders', + key: order_id, + value: { + _key: order_id, + order_id, + items: data.items, + total: data.total, + force_payment_failure, + fail_until_payment_attempt, + status: 'created', + current_step: 'created', + steps: {}, + created_at: new Date().toISOString(), + }, + }, + }) + + const enqueueResult = await iii.trigger({ + function_id: 'orders::validate', + payload: { order_id, ...data, force_payment_failure, fail_until_payment_attempt }, + action: TriggerAction.Enqueue({ queue: 'order-validate' }), + }) + + return { order_id, status: 'created', enqueue_receipt_id: enqueueResult.messageReceiptId } +}) + +iii.registerTrigger({ + type: 'http', + function_id: 'orders::create', + config: { api_path: '/orders', http_method: 'POST' }, +})