2 changes: 2 additions & 0 deletions packages/cli/src/commands/index.ts
@@ -15,6 +15,7 @@ import trace from "./trace";
import devContracts from "./dev-contracts";
import verify from "./verify";
import pull from "./pull";
import mirror from "./mirror";

// eslint-disable-next-line @typescript-eslint/no-explicit-any -- Each command has different options
export const commands: CommandModule<any, any>[] = [
@@ -32,4 +33,5 @@ export const commands: CommandModule<any, any>[] = [
abiTs,
verify,
pull,
mirror,
];
148 changes: 148 additions & 0 deletions packages/cli/src/commands/mirror.ts
@@ -0,0 +1,148 @@
import type { CommandModule, InferredOptionTypes } from "yargs";
import { createClient, getAddress, http, isHex } from "viem";
import chalk from "chalk";
import { getChainId } from "viem/actions";
import { defaultChains } from "../defaultChains";
import { mirror } from "../mirror/mirror";
import { MUDError } from "@latticexyz/common/errors";
import { kmsKeyToAccount } from "@latticexyz/common/kms";
import { privateKeyToAccount } from "viem/accounts";

const options = {
rpcBatch: {
type: "boolean",
desc: "Enable batch processing of RPC requests via Viem client (defaults to batch size of 100 and wait of 1s).",
},
kms: {
type: "boolean",
desc: "Deploy the world with an AWS KMS key instead of local private key.",
},
fromWorld: {
type: "string",
desc: "Source world address to mirror data from.",
required: true,
},
fromBlock: {
type: "number",
desc: "Block number of source world deploy.",
},
fromRpc: {
type: "string",
desc: "RPC URL of source chain to mirror from.",
required: true,
},
fromIndexer: {
type: "string",
desc: "MUD indexer URL of source chain to mirror from. Used to fetch table data.",
},
fromBlockscout: {
type: "string",
desc: "Blockscout URL of source chain to mirror from. Used to fetch contract init code.",
},
toWorld: {
type: "string",
desc: "Target world address to mirror data to.",
required: true,
},
toRpc: {
type: "string",
desc: "RPC URL of target chain to mirror to.",
required: true,
},
} as const;

type Options = InferredOptionTypes<typeof options>;

const commandModule: CommandModule<Options, Options> = {
command: "mirror",

describe: "Mirror an existing world and its data to another chain.",

builder(yargs) {
return yargs.options(options);
},

async handler(opts) {
const fromWorld = getAddress(opts.fromWorld);
const fromClient = createClient({
transport: http(opts.fromRpc, {
batch: opts.rpcBatch ? { batchSize: 100, wait: 1000 } : undefined,
}),
pollingInterval: 500,
});
const fromChainId = await getChainId(fromClient);
const fromChain = defaultChains.find((chain) => chain.id === fromChainId);
const fromIndexer = opts.fromIndexer ?? fromChain?.indexerUrl;
if (!fromIndexer) {
throw new MUDError(`No \`--fromIndexer\` provided or indexer URL configured for chain ${fromChainId}.`);
}

const fromBlockscout = opts.fromBlockscout
? ({ name: "Blockscout", url: opts.fromBlockscout } as const)
: fromChain?.blockExplorers?.default;
if (!fromBlockscout || fromBlockscout.name !== "Blockscout") {
throw new MUDError(
`No \`--fromBlockscout\` provided or Blockscout block explorer URL configured for chain ${fromChainId}.`,
);
}

const account = await (async () => {
if (opts.kms) {
const keyId = process.env.AWS_KMS_KEY_ID;
if (!keyId) {
throw new MUDError(
"Missing `AWS_KMS_KEY_ID` environment variable. This is required when using with `--kms` option.",
);
}

return await kmsKeyToAccount({ keyId });
} else {
const privateKey = process.env.PRIVATE_KEY;
if (!isHex(privateKey)) {
throw new MUDError(
// eslint-disable-next-line max-len
`Missing or invalid \`PRIVATE_KEY\` environment variable. To use the default Anvil private key, run\n\n echo "PRIVATE_KEY=0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" > .env\n`,
);
}
return privateKeyToAccount(privateKey);
}
})();

const toWorld = getAddress(opts.toWorld);
const toClient = createClient({
account,
transport: http(opts.toRpc, {
batch: opts.rpcBatch ? { batchSize: 100, wait: 1000 } : undefined,
}),
pollingInterval: 500,
});
const toChainId = await getChainId(toClient);

console.log(
chalk.bgBlue(
chalk.whiteBright(`
Mirroring MUD data
from world ${fromWorld} on chain ${fromChainId}
to world ${toWorld} on chain ${toChainId}
`),
),
);

await mirror({
rootDir: process.cwd(),
from: {
client: fromClient,
indexer: fromIndexer,
world: fromWorld,
block: opts.fromBlock != null ? BigInt(opts.fromBlock) : undefined,
blockscout: fromBlockscout.url,
},
to: {
client: toClient,
world: toWorld,
},
});
},
};

export default commandModule;
2 changes: 1 addition & 1 deletion packages/cli/src/deploy/configToModules.ts
@@ -17,7 +17,7 @@ const callWithSignatureModuleArtifact = getContractArtifact(callWithSignatureMod
const batchStoreModuleArtifact = getContractArtifact(batchStoreModule);

// metadata module is installed inside `ensureResourceTags`
const defaultModules: Module[] = [
export const defaultModules: Module[] = [
{
// optional for now
// TODO: figure out approach to install on existing worlds where deployer may not own root namespace
19 changes: 19 additions & 0 deletions packages/cli/src/mirror/common.ts
@@ -0,0 +1,19 @@
import { mudDataDirectory } from "@latticexyz/world/node";
import { Address, Hex } from "viem";
import { DeployedSystem } from "../deploy/common";
import { StoreLog } from "@latticexyz/store";

export const mirrorPlansDirectory = `${mudDataDirectory}/mirror-plans`;

export type PlanStep =
| { step: "deploySystem"; system: DeployedSystem; bytecode: DeployedBytecode }
| { step: "setRecord"; record: Extract<StoreLog, { eventName: "Store_SetRecord" }>["args"] };

export type DeployedBytecode = {
address: Address;
initCode: Hex;
libraries: {
offset: number;
reference: DeployedBytecode;
}[];
};
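
Note: each plan step is later written out as one JSON object per line of a gzipped NDJSON file (see createPlanWriter below). As a rough sketch — the record fields follow the Store_SetRecord event args, and every value here is invented for illustration — a single setRecord step has this shape:

import { PlanStep } from "./common";

// Hypothetical example only: one `setRecord` plan step before it is serialized to a line of NDJSON.
const exampleStep: PlanStep = {
  step: "setRecord",
  record: {
    tableId: "0x74620000000000000000000000000000436f756e746572000000000000000000", // made-up table resource ID
    keyTuple: [],
    staticData: "0x0000002a",
    encodedLengths: "0x0000000000000000000000000000000000000000000000000000000000000000",
    dynamicData: "0x",
  },
};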
109 changes: 109 additions & 0 deletions packages/cli/src/mirror/createMirrorPlan.ts
@@ -0,0 +1,109 @@
import { Address, Client } from "viem";
import { getWorldDeploy } from "../deploy/getWorldDeploy";
import { getChainId } from "viem/actions";
import { getTables } from "../deploy/getTables";
import { resourceToLabel } from "@latticexyz/common";
import { getRecordsAsLogs } from "@latticexyz/store-sync";
import pRetry from "p-retry";
import { Table } from "@latticexyz/config";
import path from "path";
import { createPlanWriter } from "./createPlanWriter";
import { mkdir, rm } from "fs/promises";
import { mirrorPlansDirectory } from "./common";
import { getSystems } from "../deploy/getSystems";
import { getDeployedBytecode } from "./getDeployedBytecode";

// TODO: attempt to create world the same way as it was originally created, thus preserving world address
// TODO: set up table to track migrated records with original metadata (block number/timestamp) and for lazy migrations

export async function createMirrorPlan({
rootDir,
from,
}: {
rootDir: string;
from: {
block?: bigint;
world: Address;
client: Client;
indexer: string;
blockscout: string;
};
}) {
const fromChainId = await getChainId(from.client);

const planFilename = path.join(rootDir, mirrorPlansDirectory, `${fromChainId}_${from.world.toLowerCase()}.ndjson.gz`);
await mkdir(path.dirname(planFilename), { recursive: true });

const plan = createPlanWriter(planFilename);

const makePlan = (async () => {
const worldDeploy = await getWorldDeploy(from.client, from.world, from.block);

console.log("getting systems");
const systems = await getSystems({
client: from.client,
worldDeploy,
indexerUrl: from.indexer,
chainId: fromChainId,
});

console.log("getting bytecode for", systems.length, "systems");
const systemsWithBytecode = await Promise.all(
systems.map(async (system) => {
const bytecode = await getDeployedBytecode({
client: from.client,
address: system.address,
debugLabel: `${resourceToLabel(system)} system`,
allowedStorage: ["empty", { worldConsumer: worldDeploy.address }],
blockscoutUrl: from.blockscout,
});
return { system, bytecode };
}),
);
for (const { system, bytecode } of systemsWithBytecode) {
if (!bytecode) continue;
plan.write({ step: "deploySystem", system, bytecode });
}

const tables = await getTables({
client: from.client,
worldDeploy,
indexerUrl: from.indexer,
chainId: fromChainId,
});

// TODO: sort tables so that the insert order is correct (e.g. namespaces first)

let count = 0;
for (const table of tables) {
const logs = await pRetry(() =>
getRecordsAsLogs<Table>({
worldAddress: from.world,
table: table as never,
client: from.client,
indexerUrl: from.indexer,
chainId: fromChainId,
}),
);
console.log("got", logs.length, "logs for", resourceToLabel(table));
for (const log of logs) {
plan.write({ step: "setRecord", record: log.args });
}
count += logs.length;
}
console.log("got", count, "total record logs");
})();

try {
try {
await makePlan;
} finally {
console.log("writing plan to", path.relative(rootDir, planFilename));
await plan.end();
}
return planFilename;
} catch (error) {
await rm(planFilename, { force: true });
throw error;
}
}
20 changes: 20 additions & 0 deletions packages/cli/src/mirror/createPlanWriter.ts
@@ -0,0 +1,20 @@
import { createWriteStream } from "node:fs";
import { createGzip } from "node:zlib";
import { pipeline } from "node:stream/promises";
import { PlanStep } from "./common";

export function createPlanWriter(filename: string) {
const gzip = createGzip();
const fileStream = createWriteStream(filename);
const output = pipeline(gzip, fileStream);
return {
write(data: PlanStep) {
gzip.write(JSON.stringify(data) + "\n");
return this;
},
async end() {
gzip.end();
await output;
},
};
}
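
For illustration only (not part of this change), a minimal reader counterpart could stream the gzipped NDJSON plan back as parsed steps; the readPlan name and location are hypothetical:

import { createReadStream } from "node:fs";
import { createGunzip } from "node:zlib";
import { createInterface } from "node:readline";
import { PlanStep } from "./common";

// Sketch of a reader for the plan format produced above: gunzip, split on newlines, JSON.parse each line.
export async function* readPlan(filename: string): AsyncGenerator<PlanStep> {
  const lines = createInterface({
    input: createReadStream(filename).pipe(createGunzip()),
    crlfDelay: Infinity,
  });
  for await (const line of lines) {
    if (!line.trim()) continue;
    yield JSON.parse(line) as PlanStep;
  }
}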