diff --git a/.prettierignore b/.prettierignore
new file mode 100644
index 0000000..14b0f05
--- /dev/null
+++ b/.prettierignore
@@ -0,0 +1,5 @@
+# Ignore artifacts:
+dist
+creds
+node_modules
+abi
diff --git a/.prettierrc.json b/.prettierrc.json
new file mode 100644
index 0000000..0dbf448
--- /dev/null
+++ b/.prettierrc.json
@@ -0,0 +1,5 @@
+{
+  "trailingComma": "es5",
+  "semi": true,
+  "singleQuote": true
+}
diff --git a/README.md b/README.md
index 996956b..4af9a02 100644
--- a/README.md
+++ b/README.md
@@ -1,22 +1,23 @@
-# Fileverse Agents 
+# Fileverse Agents

 Access the Fileverse middleware, programmatically. Fileverse's middleware is expanding from powering self-sovereign human collaboration to also enabling multi-agent coordination with crypto primitives guaranteed :yellow_heart:

 ## Documentation

-* [Take a look at our documentation](https://docs.fileverse.io/0x81fb962e2088De6925AffA4E068dd3FAF3EFE163/57#key=VWweDIp0IV7cWWPpYflsPkgEcekIkYXkdPkxfO02R2JbjXq-u1tf6Axsp7824S_7) to learn more about the Fileverse Agents SDK.
+- [Take a look at our documentation](https://docs.fileverse.io/0x81fb962e2088De6925AffA4E068dd3FAF3EFE163/57#key=VWweDIp0IV7cWWPpYflsPkgEcekIkYXkdPkxfO02R2JbjXq-u1tf6Axsp7824S_7) to learn more about the Fileverse Agents SDK.

-* Monitor, search and retrieve all your agents' onchain activity and outputs: https://agents.fileverse.io/
+- Monitor, search and retrieve all your agents' onchain activity and outputs: https://agents.fileverse.io/

 ## Overview

-With the Fileverse Agents SDK, your agents will have the ability to read, write, and organize data onchain and on IPFS. 
+With the Fileverse Agents SDK, your agents will have the ability to read, write, and organize data onchain and on IPFS.

 Out of the box and by default, your agent will get its own:
-* Safe Smart Account / Multisig: gasless transactions, make your Agent customisable
-* Smart Contract on Gnosis: public and permissionless registry of all the agent's outputs
-* Storage space on IPFS: decentralised and content addressing focused for your agent's outputs
-* Human-readable .md output: markdown is a format accessible by anyone, humans and other agents
+
+- Safe Smart Account / Multisig: gasless transactions, make your Agent customisable
+- Smart Contract on Gnosis: public and permissionless registry of all the agent's outputs
+- Storage space on IPFS: decentralised and content addressing focused for your agent's outputs
+- Human-readable .md output: markdown is a format accessible by anyone, humans and other agents

 ## Installation

@@ -34,7 +35,7 @@ import { PinataStorageProvider } from '@fileverse/agents/storage';
 // Create storage provider
 const storageProvider = new PinataStorageProvider({
   jwt: process.env.PINATA_JWT,
-  gateway: process.env.PINATA_GATEWAY
+  gateway: process.env.PINATA_GATEWAY,
 });

 // Initialize agent
@@ -42,17 +43,17 @@ const agent = new Agent({
   chain: process.env.CHAIN, // required - options: gnosis, sepolia
   viemAccount: privateKeyToAccount(process.env.PRIVATE_KEY), // required - viem account instance
   pimlicoAPIKey: process.env.PIMLICO_API_KEY, // required - see how to get API keys below
-  storageProvider // required - storage provider instance
+  storageProvider, // required - storage provider instance
 });

 // setup storage with namespace
-// This will generate the required keys and deploy a portal or pull the existing 
+// This will generate the required keys and deploy a portal or pull the existing
 await agent.setupStorage('my-namespace'); // file is generated as the creds/${namespace}.json in the main directory

 const latestBlockNumber = await agent.getBlockNumber();
 console.log(`Latest block number: ${latestBlockNumber}`);

-// create a new file 
+// create a new file
 const file = await agent.create('Hello World');
 console.log(`File created: ${file}`);

@@ -70,10 +71,11 @@ console.log(`File deleted: ${deletedFile}`);
 ```

 ## How to get API Keys
-* Pimlico API Key: https://www.pimlico.io/
-  * https://docs.pimlico.io/permissionless/tutorial/tutorial-1#get-a-pimlico-api-key
-* Pinata JWT and Gateway: https://pinata.cloud/
-  * https://docs.pinata.cloud/account-management/api-keys
+
+- Pimlico API Key: https://www.pimlico.io/
+  - https://docs.pimlico.io/permissionless/tutorial/tutorial-1#get-a-pimlico-api-key
+- Pinata JWT and Gateway: https://pinata.cloud/
+  - https://docs.pinata.cloud/account-management/api-keys

 ## Chains Supported

diff --git a/abi/index.js b/abi/index.js
index 0fcb5ad..9bd4d3f 100644
--- a/abi/index.js
+++ b/abi/index.js
@@ -1,4 +1,4 @@
-import PortalABI from "./Portal.json" with { type: "json" };
-import PortalRegistryABI from "./PortalRegistry.json" with { type: "json" };
+import PortalABI from './Portal.json' with { type: 'json' };
+import PortalRegistryABI from './PortalRegistry.json' with { type: 'json' };

 export { PortalABI, PortalRegistryABI };
diff --git a/agent/index.js b/agent/index.js
index c6c665a..a34b528 100644
--- a/agent/index.js
+++ b/agent/index.js
@@ -3,30 +3,30 @@ import {
   createWalletClient,
   http,
   parseEventLogs,
-} from "viem";
-import { gnosis, sepolia } from "viem/chains";
-import { PortalRegistryABI, PortalABI } from "../abi/index.js";
-import { generatePortalKeys, getPortalKeyVerifiers } from "./keys.js";
-import { createPimlicoClient } from "permissionless/clients/pimlico";
-import { toSafeSmartAccount } from "permissionless/accounts";
-import { entryPoint07Address } from "viem/account-abstraction"
-import { createSmartAccountClient } from "permissionless";
-import fs from "fs";
+} from 'viem';
+import { gnosis, sepolia } from 'viem/chains';
+import { PortalRegistryABI, PortalABI } from '../abi/index.js';
+import { generatePortalKeys, getPortalKeyVerifiers } from './keys.js';
+import { createPimlicoClient } from 'permissionless/clients/pimlico';
+import { toSafeSmartAccount } from 'permissionless/accounts';
+import { entryPoint07Address } from 'viem/account-abstraction';
+import { createSmartAccountClient } from 'permissionless';
+import fs from 'fs';

 class Agent {
-  DELETED_HASH = "deleted";
+  DELETED_HASH = 'deleted';
   constructor({ chain, viemAccount, pimlicoAPIKey, storageProvider }) {
     if (!chain) {
-      throw new Error("Chain is required - options: gnosis, sepolia");
+      throw new Error('Chain is required - options: gnosis, sepolia');
     }
     if (!pimlicoAPIKey) {
-      throw new Error("Pimlico API key is required");
+      throw new Error('Pimlico API key is required');
     }
     if (!storageProvider) {
-      throw new Error("Storage provider is required");
+      throw new Error('Storage provider is required');
     }
     this.chain =
-      chain === "gnosis" || chain?.name?.toLowerCase() === "gnosis"
+      chain === 'gnosis' || chain?.name?.toLowerCase() === 'gnosis'
         ? gnosis
         : sepolia;
     this.pimlicoAPIKey = pimlicoAPIKey;
@@ -45,17 +45,17 @@ class Agent {
       transport: http(pimlicoRpcUrl),
       entryPoint: {
         address: entryPoint07Address,
-        version: "0.7",
+        version: '0.7',
       },
     });
     this.safeAccount = await toSafeSmartAccount({
       client: this.publicClient,
       entryPoint: {
         address: entryPoint07Address,
-        version: "0.7",
+        version: '0.7',
       },
       owners: [this.viemAccount],
-      version: "1.4.1",
+      version: '1.4.1',
     });
     const smartAccountClient = createSmartAccountClient({
       account: this.safeAccount,
@@ -63,7 +63,8 @@ class Agent {
       paymaster: paymasterClient,
       bundlerTransport: http(pimlicoRpcUrl),
       userOperation: {
-        estimateFeesPerGas: async () => (await paymasterClient.getUserOperationGasPrice()).fast,
+        estimateFeesPerGas: async () =>
+          (await paymasterClient.getUserOperationGasPrice()).fast,
       },
     });
     this.smartAccountClient = smartAccountClient;
@@ -84,10 +85,10 @@ class Agent {
   }

   setPortalRegistry() {
-    if (this.chain.name.toLowerCase() === "gnosis") {
-      return "0x945690a516519daEE95834C05218839c8deEC88D";
+    if (this.chain.name.toLowerCase() === 'gnosis') {
+      return '0x945690a516519daEE95834C05218839c8deEC88D';
     } else {
-      return "0x8D9E28AC21D823ddE63fbf20FAD8EdD4F4a0cCfD";
+      return '0x8D9E28AC21D823ddE63fbf20FAD8EdD4F4a0cCfD';
     }
   }

@@ -96,68 +97,72 @@ class Agent {
   }

   async loadStorage(namespace) {
-    if (!fs.existsSync("creds")) {
-      fs.mkdirSync("creds");
+    if (!fs.existsSync('creds')) {
+      fs.mkdirSync('creds');
     }
     if (!fs.existsSync(`creds/${namespace}.json`)) {
       return null;
     }
-    const storage = fs.readFileSync(`creds/${namespace}.json`, "utf8");
+    const storage = fs.readFileSync(`creds/${namespace}.json`, 'utf8');
     return JSON.parse(storage);
   }

   async setupStorage(namespace) {
     if (!namespace) {
-      throw new Error("Namespace is required");
+      throw new Error('Namespace is required');
     }
     this.namespace = `${namespace}-${this.chain.name.toLowerCase()}`;
     await this.setupSafe();
     try {
       const storage = await this.loadStorage(this.namespace);
       if (storage && storage.namespace === this.namespace) {
-        console.log("Storage already exists");
+        console.log('Storage already exists');
         this.portal = storage;
         return storage.portalAddress;
       }

       const metadataIPFSHash = await this.uploadToStorage(
-        "metadata.json",
+        'metadata.json',
         JSON.stringify({
           namespace: this.namespace,
-          source: "FileverseAgent",
+          source: 'FileverseAgent',
           gateway: this.pinataGateway,
         })
      );
       const portalKeys = await generatePortalKeys();
       const verifiers = await getPortalKeyVerifiers(portalKeys);

       const hash = await this.smartAccountClient.sendUserOperation({
-        calls: [{
-          to: this.portalRegistry,
-          abi: PortalRegistryABI,
-          functionName: "mint",
-          args: [
-            metadataIPFSHash,
-            portalKeys.viewDID,
-            portalKeys.editDID,
-            verifiers.portalEncryptionKeyVerifier,
-            verifiers.portalDecryptionKeyVerifier,
-            verifiers.memberEncryptionKeyVerifer,
-            verifiers.memberDecryptionKeyVerifer,
-          ],
-        }]
-      });
-      const receipt = await this.smartAccountClient.waitForUserOperationReceipt({
-        hash,
+        calls: [
+          {
+            to: this.portalRegistry,
+            abi: PortalRegistryABI,
+            functionName: 'mint',
+            args: [
+              metadataIPFSHash,
+              portalKeys.viewDID,
+              portalKeys.editDID,
+              verifiers.portalEncryptionKeyVerifier,
+              verifiers.portalDecryptionKeyVerifier,
+              verifiers.memberEncryptionKeyVerifer,
+              verifiers.memberDecryptionKeyVerifer,
+            ],
+          },
+        ],
       });
+      const receipt = await this.smartAccountClient.waitForUserOperationReceipt(
+        {
+          hash,
+        }
+      );
       const logs = parseEventLogs({
         abi: PortalRegistryABI,
         logs: receipt.logs,
-        eventName: "Mint",
+        eventName: 'Mint',
       });

       const portalAddress = logs[0].args.portal;
-      if (!portalAddress) throw new Error("Portal not found");
+      if (!portalAddress) throw new Error('Portal not found');

       const portalData = {
         portalAddress,
@@ -167,17 +172,17 @@ class Agent {
         portalKeys,
         verifiers,
       };
-      
+
       // Set portal data
       this.portal = portalData;
-      
+
       fs.writeFileSync(
         `creds/${this.namespace}.json`,
         JSON.stringify(portalData, null, 2)
       );
       return portalAddress;
     } catch (error) {
-      console.error("Error deploying portal:", error);
+      console.error('Error deploying portal:', error);
       throw error;
     }
   }
@@ -188,10 +193,10 @@ class Agent {

   async prechecks() {
     if (!this.safeAccount) {
-      throw new Error("Storage not setup yet!");
+      throw new Error('Storage not setup yet!');
     }
     if (!this.portal || !this.portal.portalAddress) {
-      throw new Error("Portal not found!");
+      throw new Error('Portal not found!');
     }
   }

@@ -205,7 +210,7 @@ class Agent {

     const metadata = {
       name: `${this.portal.portalAddress}/${this.namespace}/output.md`,
-      description: "Markdown file created by FileverseAgent",
+      description: 'Markdown file created by FileverseAgent',
     };
     const metadataIpfsHash = await this.uploadToStorage(
       'metadata.json',
@@ -213,31 +218,35 @@
     );

     const hash = await this.smartAccountClient.sendUserOperation({
-      calls: [{
-        to: this.portal.portalAddress,
-        abi: PortalABI,
-        functionName: "addFile",
-        args: [
-          metadataIpfsHash,
-          contentIpfsHash,
-          "", // _gateIPFSHash (empty for public files)
-          0, // filetype (0 = PUBLIC from enum)
-          0, // version
-        ],
-      }]
+      calls: [
+        {
+          to: this.portal.portalAddress,
+          abi: PortalABI,
+          functionName: 'addFile',
+          args: [
+            metadataIpfsHash,
+            contentIpfsHash,
+            '', // _gateIPFSHash (empty for public files)
+            0, // filetype (0 = PUBLIC from enum)
+            0, // version
+          ],
+        },
+      ],
     });
-    const receipt = await this.smartAccountClient.waitForUserOperationReceipt({ hash });
+    const receipt = await this.smartAccountClient.waitForUserOperationReceipt({
+      hash,
+    });
     const logs = parseEventLogs({
       abi: PortalABI,
       logs: receipt.logs,
-      eventName: "AddedFile",
+      eventName: 'AddedFile',
     });

     const addedFileLog = logs[0];
     if (!addedFileLog) {
-      throw new Error("AddedFile event not found");
+      throw new Error('AddedFile event not found');
     }

     const fileId = addedFileLog.args?.fileId;
@@ -254,7 +263,7 @@ class Agent {
     const file = await this.publicClient.readContract({
       address: this.portal.portalAddress,
       abi: PortalABI,
-      functionName: "files",
+      functionName: 'files',
       args: [fileId],
     });
     const [metadataIpfsHash, contentIpfsHash] = file;
@@ -273,29 +282,34 @@ class Agent {
     // in order to unpin them after a successful update transaction
     const fileBeforeUpdate = await this.getFile(fileId);

-    const contentIpfsHash = await this.uploadToStorage("output.md", output);
+    const contentIpfsHash = await this.uploadToStorage('output.md', output);
     const metadata = {
-      name: "output.md",
-      description: "Updated Markdown file by FileverseAgent",
+      name: 'output.md',
+      description: 'Updated Markdown file by FileverseAgent',
       contentIpfsHash,
     };
-    const metadataIpfsHash = await this.uploadToStorage("metadata.json", metadata);
+    const metadataIpfsHash = await this.uploadToStorage(
+      'metadata.json',
+      metadata
+    );

     const hash = await this.smartAccountClient.sendUserOperation({
-      calls: [{
-        to: this.portal.portalAddress,
-        abi: PortalABI,
-        functionName: "editFile",
-        args: [
-          fileId,
-          metadataIpfsHash,
-          contentIpfsHash,
-          "", // _gateIPFSHash (empty for public files)
-          0, // filetype (0 = PUBLIC from enum)
-          0, // version
-        ],
-      }]
+      calls: [
+        {
+          to: this.portal.portalAddress,
+          abi: PortalABI,
+          functionName: 'editFile',
+          args: [
+            fileId,
+            metadataIpfsHash,
+            contentIpfsHash,
+            '', // _gateIPFSHash (empty for public files)
+            0, // filetype (0 = PUBLIC from enum)
+            0, // version
+          ],
+        },
+      ],
     });

     // try to unpin the file content and metadata
@@ -304,7 +318,7 @@ class Agent {
       await this.storageProvider.unpin(metadataIpfsHash);
       await this.storageProvider.unpin(contentIpfsHash);
     } catch (error) {
-      console.error("Error unpinning file from storage:", error);
+      console.error('Error unpinning file from storage:', error);
     }

     const transaction = {
@@ -325,38 +339,40 @@ class Agent {
       const fileBeforeDelete = await this.getFile(fileId);

       const hash = await this.smartAccountClient.sendUserOperation({
-        calls: [{
-          to: this.portal.portalAddress,
-          abi: PortalABI,
-          functionName: "editFile",
-          args: [
-            fileId,
-            `${protocol}${this.DELETED_HASH}`,
-            `${protocol}${this.DELETED_HASH}`,
-            "", // _gateIPFSHash (empty for deleted files)
-            0, // filetype (0 = PUBLIC from enum)
-            0, // version
+        calls: [
+          {
+            to: this.portal.portalAddress,
+            abi: PortalABI,
+            functionName: 'editFile',
+            args: [
+              fileId,
+              `${protocol}${this.DELETED_HASH}`,
+              `${protocol}${this.DELETED_HASH}`,
+              '', // _gateIPFSHash (empty for deleted files)
+              0, // filetype (0 = PUBLIC from enum)
+              0, // version
+            ],
+          },
         ],
-        }]
-      });
+      });

-    try {
-      const { metadataIpfsHash, contentIpfsHash } = fileBeforeDelete;
-      await this.storageProvider.unpin(metadataIpfsHash);
-      await this.storageProvider.unpin(contentIpfsHash);
-    } catch (error) {
-      console.error("Error unpinning file from storage:", error);
-    }
+      try {
+        const { metadataIpfsHash, contentIpfsHash } = fileBeforeDelete;
+        await this.storageProvider.unpin(metadataIpfsHash);
+        await this.storageProvider.unpin(contentIpfsHash);
+      } catch (error) {
+        console.error('Error unpinning file from storage:', error);
+      }

-    const transaction = {
-      hash: hash,
-      fileId,
-      portalAddress: this.portal.portalAddress,
-    };
+      const transaction = {
+        hash: hash,
+        fileId,
+        portalAddress: this.portal.portalAddress,
+      };

       return transaction;
     } catch (error) {
-      console.error("Error deleting file:", error);
-      throw new Error("File deletion failed.");
+      console.error('Error deleting file:', error);
+      throw new Error('File deletion failed.');
     }
   }
 }
diff --git a/agent/keys.js b/agent/keys.js
index 6f10454..5b7e32b 100644
--- a/agent/keys.js
+++ b/agent/keys.js
@@ -1,19 +1,19 @@
-import * as ucans from "@ucans/ucans";
-import { Base64 } from "js-base64";
-import { fromUint8Array } from "js-base64";
-import { generateKeyPairSync } from "crypto";
-import { sha256 } from "viem";
+import * as ucans from '@ucans/ucans';
+import { Base64 } from 'js-base64';
+import { fromUint8Array } from 'js-base64';
+import { generateKeyPairSync } from 'crypto';
+import { sha256 } from 'viem';
 const generateRandomRSAKeyPair = async () => {
-  const { publicKey, privateKey } = generateKeyPairSync("rsa", {
+  const { publicKey, privateKey } = generateKeyPairSync('rsa', {
     modulusLength: 4096,
     publicKeyEncoding: {
-      type: "spki",
-      format: "der",
+      type: 'spki',
+      format: 'der',
     },
     privateKeyEncoding: {
-      type: "pkcs8",
-      format: "der",
+      type: 'pkcs8',
+      format: 'der',
     },
   });
   return { publicKey, privateKey };
 };
@@ -86,19 +86,20 @@ const generatePortalKeys = async () => {
 };

 async function getAuthToken(contractAddress, editSecret, recipientDID) {
-  console.log("editSecret: ", editSecret);
+  console.log('editSecret: ', editSecret);
   const editKeypair = ucans.EdKeypair.fromSecretKey(editSecret);
   const ucan = await ucans.build({
     audience: recipientDID, // recipient DID
     issuer: editKeypair, // signing key
-    capabilities: [ // permissions for ucan
+    capabilities: [
+      // permissions for ucan
       {
-        with: { scheme: "storage", hierPart: `${contractAddress}` },
-        can: { namespace: "file", segments: [ "CREATE" ] }
+        with: { scheme: 'storage', hierPart: `${contractAddress}` },
+        can: { namespace: 'file', segments: ['CREATE'] },
       },
-    ]
-  })
-  const token = ucans.encode(ucan) // base64 jwt-formatted auth token
+    ],
+  });
+  const token = ucans.encode(ucan); // base64 jwt-formatted auth token
   return token;
 }

@@ -109,5 +110,5 @@ export {
   exportKeyPair,
   getPortalKeyVerifiers,
   generatePortalKeys,
-  getAuthToken
-};
\ No newline at end of file
+  getAuthToken,
+};
diff --git a/bin/cli.js b/bin/cli.js
index b447875..fd13648 100755
--- a/bin/cli.js
+++ b/bin/cli.js
@@ -6,50 +6,50 @@ const figlet = require('figlet');
 const { Agent } = require('../index');

 const getProjectName = async () => {
-    let projectName = yargs.argv.name;
-    if (!projectName) {
-        const rl = createInterface();
-        projectName = await askQuestion(rl, 'Enter project name: ');
-        rl.close();
-    }
-    return projectName;
+  let projectName = yargs.argv.name;
+  if (!projectName) {
+    const rl = createInterface();
+    projectName = await askQuestion(rl, 'Enter project name: ');
+    rl.close();
+  }
+  return projectName;
 };

 const createInterface = () => {
-    return readline.createInterface({
-        input: process.stdin,
-        output: process.stdout,
-    });
+  return readline.createInterface({
+    input: process.stdin,
+    output: process.stdout,
+  });
 };

 const askQuestion = (rl, question) => {
-    return new Promise((resolve) => {
-        rl.question(question, (answer) => {
-            resolve(answer);
-        });
+  return new Promise((resolve) => {
+    rl.question(question, (answer) => {
+      resolve(answer);
     });
+  });
 };

 const main = async () => {
-    console.log("Fileverse Agents Kickstart!!!");
-    const projectName = await getProjectName();
-    console.log(`Project name **${projectName}**`);
-    // fetching latest block number from gnosis chain
-    const chain = 'gnosis';
-    console.log(`Fetching latest block number from ${chain} chain`);
-    const agent = new Agent(chain);
-    const latestBlockNumber = await agent.getBlockNumber();
-    console.log(`Latest block number: ${latestBlockNumber}`);
-    figlet("Work in Progress !", function (err, data) {
-        if (err) {
-            console.log("Something went wrong...");
-            console.dir(err);
-            return;
-        }
-        console.log(data);
-    });
+  console.log('Fileverse Agents Kickstart!!!');
+  const projectName = await getProjectName();
+  console.log(`Project name **${projectName}**`);
+  // fetching latest block number from gnosis chain
+  const chain = 'gnosis';
+  console.log(`Fetching latest block number from ${chain} chain`);
+  const agent = new Agent(chain);
+  const latestBlockNumber = await agent.getBlockNumber();
+  console.log(`Latest block number: ${latestBlockNumber}`);
+  figlet('Work in Progress !', function (err, data) {
+    if (err) {
+      console.log('Something went wrong...');
+      console.dir(err);
+      return;
+    }
+    console.log(data);
+  });
 };

-main().catch(console.error);
\ No newline at end of file
+main().catch(console.error);
diff --git a/index.js b/index.js
index dde41a8..e5d31c5 100644
--- a/index.js
+++ b/index.js
@@ -1,9 +1,9 @@
-import { Agent } from "./agent/index.js";
+import { Agent } from './agent/index.js';
 import {
   PinataStorageProvider,
   SwarmStorageProvider,
   BaseStorageProvider,
-} from "./storage/index.js";
+} from './storage/index.js';

 export {
   Agent,
diff --git a/package-lock.json b/package-lock.json
index f4e0a23..d8e087c 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -29,6 +29,7 @@
         "@types/node": "^22.10.7",
         "chai": "^5.1.2",
         "mocha": "^11.0.1",
+        "prettier": "3.6.2",
         "ts-node": "^10.9.2",
         "typescript": "^5.7.3"
       }
diff --git a/package.json b/package.json
index 484ef58..7c4ad3f 100644
--- a/package.json
+++ b/package.json
@@ -5,7 +5,8 @@
   "main": "index.js",
   "type": "module",
   "scripts": {
-    "test": "mocha --loader=ts-node/esm 'test/**/*.{js,ts}'"
+    "test": "mocha --loader=ts-node/esm 'test/**/*.{js,ts}'",
+    "format": "prettier . --write"
   },
   "bin": "./bin/cli.js",
   "keywords": [
@@ -39,6 +40,7 @@
     "@types/node": "^22.10.7",
     "chai": "^5.1.2",
     "mocha": "^11.0.1",
+    "prettier": "3.6.2",
     "ts-node": "^10.9.2",
     "typescript": "^5.7.3"
   }
diff --git a/storage/index.js b/storage/index.js
index 706a621..a258efb 100644
--- a/storage/index.js
+++ b/storage/index.js
@@ -1,6 +1,6 @@
-import { BaseStorageProvider } from "./base.js";
-import { PinataStorageProvider } from "./pinata.js";
-import { SwarmStorageProvider } from "./swarm.js";
+import { BaseStorageProvider } from './base.js';
+import { PinataStorageProvider } from './pinata.js';
+import { SwarmStorageProvider } from './swarm.js';

 export { BaseStorageProvider, PinataStorageProvider, SwarmStorageProvider };

diff --git a/storage/pinata.js b/storage/pinata.js
index 8adc96a..9ea51ac 100644
--- a/storage/pinata.js
+++ b/storage/pinata.js
@@ -5,7 +5,7 @@ class PinataStorageProvider extends BaseStorageProvider {
   constructor({ pinataJWT, pinataGateway }) {
     super();
     if (!pinataJWT || !pinataGateway) {
-      throw new Error("Pinata JWT and gateway are required");
+      throw new Error('Pinata JWT and gateway are required');
     }
     this.pinata = new PinataSDK({
       pinataJwt: pinataJWT,
@@ -14,7 +14,7 @@ class PinataStorageProvider extends BaseStorageProvider {
   }

   async protocol() {
-    return "ipfs://";
+    return 'ipfs://';
   }

   async upload(fileName, content) {
@@ -24,7 +24,7 @@ class PinataStorageProvider extends BaseStorageProvider {
       const result = await this.pinata.upload.file(file);
       return `${protocol}${result.IpfsHash}`;
     } catch (error) {
-      console.error("Error uploading to IPFS:", error);
+      console.error('Error uploading to IPFS:', error);
       throw error;
     }
   }
@@ -47,8 +47,8 @@ class PinataStorageProvider extends BaseStorageProvider {
   async download(reference) {
     const protocol = await this.protocol();
     const strippedReference =
-      typeof reference === "string"
-        ? reference.replace(protocol, "")
+      typeof reference === 'string'
+        ? reference.replace(protocol, '')
         : reference;
     const result = await this.pinata.download.file(strippedReference);
     return result;
@@ -59,7 +59,7 @@ class PinataStorageProvider extends BaseStorageProvider {
       const result = await this.pinata.testAuthentication();
       return result;
     } catch (error) {
-      console.error("Error testing Pinata auth:", error);
+      console.error('Error testing Pinata auth:', error);
       return false;
     }
   }
diff --git a/storage/swarm.js b/storage/swarm.js
index a734466..8a98b5f 100644
--- a/storage/swarm.js
+++ b/storage/swarm.js
@@ -1,15 +1,15 @@
-import { Bee } from "@ethersphere/bee-js";
-import { BaseStorageProvider } from "./base.js";
+import { Bee } from '@ethersphere/bee-js';
+import { BaseStorageProvider } from './base.js';

 class SwarmStorageProvider extends BaseStorageProvider {
   constructor({ beeUrl, postageBatchId }) {
     super();
     if (!beeUrl) {
-      throw new Error("Bee node URL is required");
+      throw new Error('Bee node URL is required');
     }
     if (!postageBatchId) {
       throw new Error(
-        "Postage batch ID is required for uploading data to Swarm"
+        'Postage batch ID is required for uploading data to Swarm'
       );
     }
     this.bee = new Bee(beeUrl);
@@ -17,14 +17,14 @@ class SwarmStorageProvider extends BaseStorageProvider {
   }

   async protocol() {
-    return "bzz://";
+    return 'bzz://';
   }

   async upload(fileName, content) {
     try {
       const protocol = await this.protocol();
       // Create a File object from the content
-      const file = new File([content], fileName, { type: "text/plain" });
+      const file = new File([content], fileName, { type: 'text/plain' });

       // Upload the file to Swarm
       const result = await this.bee.uploadFile(
@@ -39,7 +39,7 @@ class SwarmStorageProvider extends BaseStorageProvider {
       // Return the Swarm reference as a URI
       return `${protocol}${result.reference}`;
     } catch (error) {
-      console.error("Error uploading to Swarm:", error);
+      console.error('Error uploading to Swarm:', error);
       throw error;
     }
   }
@@ -48,13 +48,13 @@ class SwarmStorageProvider extends BaseStorageProvider {
     try {
       const protocol = await this.protocol();
       const strippedReference =
-        typeof reference === "string"
-          ? reference.replace(protocol, "")
+        typeof reference === 'string'
+          ? reference.replace(protocol, '')
           : reference;
       const result = await this.bee.unpin(strippedReference);
       return `${protocol}${result.reference}`;
     } catch (error) {
-      console.error("Error unpinning from Swarm:", error);
+      console.error('Error unpinning from Swarm:', error);
       throw error;
     }
   }
@@ -63,13 +63,13 @@ class SwarmStorageProvider extends BaseStorageProvider {
     try {
       const protocol = await this.protocol();
       const strippedReference =
-        typeof reference === "string"
-          ? reference.replace(protocol, "")
-          : reference;
+        typeof reference === 'string'
+          ? reference.replace(protocol, '')
+          : reference;
       const result = await this.bee.downloadFile(strippedReference);
       return result;
     } catch (error) {
-      console.error("Error downloading from Swarm:", error);
+      console.error('Error downloading from Swarm:', error);
       throw error;
     }
   }
diff --git a/test/FileverseAgent.test.js b/test/FileverseAgent.test.js
index a24d934..75b22e0 100644
--- a/test/FileverseAgent.test.js
+++ b/test/FileverseAgent.test.js
@@ -1,84 +1,86 @@
-import 'dotenv/config';
-import { describe, it } from 'mocha';
-import { expect } from 'chai';
-import { privateKeyToAccount } from 'viem/accounts';
-import { sepolia } from 'viem/chains';
-import { Agent as FileverseAgent, PinataStorageProvider } from '../index.js';
-
-describe('FileverseAgent', () => {
-  let agent;
-  let fileId;
-
-  beforeEach(() => {
-    const account = privateKeyToAccount(process.env.PRIVATE_KEY);
-    // Initialize FileverseAgent with test values
-    agent = new FileverseAgent({
-      chain: sepolia,
-      viemAccount: account,
-      pimlicoAPIKey: process.env.PIMLICO_API_KEY,
-      storageProvider: new PinataStorageProvider({
-        pinataJWT: process.env.PINATA_JWT,
-        pinataGateway: process.env.PINATA_GATEWAY,
-      }),
-    });
-  });
-
-  it('should initialize with correct properties', () => {
-    // Test that the agent is properly initialized
-    expect(agent.chain).to.exist;
-    expect(agent.chain.name).to.equal('Sepolia');
-    expect(agent.publicClient).to.exist;
-    expect(agent.walletClient).to.exist;
-    expect(agent.portalRegistry).to.equal('0x8D9E28AC21D823ddE63fbf20FAD8EdD4F4a0cCfD');
-    expect(agent.viemAccount).to.exist;
-    expect(agent.storageProvider).to.exist;
-  });
-
-  it('should have required methods', () => {
-    expect(agent.setupStorage).to.be.a('function');
-    expect(agent.create).to.be.a('function');
-    expect(agent.update).to.be.a('function');
-    expect(agent.delete).to.be.a('function');
-    expect(agent.getFile).to.be.a('function');
-  });
-  it('should perform full file lifecycle (create, update, delete)', async function () {
-    this.timeout(300000);
-
-    // First deploy a portal
-    const portalAddress = await agent.setupStorage('test');
-    console.log('Portal deployed at:', portalAddress);
-    expect(portalAddress).to.be.a('string');
-
-    // Create file
-    console.log('Creating file...');
-    const createResult = await agent.create('Test content @001');
-    console.log('Create File Transaction:', createResult);
-    let receipt = await agent.smartAccountClient.waitForUserOperationReceipt({
-      hash: createResult.hash,
-    });
-    console.log('Create receipt:', receipt);
-    fileId = createResult.fileId;
-    expect(createResult).to.have.property('hash');
-    expect(createResult).to.have.property('fileId');
-
-    // Update same file
-    console.log('Updating file...', fileId);
-    const updateResult = await agent.update(fileId, 'Updated content @002');
-    console.log('Update File Transaction:', updateResult);
-    receipt = await agent.smartAccountClient.waitForUserOperationReceipt({
-      hash: updateResult.hash,
-    });
-    console.log('Update receipt:', receipt);
-    expect(updateResult.fileId).to.equal(fileId);
-
-    // Delete the file
-    console.log('Deleting file...', fileId);
-    const deleteResult = await agent.delete(fileId);
-    console.log('Delete File Transaction:', deleteResult);
-    receipt = await agent.smartAccountClient.waitForUserOperationReceipt({
-      hash: deleteResult.hash,
-    });
-    console.log('Delete receipt:', receipt);
-    expect(deleteResult.fileId).to.equal(fileId);
-  });
-});
+import 'dotenv/config';
+import { describe, it } from 'mocha';
+import { expect } from 'chai';
+import { privateKeyToAccount } from 'viem/accounts';
+import { sepolia } from 'viem/chains';
+import { Agent as FileverseAgent, PinataStorageProvider } from '../index.js';
+
+describe('FileverseAgent', () => {
+  let agent;
+  let fileId;
+
+  beforeEach(() => {
+    const account = privateKeyToAccount(process.env.PRIVATE_KEY);
+    // Initialize FileverseAgent with test values
+    agent = new FileverseAgent({
+      chain: sepolia,
+      viemAccount: account,
+      pimlicoAPIKey: process.env.PIMLICO_API_KEY,
+      storageProvider: new PinataStorageProvider({
+        pinataJWT: process.env.PINATA_JWT,
+        pinataGateway: process.env.PINATA_GATEWAY,
+      }),
+    });
+  });
+
+  it('should initialize with correct properties', () => {
+    // Test that the agent is properly initialized
+    expect(agent.chain).to.exist;
+    expect(agent.chain.name).to.equal('Sepolia');
+    expect(agent.publicClient).to.exist;
+    expect(agent.walletClient).to.exist;
+    expect(agent.portalRegistry).to.equal(
+      '0x8D9E28AC21D823ddE63fbf20FAD8EdD4F4a0cCfD'
+    );
+    expect(agent.viemAccount).to.exist;
+    expect(agent.storageProvider).to.exist;
+  });
+
+  it('should have required methods', () => {
+    expect(agent.setupStorage).to.be.a('function');
+    expect(agent.create).to.be.a('function');
+    expect(agent.update).to.be.a('function');
+    expect(agent.delete).to.be.a('function');
+    expect(agent.getFile).to.be.a('function');
+  });
+  it('should perform full file lifecycle (create, update, delete)', async function () {
+    this.timeout(300000);
+
+    // First deploy a portal
+    const portalAddress = await agent.setupStorage('test');
+    console.log('Portal deployed at:', portalAddress);
+    expect(portalAddress).to.be.a('string');
+
+    // Create file
+    console.log('Creating file...');
+    const createResult = await agent.create('Test content @001');
+    console.log('Create File Transaction:', createResult);
+    let receipt = await agent.smartAccountClient.waitForUserOperationReceipt({
+      hash: createResult.hash,
+    });
+    console.log('Create receipt:', receipt);
+    fileId = createResult.fileId;
+    expect(createResult).to.have.property('hash');
+    expect(createResult).to.have.property('fileId');
+
+    // Update same file
+    console.log('Updating file...', fileId);
+    const updateResult = await agent.update(fileId, 'Updated content @002');
+    console.log('Update File Transaction:', updateResult);
+    receipt = await agent.smartAccountClient.waitForUserOperationReceipt({
+      hash: updateResult.hash,
+    });
+    console.log('Update receipt:', receipt);
+    expect(updateResult.fileId).to.equal(fileId);
+
+    // Delete the file
+    console.log('Deleting file...', fileId);
+    const deleteResult = await agent.delete(fileId);
+    console.log('Delete File Transaction:', deleteResult);
+    receipt = await agent.smartAccountClient.waitForUserOperationReceipt({
+      hash: deleteResult.hash,
+    });
+    console.log('Delete receipt:', receipt);
+    expect(deleteResult.fileId).to.equal(fileId);
+  });
+});