diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b599f34..cf3cf9b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -121,8 +121,9 @@ jobs: sleep 10 attempt=$((attempt + 1)) done + - name: docker logs - run: docker logs ocean-ocean-contracts-1 && docker logs ocean-ganache-1 && docker logs ocean-typesense-1 + run: docker logs ocean_ocean-contracts_1 && docker logs ocean_typesense_1 if: ${{ failure() }} - name: Run system tests @@ -131,4 +132,9 @@ jobs: INDEXING_RETRY_INTERVAL: 4000 INDEXING_MAX_RETRIES: 120 NODE_URL: 'http://127.0.0.1:8001' - AVOID_LOOP_RUN: true \ No newline at end of file + AVOID_LOOP_RUN: true + - name: Print Ocean Node Logs if tests fail + if: ${{ failure() }} + run: | + echo "========== Ocean Node Logs ==========" + tac ${{ github.workspace }}/ocean-node/ocean-node.log || echo "Log file not found" diff --git a/README.md b/README.md index e828756..594a714 100644 --- a/README.md +++ b/README.md @@ -194,12 +194,19 @@ npm run cli [options] **Start Compute:** - **Positional:** - `npm run cli startCompute did1,did2 algoDid env1` + `npm run cli startCompute -- did1,did2 algoDid env1 maxJobDuration paymentToken resources --accept true` - **Named Options:** - `npm run cli startCompute --datasets did1,did2 --algo algoDid --env env1` + `npm run cli startCompute --datasets did1,did2 --algo algoDid --env env1 --maxJobDuration maxJobDuration --token paymentToken --resources resources --accept true` (Options can be provided in any order.) + +- `maxJobDuration` is a required parameter and represents the maximum job execution time, measured in seconds; the payment is based on this value, so the user must provide it. +- `paymentToken` is required and represents the address of a token supported by the environment for paying for the compute job. It can be retrieved from the `getComputeEnvironments` command output. +- `resources` is required and represents a stringified JSON array obtained from the `getComputeEnvironments` command output. The `getComputeEnvironments` command shows the available resources, and the resources selected by the user must stay within those limits. +e.g.: `'[{"id":"cpu","amount":3},{"id":"ram","amount":16772672536},{"id":"disk","amount":0}]'` +- The `--accept` option can be set to `true` or `false`. If it is set to `false`, a prompt asks the user to manually accept the payment before the compute job starts. If it is set to `true`, the compute job starts automatically, without user input. + --- **Start Free Compute:** @@ -291,6 +298,11 @@ npm run cli [options] `-d, --datasets <datasetDids>` `-a, --algo <algoDid>` `-e, --env <envId>` + `--init ` + `--maxJobDuration <maxJobDuration>` + `-t, --token <paymentToken>` + `--resources <resources>` + `--amountToDeposit <amount>` (If `''`, it falls back to the payment amount from compute initialization.)
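+ + For example, a full paid-compute invocation with named options could look like this (the DIDs, environment ID, and token address below are illustrative placeholders; take the real values from your own publish output and from `getComputeEnvironments`): + `npm run cli startCompute --datasets did:op:123...abc --algo did:op:456...def --env 0x4b...f1 --maxJobDuration 3600 --token 0xf08A50178dfcDe18524640EA6618a1f965821715 --resources '[{"id":"cpu","amount":2},{"id":"ram","amount":8589934592},{"id":"disk","amount":0}]' --accept true`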
- **startFreeCompute:** `-d, --datasets ` diff --git a/metadata/downloadAssetPaymentUSDC.json b/metadata/downloadAssetPaymentUSDC.json index d490967..bf1ffbb 100644 --- a/metadata/downloadAssetPaymentUSDC.json +++ b/metadata/downloadAssetPaymentUSDC.json @@ -41,32 +41,32 @@ "timeout": 86400 } ], - "event": { - }, - "nft": { - "address": "", - "name": "Ocean Data NFT", - "symbol": "OCEAN-NFT", - "state": 5, - "tokenURI": "", - "owner": "", - "created": "" - }, - "purgatory": { - "state": false + "indexedMetadata": { + "event": {}, + "nft": { + "address": "", + "name": "Ocean Data NFT", + "symbol": "OCEAN-NFT", + "state": 5, + "tokenURI": "", + "owner": "", + "created": "" + }, + "purgatory": { + "state": false + }, + "stats": [{ + "orders": 0, + "token": "0xf08A50178dfcDe18524640EA6618a1f965821715", + "prices": [ + { + "price": "2" + } + ] + }] }, "datatokens": [ ], - "stats": { - "allocated": 0, - "orders": 0, - "price": { - "value": "2", - "tokenAddress": "0xf08A50178dfcDe18524640EA6618a1f965821715", - "tokenSymbol": "USDC" - } - }, - "accessDetails": { "baseToken": { "address": "0xf08A50178dfcDe18524640EA6618a1f965821715", diff --git a/metadata/jsAlgo.json b/metadata/jsAlgo.json index 725f5fb..638cac1 100644 --- a/metadata/jsAlgo.json +++ b/metadata/jsAlgo.json @@ -54,20 +54,30 @@ } } ], - "stats": { - "allocated": 0, - "orders": 0, - "price": { - "value": "0" - } - }, - "nft": { - "address": "", - "name": "Ocean Data NFT", - "symbol": "OCEAN-NFT", - "state": 5, - "tokenURI": "", - "owner": "", - "created": "" + "indexedMetadata": { + "event": {}, + "nft": { + "address": "", + "name": "Ocean Data NFT", + "symbol": "OCEAN-NFT", + "state": 5, + "tokenURI": "", + "owner": "", + "created": "" + }, + "purgatory": { + "state": false + }, + "stats": [ + { + "orders": 0, + "prices": [ + { + "price": "0" + } + ] + } + ], + "datatokens": [] } } \ No newline at end of file diff --git a/metadata/jsIPFSAlgo.json b/metadata/jsIPFSAlgo.json index 7cc1eb9..f850c01 100644 --- a/metadata/jsIPFSAlgo.json +++ b/metadata/jsIPFSAlgo.json @@ -51,20 +51,30 @@ } } ], - "stats": { - "allocated": 0, - "orders": 0, - "price": { - "value": "0" + "indexedMetadata": { + "event": {}, + "nft": { + "address": "", + "name": "Ocean Data NFT", + "symbol": "OCEAN-NFT", + "state": 5, + "tokenURI": "", + "owner": "", + "created": "" + }, + "purgatory": { + "state": false + }, + "stats": [ + { + "orders": 0, + "prices": [ + { + "price": "0" + } + ] } - }, - "nft": { - "address": "", - "name": "Ocean Data NFT", - "symbol": "OCEAN-NFT", - "state": 5, - "tokenURI": "", - "owner": "", - "created": "" - } + ], + "datatokens": [] + } } \ No newline at end of file diff --git a/metadata/pythonAlgo.json b/metadata/pythonAlgo.json index c64c72f..ca2982f 100644 --- a/metadata/pythonAlgo.json +++ b/metadata/pythonAlgo.json @@ -48,20 +48,30 @@ "serviceEndpoint": "http://10.84.128.6:8001" } ], - "stats": { - "allocated": 0, - "orders": 0, - "price": { - "value": "0" - } - }, - "nft": { - "address": "", - "name": "Ocean Data NFT", - "symbol": "OCEAN-NFT", - "state": 5, - "tokenURI": "", - "owner": "", - "created": "" + "indexedMetadata": { + "event": {}, + "nft": { + "address": "", + "name": "Ocean Data NFT", + "symbol": "OCEAN-NFT", + "state": 5, + "tokenURI": "", + "owner": "", + "created": "" + }, + "purgatory": { + "state": false + }, + "stats": [ + { + "orders": 0, + "prices": [ + { + "price": "0" + } + ] + } + ], + "datatokens": [] } } diff --git a/metadata/simpleComputeDataset.json b/metadata/simpleComputeDataset.json index 
0e8ba71..8d99cb1 100644 --- a/metadata/simpleComputeDataset.json +++ b/metadata/simpleComputeDataset.json @@ -47,27 +47,30 @@ } } ], - "event": { - }, - "nft": { - "address": "", - "name": "Ocean Data NFT", - "symbol": "OCEAN-NFT", - "state": 5, - "tokenURI": "", - "owner": "", - "created": "" - }, - "purgatory": { - "state": false - }, - "datatokens": [ - ], - "stats": { - "allocated": 0, - "orders": 0, - "price": { - "value": "0" - } + "indexedMetadata": { + "event": {}, + "nft": { + "address": "", + "name": "Ocean Data NFT", + "symbol": "OCEAN-NFT", + "state": 5, + "tokenURI": "", + "owner": "", + "created": "" + }, + "purgatory": { + "state": false + }, + "stats": [ + { + "orders": 0, + "prices": [ + { + "price": "0" + } + ] + } + ], + "datatokens": [] } } diff --git a/metadata/simpleDownloadDataset.json b/metadata/simpleDownloadDataset.json index 48ffc9d..bb90472 100644 --- a/metadata/simpleDownloadDataset.json +++ b/metadata/simpleDownloadDataset.json @@ -41,27 +41,30 @@ "timeout": 86400 } ], - "event": { - }, - "nft": { - "address": "", - "name": "Ocean Data NFT", - "symbol": "OCEAN-NFT", - "state": 5, - "tokenURI": "", - "owner": "", - "created": "" - }, - "purgatory": { - "state": false - }, - "datatokens": [ - ], - "stats": { - "allocated": 0, - "orders": 0, - "price": { - "value": "2" - } + "indexedMetadata": { + "event": {}, + "nft": { + "address": "", + "name": "Ocean Data NFT", + "symbol": "OCEAN-NFT", + "state": 5, + "tokenURI": "", + "owner": "", + "created": "" + }, + "purgatory": { + "state": false + }, + "stats": [ + { + "orders": 0, + "prices": [ + { + "price": "0" + } + ] + } + ], + "datatokens": [] } } \ No newline at end of file diff --git a/metadata/simpleIPFSComputeDataset.json b/metadata/simpleIPFSComputeDataset.json index cce3231..a794083 100644 --- a/metadata/simpleIPFSComputeDataset.json +++ b/metadata/simpleIPFSComputeDataset.json @@ -46,27 +46,30 @@ } } ], - "event": { - }, - "nft": { - "address": "", - "name": "Ocean Data NFT", - "symbol": "OCEAN-NFT", - "state": 5, - "tokenURI": "", - "owner": "", - "created": "" - }, - "purgatory": { - "state": false - }, - "datatokens": [ - ], - "stats": { - "allocated": 0, - "orders": 0, - "price": { - "value": "0" + "indexedMetadata": { + "event": {}, + "nft": { + "address": "", + "name": "Ocean Data NFT", + "symbol": "OCEAN-NFT", + "state": 5, + "tokenURI": "", + "owner": "", + "created": "" + }, + "purgatory": { + "state": false + }, + "stats": [ + { + "orders": 0, + "prices": [ + { + "price": "0" + } + ] } - } + ] + }, + "datatokens": [] } \ No newline at end of file diff --git a/package-lock.json b/package-lock.json index 46464aa..763101c 100644 --- a/package-lock.json +++ b/package-lock.json @@ -4237,9 +4237,9 @@ } }, "node_modules/@rdfjs/types": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@rdfjs/types/-/types-1.1.2.tgz", - "integrity": "sha512-wqpOJK1QCbmsGNtyzYnojPU8gRDPid2JO0Q0kMtb4j65xhCK880cnKAfEOwC+dX85VJcCByQx5zOwyyfCjDJsg==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@rdfjs/types/-/types-2.0.1.tgz", + "integrity": "sha512-uyAzpugX7KekAXAHq26m3JlUIZJOC0uSBhpnefGV5i15bevDyyejoB7I+9MKeUrzXD8OOUI3+4FeV1wwQr5ihA==", "dependencies": { "@types/node": "*" } @@ -4859,19 +4859,13 @@ } }, "node_modules/@types/readable-stream": { - "version": "4.0.18", - "resolved": "https://registry.npmjs.org/@types/readable-stream/-/readable-stream-4.0.18.tgz", - "integrity": "sha512-21jK/1j+Wg+7jVw1xnSwy/2Q1VgVjWuFssbYGTREPUBeZ+rqVFl2udq0IkxzPC0ZhOzVceUbyIACFZKLqKEBlA==", + "version": 
"4.0.19", + "resolved": "https://registry.npmjs.org/@types/readable-stream/-/readable-stream-4.0.19.tgz", + "integrity": "sha512-6Tgd3lMocKwOul/kwAAgSebkhdMCLhRvcJ6CKHA6wdql2qNIwK6hw3Y4PZQxn9HcJogoC/1ZOmkFM7OZKH/VrA==", "dependencies": { - "@types/node": "*", - "safe-buffer": "~5.1.1" + "@types/node": "*" } }, - "node_modules/@types/readable-stream/node_modules/safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" - }, "node_modules/@types/resolve": { "version": "1.17.1", "resolved": "https://registry.npmjs.org/@types/resolve/-/resolve-1.17.1.tgz", @@ -5193,17 +5187,17 @@ "dev": true }, "node_modules/@vocabulary/sh": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/@vocabulary/sh/-/sh-1.1.5.tgz", - "integrity": "sha512-8R4uxHLpwmp6l6szZdCtfQx0wRy64OHuOsYTDfhCsbJ773Uv6nCM2bYBtjjirZHN+2m3uHQWgtWOdvuu1jwmOA==", + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/@vocabulary/sh/-/sh-1.1.6.tgz", + "integrity": "sha512-8IfAQoKh57THz8LA2+n1jaY/VC2XaqMNSsJgzBKSSrj20y5PSMAawb6dMsxoLxqDIPBDs1TFRl/9CijUnwbBUA==", "peerDependencies": { - "@rdfjs/types": "^1.0.0" + "@rdfjs/types": "^2.0.0" } }, "node_modules/@zazuko/env": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@zazuko/env/-/env-2.5.1.tgz", - "integrity": "sha512-FfvogDssZEmWYeVlh/d6q63rEubgtOSvkuDV/2VIB6rz4a3c93OsMQlwasQkMko0+gEJQZoh7VVR8+gaCaCNpw==", + "version": "2.5.3", + "resolved": "https://registry.npmjs.org/@zazuko/env/-/env-2.5.3.tgz", + "integrity": "sha512-kivvYoXGFjva1CuXeK/jaaWMy9eXhhFmuSfSJGVW2wH7XbcZehJObjPXEVlZ3kKLCFhuv96j8Ot3SkbYaOtuLA==", "dependencies": { "@rdfjs/data-model": "^2.0.1", "@rdfjs/dataset": "^2.0.1", @@ -5220,17 +5214,17 @@ "rdf-dataset-ext": "^1.1.0" }, "peerDependencies": { - "@rdfjs/types": "^1.1.0", + "@rdfjs/types": "^2", "@types/clownface": "^2.0.0", - "@types/rdf-dataset-ext": "^1", - "@types/rdfjs__data-model": "^2.0.7", + "@types/rdf-dataset-ext": "^1.0.8", + "@types/rdfjs__data-model": "^2.0.9", "@types/rdfjs__dataset": "^2.0.7", "@types/rdfjs__environment": "^1.0.0", - "@types/rdfjs__formats": "^4.0.0", + "@types/rdfjs__formats": "^4.0.1", "@types/rdfjs__namespace": "^2.0.10", - "@types/rdfjs__term-map": "^2.0.9", - "@types/rdfjs__term-set": "^2.0.8", - "@types/rdfjs__traverser": "^0.1.3" + "@types/rdfjs__term-map": "^2.0.10", + "@types/rdfjs__term-set": "^2.0.9", + "@types/rdfjs__traverser": "^0.1.5" } }, "node_modules/@zazuko/env-core": { @@ -5245,18 +5239,18 @@ } }, "node_modules/@zazuko/env-node": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/@zazuko/env-node/-/env-node-2.1.4.tgz", - "integrity": "sha512-D3pw3T3SpC6lI3D5Akio/63lWaI9VKWazVGcjWP0gC598JQ60V7T+4QSCUcxT0ZGTDOkNdT3loYR9P+JK96KeQ==", + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@zazuko/env-node/-/env-node-2.1.5.tgz", + "integrity": "sha512-qYrePSWiz9XOB5R0NfvuaogmPP8gLcMsl18G7sYW1h6RB4l3aiofAr4YAKTK+bpJgGjeivDDi64lsQjF2F6p0g==", "dependencies": { "@rdfjs/fetch-lite": "^3.2.2", "@rdfjs/formats": "^4.0.0", - "@zazuko/env": "^2.1.1", + "@zazuko/env": "^2.5.3", "@zazuko/rdf-utils-fs": "^3.3.0" }, "peerDependencies": { - "@types/rdfjs__fetch-lite": "^3.0.6", - "@types/rdfjs__formats": "^4" + "@types/rdfjs__fetch-lite": "^3.0.11", + "@types/rdfjs__formats": "^4.0.1" } }, "node_modules/@zazuko/env/node_modules/get-stream": { @@ -5286,9 +5280,9 @@ } }, "node_modules/@zazuko/prefixes": { - 
"version": "2.3.0", - "resolved": "https://registry.npmjs.org/@zazuko/prefixes/-/prefixes-2.3.0.tgz", - "integrity": "sha512-PrJnd4YUwsxZgsTP4faiQKEkRhDd5cFjY2budkHltGJVUtu0kDZpH/1zArXJxSbrNbScIx7rSKDx0NsvBh6ACw==" + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/@zazuko/prefixes/-/prefixes-2.4.0.tgz", + "integrity": "sha512-bd53k5XgFKWR56sofHeAcIbv8o0m2HsJlbHaHbrMufUCdgiZsCLvZn84Vh1dhcsyBHOD0EIo9AD4pNWDQLVRaw==" }, "node_modules/@zazuko/rdf-utils-fs": { "version": "3.3.1", @@ -10572,9 +10566,9 @@ } }, "node_modules/jsonld-context-parser/node_modules/@types/node": { - "version": "18.19.80", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.80.tgz", - "integrity": "sha512-kEWeMwMeIvxYkeg1gTc01awpwLbfMRZXdIhwRcakd/KlK53jmRC26LqcbIt7fnAQTu5GzlnWmzA3H6+l1u6xxQ==", + "version": "18.19.103", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.103.tgz", + "integrity": "sha512-hHTHp+sEz6SxFsp+SA+Tqrua3AbmlAw+Y//aEwdHrdZkYVRWdvWD3y5uPZ0flYOkgskaFWqZ/YGFm3FaFQ0pRw==", "dependencies": { "undici-types": "~5.26.4" } @@ -11528,12 +11522,11 @@ } }, "node_modules/n3": { - "version": "1.24.2", - "resolved": "https://registry.npmjs.org/n3/-/n3-1.24.2.tgz", - "integrity": "sha512-j/3PKmK0MA3tAohDCl9y1JDaNxp8wCnhTtrOOgZ1O17JVtWLkzHsp2jZ8YhY2uS4FWQAm6mExcXvl7C8lwXyaw==", + "version": "1.25.2", + "resolved": "https://registry.npmjs.org/n3/-/n3-1.25.2.tgz", + "integrity": "sha512-ZBPnAgOw4sze/hnyoydNA5Ts9wbwiG+BXssTkdBKD6IkQZcg1IfQdo5AMU9JhsIu/RGtRD1QD0gphEhk/6ZnWA==", "dependencies": { "buffer": "^6.0.3", - "queue-microtask": "^1.1.2", "readable-stream": "^4.0.0" }, "engines": { @@ -11640,6 +11633,7 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==", + "deprecated": "Use your platform's native DOMException instead", "funding": [ { "type": "github", @@ -13236,6 +13230,7 @@ "version": "1.2.3", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, "funding": [ { "type": "github", @@ -13318,14 +13313,6 @@ "url": "https://github.com/sponsors/rubensworks/" } }, - "node_modules/rdf-data-factory/node_modules/@rdfjs/types": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@rdfjs/types/-/types-2.0.1.tgz", - "integrity": "sha512-uyAzpugX7KekAXAHq26m3JlUIZJOC0uSBhpnefGV5i15bevDyyejoB7I+9MKeUrzXD8OOUI3+4FeV1wwQr5ihA==", - "dependencies": { - "@types/node": "*" - } - }, "node_modules/rdf-dataset-ext": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/rdf-dataset-ext/-/rdf-dataset-ext-1.1.0.tgz", @@ -13336,53 +13323,45 @@ "readable-stream": "3 - 4" } }, - "node_modules/rdf-literal": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/rdf-literal/-/rdf-literal-1.3.2.tgz", - "integrity": "sha512-79Stlu3sXy0kq9/decHFLf3xNPuY6sfhFPhd/diWErgaFr0Ekyg38Vh9bnVcqDYu48CFRi0t+hrFii49n92Hbw==", - "dependencies": { - "@rdfjs/types": "*", - "rdf-data-factory": "^1.1.0" - } - }, - "node_modules/rdf-literal/node_modules/rdf-data-factory": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/rdf-data-factory/-/rdf-data-factory-1.1.3.tgz", - "integrity": "sha512-ny6CI7m2bq4lfQQmDYvcb2l1F9KtGwz9chipX4oWu2aAtVoXjb7k3d8J1EsgAsEbMXnBipB/iuRen5H2fwRWWQ==", - "dependencies": { - "@rdfjs/types": "^1.0.0" - } - }, 
"node_modules/rdf-validate-datatype": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/rdf-validate-datatype/-/rdf-validate-datatype-0.2.1.tgz", - "integrity": "sha512-DpREnmoWDxC80KyslZeBPLQb3ztyeiOolT4uCl58tCju2KHJu4j5vonmVVdEJh2Mpad5UY57v6sSM/hfSTFGKQ==", + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/rdf-validate-datatype/-/rdf-validate-datatype-0.2.2.tgz", + "integrity": "sha512-mH9qL8i0WBbZ6HJCA26BB6V+WV2MraKvitez3SV0QegBWVQ4wYO49CgfFBzoAYg6tlnhFXl9MkrOAQ07X2N1FA==", "dependencies": { "@rdfjs/term-map": "^2.0.0", - "@tpluscode/rdf-ns-builders": "3 - 4" + "@tpluscode/rdf-ns-builders": "3 - 5" } }, "node_modules/rdf-validate-shacl": { - "version": "0.5.6", - "resolved": "https://registry.npmjs.org/rdf-validate-shacl/-/rdf-validate-shacl-0.5.6.tgz", - "integrity": "sha512-B23lccAy1uIYU9XVoXxK2DFGMV+xBbpvzTpfBJXLKoURjdEOfu/MCih1AHiGJh9PInvl667GvkVD9TmAE2b3Sg==", + "version": "0.5.10", + "resolved": "https://registry.npmjs.org/rdf-validate-shacl/-/rdf-validate-shacl-0.5.10.tgz", + "integrity": "sha512-I+TRVGeKn5eG/kTzVGRGGNThCSkgX/v7EUSOUEsIcHubyyShQYzRbQqyU45zKzNjLWdqp9abFHw1ULUPzWyo1A==", "dependencies": { "@rdfjs/data-model": "^2", "@rdfjs/dataset": "^2", "@rdfjs/environment": "^1", "@rdfjs/namespace": "^2.0.0", "@rdfjs/term-set": "^2.0.1", - "@vocabulary/sh": "^1.0.1", + "@rdfjs/types": "^1.1.0", + "@vocabulary/sh": "^1.1.5", "clownface": "^2.0.0", "debug": "^4.3.2", - "rdf-literal": "^1.3.0", + "rdf-literal": "^1.3.2", "rdf-validate-datatype": "^0.2.0" } }, + "node_modules/rdf-validate-shacl/node_modules/@rdfjs/types": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@rdfjs/types/-/types-1.1.2.tgz", + "integrity": "sha512-wqpOJK1QCbmsGNtyzYnojPU8gRDPid2JO0Q0kMtb4j65xhCK880cnKAfEOwC+dX85VJcCByQx5zOwyyfCjDJsg==", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/rdf-validate-shacl/node_modules/debug": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", - "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", + "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", "dependencies": { "ms": "^2.1.3" }, @@ -13400,6 +13379,23 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" }, + "node_modules/rdf-validate-shacl/node_modules/rdf-data-factory": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/rdf-data-factory/-/rdf-data-factory-1.1.3.tgz", + "integrity": "sha512-ny6CI7m2bq4lfQQmDYvcb2l1F9KtGwz9chipX4oWu2aAtVoXjb7k3d8J1EsgAsEbMXnBipB/iuRen5H2fwRWWQ==", + "dependencies": { + "@rdfjs/types": "^1.0.0" + } + }, + "node_modules/rdf-validate-shacl/node_modules/rdf-literal": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/rdf-literal/-/rdf-literal-1.3.2.tgz", + "integrity": "sha512-79Stlu3sXy0kq9/decHFLf3xNPuY6sfhFPhd/diWErgaFr0Ekyg38Vh9bnVcqDYu48CFRi0t+hrFii49n92Hbw==", + "dependencies": { + "@rdfjs/types": "*", + "rdf-data-factory": "^1.1.0" + } + }, "node_modules/rdfxml-streaming-parser": { "version": "2.4.0", "resolved": "https://registry.npmjs.org/rdfxml-streaming-parser/-/rdfxml-streaming-parser-2.4.0.tgz", @@ -13415,6 +13411,14 @@ "validate-iri": "^1.0.0" } }, + "node_modules/rdfxml-streaming-parser/node_modules/@rdfjs/types": { + 
"version": "1.1.2", + "resolved": "https://registry.npmjs.org/@rdfjs/types/-/types-1.1.2.tgz", + "integrity": "sha512-wqpOJK1QCbmsGNtyzYnojPU8gRDPid2JO0Q0kMtb4j65xhCK880cnKAfEOwC+dX85VJcCByQx5zOwyyfCjDJsg==", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/rdfxml-streaming-parser/node_modules/@types/readable-stream": { "version": "2.3.15", "resolved": "https://registry.npmjs.org/@types/readable-stream/-/readable-stream-2.3.15.tgz", @@ -15696,9 +15700,9 @@ } }, "node_modules/undici": { - "version": "5.28.5", - "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.5.tgz", - "integrity": "sha512-zICwjrDrcrUE0pyyJc1I2QzBkLM8FINsgOrt6WjA+BgajVq9Nxu2PbFFXUrAggLfDXlZGZBVZYw7WNV5KiBiBA==", + "version": "5.29.0", + "resolved": "https://registry.npmjs.org/undici/-/undici-5.29.0.tgz", + "integrity": "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg==", "dependencies": { "@fastify/busboy": "^2.0.0" }, @@ -19287,9 +19291,9 @@ } }, "@rdfjs/types": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@rdfjs/types/-/types-1.1.2.tgz", - "integrity": "sha512-wqpOJK1QCbmsGNtyzYnojPU8gRDPid2JO0Q0kMtb4j65xhCK880cnKAfEOwC+dX85VJcCByQx5zOwyyfCjDJsg==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@rdfjs/types/-/types-2.0.1.tgz", + "integrity": "sha512-uyAzpugX7KekAXAHq26m3JlUIZJOC0uSBhpnefGV5i15bevDyyejoB7I+9MKeUrzXD8OOUI3+4FeV1wwQr5ihA==", "requires": { "@types/node": "*" } @@ -19817,19 +19821,11 @@ } }, "@types/readable-stream": { - "version": "4.0.18", - "resolved": "https://registry.npmjs.org/@types/readable-stream/-/readable-stream-4.0.18.tgz", - "integrity": "sha512-21jK/1j+Wg+7jVw1xnSwy/2Q1VgVjWuFssbYGTREPUBeZ+rqVFl2udq0IkxzPC0ZhOzVceUbyIACFZKLqKEBlA==", + "version": "4.0.19", + "resolved": "https://registry.npmjs.org/@types/readable-stream/-/readable-stream-4.0.19.tgz", + "integrity": "sha512-6Tgd3lMocKwOul/kwAAgSebkhdMCLhRvcJ6CKHA6wdql2qNIwK6hw3Y4PZQxn9HcJogoC/1ZOmkFM7OZKH/VrA==", "requires": { - "@types/node": "*", - "safe-buffer": "~5.1.1" - }, - "dependencies": { - "safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" - } + "@types/node": "*" } }, "@types/resolve": { @@ -20039,15 +20035,15 @@ "dev": true }, "@vocabulary/sh": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/@vocabulary/sh/-/sh-1.1.5.tgz", - "integrity": "sha512-8R4uxHLpwmp6l6szZdCtfQx0wRy64OHuOsYTDfhCsbJ773Uv6nCM2bYBtjjirZHN+2m3uHQWgtWOdvuu1jwmOA==", + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/@vocabulary/sh/-/sh-1.1.6.tgz", + "integrity": "sha512-8IfAQoKh57THz8LA2+n1jaY/VC2XaqMNSsJgzBKSSrj20y5PSMAawb6dMsxoLxqDIPBDs1TFRl/9CijUnwbBUA==", "requires": {} }, "@zazuko/env": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@zazuko/env/-/env-2.5.1.tgz", - "integrity": "sha512-FfvogDssZEmWYeVlh/d6q63rEubgtOSvkuDV/2VIB6rz4a3c93OsMQlwasQkMko0+gEJQZoh7VVR8+gaCaCNpw==", + "version": "2.5.3", + "resolved": "https://registry.npmjs.org/@zazuko/env/-/env-2.5.3.tgz", + "integrity": "sha512-kivvYoXGFjva1CuXeK/jaaWMy9eXhhFmuSfSJGVW2wH7XbcZehJObjPXEVlZ3kKLCFhuv96j8Ot3SkbYaOtuLA==", "requires": { "@rdfjs/data-model": "^2.0.1", "@rdfjs/dataset": "^2.0.1", @@ -20089,20 +20085,20 @@ } }, "@zazuko/env-node": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/@zazuko/env-node/-/env-node-2.1.4.tgz", - "integrity": 
"sha512-D3pw3T3SpC6lI3D5Akio/63lWaI9VKWazVGcjWP0gC598JQ60V7T+4QSCUcxT0ZGTDOkNdT3loYR9P+JK96KeQ==", + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@zazuko/env-node/-/env-node-2.1.5.tgz", + "integrity": "sha512-qYrePSWiz9XOB5R0NfvuaogmPP8gLcMsl18G7sYW1h6RB4l3aiofAr4YAKTK+bpJgGjeivDDi64lsQjF2F6p0g==", "requires": { "@rdfjs/fetch-lite": "^3.2.2", "@rdfjs/formats": "^4.0.0", - "@zazuko/env": "^2.1.1", + "@zazuko/env": "^2.5.3", "@zazuko/rdf-utils-fs": "^3.3.0" } }, "@zazuko/prefixes": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/@zazuko/prefixes/-/prefixes-2.3.0.tgz", - "integrity": "sha512-PrJnd4YUwsxZgsTP4faiQKEkRhDd5cFjY2budkHltGJVUtu0kDZpH/1zArXJxSbrNbScIx7rSKDx0NsvBh6ACw==" + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/@zazuko/prefixes/-/prefixes-2.4.0.tgz", + "integrity": "sha512-bd53k5XgFKWR56sofHeAcIbv8o0m2HsJlbHaHbrMufUCdgiZsCLvZn84Vh1dhcsyBHOD0EIo9AD4pNWDQLVRaw==" }, "@zazuko/rdf-utils-fs": { "version": "3.3.1", @@ -23994,9 +23990,9 @@ }, "dependencies": { "@types/node": { - "version": "18.19.80", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.80.tgz", - "integrity": "sha512-kEWeMwMeIvxYkeg1gTc01awpwLbfMRZXdIhwRcakd/KlK53jmRC26LqcbIt7fnAQTu5GzlnWmzA3H6+l1u6xxQ==", + "version": "18.19.103", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.103.tgz", + "integrity": "sha512-hHTHp+sEz6SxFsp+SA+Tqrua3AbmlAw+Y//aEwdHrdZkYVRWdvWD3y5uPZ0flYOkgskaFWqZ/YGFm3FaFQ0pRw==", "requires": { "undici-types": "~5.26.4" } @@ -24718,12 +24714,11 @@ } }, "n3": { - "version": "1.24.2", - "resolved": "https://registry.npmjs.org/n3/-/n3-1.24.2.tgz", - "integrity": "sha512-j/3PKmK0MA3tAohDCl9y1JDaNxp8wCnhTtrOOgZ1O17JVtWLkzHsp2jZ8YhY2uS4FWQAm6mExcXvl7C8lwXyaw==", + "version": "1.25.2", + "resolved": "https://registry.npmjs.org/n3/-/n3-1.25.2.tgz", + "integrity": "sha512-ZBPnAgOw4sze/hnyoydNA5Ts9wbwiG+BXssTkdBKD6IkQZcg1IfQdo5AMU9JhsIu/RGtRD1QD0gphEhk/6ZnWA==", "requires": { "buffer": "^6.0.3", - "queue-microtask": "^1.1.2", "readable-stream": "^4.0.0" }, "dependencies": { @@ -25819,7 +25814,8 @@ "queue-microtask": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", - "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==" + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true }, "quick-lru": { "version": "5.1.1", @@ -25867,16 +25863,6 @@ "integrity": "sha512-WzPoYHwQYWvIP9k+7IBLY1b4nIDitzAK4mA37WumAF/Cjvu/KOtYJH9IPZnUTWNSd5K2+pq4vrcE9WZC4sRHhg==", "requires": { "@rdfjs/types": "^2.0.0" - }, - "dependencies": { - "@rdfjs/types": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@rdfjs/types/-/types-2.0.1.tgz", - "integrity": "sha512-uyAzpugX7KekAXAHq26m3JlUIZJOC0uSBhpnefGV5i15bevDyyejoB7I+9MKeUrzXD8OOUI3+4FeV1wwQr5ihA==", - "requires": { - "@types/node": "*" - } - } } }, "rdf-dataset-ext": { @@ -25888,55 +25874,45 @@ "readable-stream": "3 - 4" } }, - "rdf-literal": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/rdf-literal/-/rdf-literal-1.3.2.tgz", - "integrity": "sha512-79Stlu3sXy0kq9/decHFLf3xNPuY6sfhFPhd/diWErgaFr0Ekyg38Vh9bnVcqDYu48CFRi0t+hrFii49n92Hbw==", - "requires": { - "@rdfjs/types": "*", - "rdf-data-factory": "^1.1.0" - }, - "dependencies": { - "rdf-data-factory": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/rdf-data-factory/-/rdf-data-factory-1.1.3.tgz", - "integrity": 
"sha512-ny6CI7m2bq4lfQQmDYvcb2l1F9KtGwz9chipX4oWu2aAtVoXjb7k3d8J1EsgAsEbMXnBipB/iuRen5H2fwRWWQ==", - "requires": { - "@rdfjs/types": "^1.0.0" - } - } - } - }, "rdf-validate-datatype": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/rdf-validate-datatype/-/rdf-validate-datatype-0.2.1.tgz", - "integrity": "sha512-DpREnmoWDxC80KyslZeBPLQb3ztyeiOolT4uCl58tCju2KHJu4j5vonmVVdEJh2Mpad5UY57v6sSM/hfSTFGKQ==", + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/rdf-validate-datatype/-/rdf-validate-datatype-0.2.2.tgz", + "integrity": "sha512-mH9qL8i0WBbZ6HJCA26BB6V+WV2MraKvitez3SV0QegBWVQ4wYO49CgfFBzoAYg6tlnhFXl9MkrOAQ07X2N1FA==", "requires": { "@rdfjs/term-map": "^2.0.0", - "@tpluscode/rdf-ns-builders": "3 - 4" + "@tpluscode/rdf-ns-builders": "3 - 5" } }, "rdf-validate-shacl": { - "version": "0.5.6", - "resolved": "https://registry.npmjs.org/rdf-validate-shacl/-/rdf-validate-shacl-0.5.6.tgz", - "integrity": "sha512-B23lccAy1uIYU9XVoXxK2DFGMV+xBbpvzTpfBJXLKoURjdEOfu/MCih1AHiGJh9PInvl667GvkVD9TmAE2b3Sg==", + "version": "0.5.10", + "resolved": "https://registry.npmjs.org/rdf-validate-shacl/-/rdf-validate-shacl-0.5.10.tgz", + "integrity": "sha512-I+TRVGeKn5eG/kTzVGRGGNThCSkgX/v7EUSOUEsIcHubyyShQYzRbQqyU45zKzNjLWdqp9abFHw1ULUPzWyo1A==", "requires": { "@rdfjs/data-model": "^2", "@rdfjs/dataset": "^2", "@rdfjs/environment": "^1", "@rdfjs/namespace": "^2.0.0", "@rdfjs/term-set": "^2.0.1", - "@vocabulary/sh": "^1.0.1", + "@rdfjs/types": "^1.1.0", + "@vocabulary/sh": "^1.1.5", "clownface": "^2.0.0", "debug": "^4.3.2", - "rdf-literal": "^1.3.0", + "rdf-literal": "^1.3.2", "rdf-validate-datatype": "^0.2.0" }, "dependencies": { + "@rdfjs/types": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@rdfjs/types/-/types-1.1.2.tgz", + "integrity": "sha512-wqpOJK1QCbmsGNtyzYnojPU8gRDPid2JO0Q0kMtb4j65xhCK880cnKAfEOwC+dX85VJcCByQx5zOwyyfCjDJsg==", + "requires": { + "@types/node": "*" + } + }, "debug": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", - "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", + "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", "requires": { "ms": "^2.1.3" } @@ -25945,6 +25921,23 @@ "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + }, + "rdf-data-factory": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/rdf-data-factory/-/rdf-data-factory-1.1.3.tgz", + "integrity": "sha512-ny6CI7m2bq4lfQQmDYvcb2l1F9KtGwz9chipX4oWu2aAtVoXjb7k3d8J1EsgAsEbMXnBipB/iuRen5H2fwRWWQ==", + "requires": { + "@rdfjs/types": "^1.0.0" + } + }, + "rdf-literal": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/rdf-literal/-/rdf-literal-1.3.2.tgz", + "integrity": "sha512-79Stlu3sXy0kq9/decHFLf3xNPuY6sfhFPhd/diWErgaFr0Ekyg38Vh9bnVcqDYu48CFRi0t+hrFii49n92Hbw==", + "requires": { + "@rdfjs/types": "*", + "rdf-data-factory": "^1.1.0" + } } } }, @@ -25963,6 +25956,14 @@ "validate-iri": "^1.0.0" }, "dependencies": { + "@rdfjs/types": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@rdfjs/types/-/types-1.1.2.tgz", + "integrity": "sha512-wqpOJK1QCbmsGNtyzYnojPU8gRDPid2JO0Q0kMtb4j65xhCK880cnKAfEOwC+dX85VJcCByQx5zOwyyfCjDJsg==", + "requires": { + 
"@types/node": "*" + } + }, "@types/readable-stream": { "version": "2.3.15", "resolved": "https://registry.npmjs.org/@types/readable-stream/-/readable-stream-2.3.15.tgz", @@ -27580,9 +27581,9 @@ } }, "undici": { - "version": "5.28.5", - "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.5.tgz", - "integrity": "sha512-zICwjrDrcrUE0pyyJc1I2QzBkLM8FINsgOrt6WjA+BgajVq9Nxu2PbFFXUrAggLfDXlZGZBVZYw7WNV5KiBiBA==", + "version": "5.29.0", + "resolved": "https://registry.npmjs.org/undici/-/undici-5.29.0.tgz", + "integrity": "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg==", "requires": { "@fastify/busboy": "^2.0.0" } diff --git a/src/cli.ts b/src/cli.ts index 4bc3d1c..2b16123 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -2,6 +2,10 @@ import { Command } from 'commander'; import { Commands } from './commands.js'; import { ethers } from 'ethers'; import chalk from 'chalk'; +import { stdin as input, stdout as output } from 'node:process'; +import { createInterface } from 'readline/promises'; +import { unitsToAmount } from '@oceanprotocol/lib'; +import { toBoolean } from './helpers.js'; async function initializeSigner() { @@ -182,22 +186,61 @@ export async function createCLI() { .argument('', 'Dataset DIDs (comma-separated) OR (empty array for none)') .argument('', 'Algorithm DID') .argument('', 'Compute environment ID') + .argument('', 'maxJobDuration for compute job') + .argument('', 'Payment token for compute') + .argument('', 'Resources of compute environment stringified') .option('-d, --datasets ', 'Dataset DIDs (comma-separated) OR (empty array for none)') .option('-a, --algo ', 'Algorithm DID') .option('-e, --env ', 'Compute environment ID') - .action(async (datasetDids, algoDid, computeEnvId, options) => { + .option('--maxJobDuration ', 'Compute maxJobDuration') + .option('-t, --token ', 'Compute payment token') + .option('--resources ', 'Compute resources') + .option('--accept [boolean]', 'Auto-confirm payment for compute job (true/false)', toBoolean) + .action(async (datasetDids, algoDid, computeEnvId, maxJobDuration, paymentToken, resources, options) => { const dsDids = options.datasets || datasetDids; const aDid = options.algo || algoDid; const envId = options.env || computeEnvId; - if (!dsDids || !aDid || !envId) { + const jobDuration = options.maxJobDuration || maxJobDuration; + const token = options.token || paymentToken; + const res = options.resources || resources; + if (!dsDids || !aDid ||!envId || !jobDuration || !token || !res) { console.error(chalk.red('Missing required arguments')); // process.exit(1); return } const { signer, chainId } = await initializeSigner(); const commands = new Commands(signer, chainId); - await commands.computeStart([null, dsDids, aDid, envId]); - }); + + const initArgs = [null, dsDids, aDid, envId, jobDuration, token, res]; + const initResp = await commands.initializeCompute(initArgs); + + if (!initResp) { + console.error(chalk.red('Initialization failed. Aborting.')); + return; + } + + console.log(chalk.yellow('\n--- Payment Details ---')); + console.log(JSON.stringify(initResp, null, 2)); + const amount = await unitsToAmount(signer, initResp.payment.token, initResp.payment.amount.toString()); + + const proceed = options.accept; + if (!proceed) { + const rl = createInterface({ input, output }); + const confirmation = await rl.question(`\nProceed with payment for starting compute job at price ${amount} in tokens from address ${initResp.payment.token}? 
(y/n): `); + rl.close(); + if (confirmation.toLowerCase() !== 'y' && confirmation.toLowerCase() !== 'yes') { + console.log(chalk.red('Compute job canceled by user.')); + return; + } + } else { + console.log(chalk.cyan('Auto-confirm enabled with --accept flag.')); + } + + const computeArgs = [null, dsDids, aDid, envId, JSON.stringify(initResp), jobDuration, token, res]; + + await commands.computeStart(computeArgs); + console.log(chalk.green('Compute job started successfully.')); + }); // startFreeCompute command program @@ -253,7 +296,7 @@ .description('Stops a compute job') .argument('<datasetDid>', 'Dataset DID') .argument('<jobId>', 'Job ID') - .argument('[agreementId]', 'Agreement ID') + .argument('<agreementId>', 'Agreement ID') .option('-d, --dataset <datasetDid>', 'Dataset DID') .option('-j, --job <jobId>', 'Job ID') .option('-a, --agreement [agreementId]', 'Agreement ID') @@ -268,7 +311,7 @@ } const { signer, chainId } = await initializeSigner(); const commands = new Commands(signer, chainId); - const args = [null, dsDid, jId]; + const args = [null, dsDid, jId]; if (agrId) args.push(agrId); await commands.computeStop(args); }); @@ -279,7 +322,7 @@ .description('Displays the compute job status') .argument('<datasetDid>', 'Dataset DID') .argument('<jobId>', 'Job ID') - .argument('[agreementId]', 'Agreement ID') + .argument('<agreementId>', 'Agreement ID') .option('-d, --dataset <datasetDid>', 'Dataset DID') .option('-j, --job <jobId>', 'Job ID') .option('-a, --agreement [agreementId]', 'Agreement ID') diff --git a/src/commands.ts b/src/commands.ts index 2eba8de..80c251d 100644 --- a/src/commands.ts +++ b/src/commands.ts @@ -9,6 +9,7 @@ import { getMetadataURI, getIndexingWaitSettings, IndexerWaitParams, + fixAndParseProviderFees } from "./helpers.js"; import { Aquarius, @@ -23,10 +24,14 @@ import { getHash, orderAsset, sendTx, + unitsToAmount, + EscrowContract } from "@oceanprotocol/lib"; +import { Asset } from '@oceanprotocol/ddo-js'; import { Signer, ethers } from "ethers"; import { interactiveFlow } from "./interactiveFlow.js"; import { publishAsset } from "./publishAsset.js"; +import chalk from 'chalk'; export class Commands { public signer: Signer; @@ -42,6 +47,7 @@ this.oceanNodeUrl = process.env.NODE_URL; this.indexingParams = getIndexingWaitSettings(); console.log("Using Ocean Node URL :", this.oceanNodeUrl); + this.config.nodeUri = this.oceanNodeUrl; this.aquarius = new Aquarius(this.oceanNodeUrl); } @@ -73,8 +79,8 @@ try { // add some more checks const urlAssetId = await createAssetUtil( - asset.nft.name, - asset.nft.symbol, + asset.indexedMetadata.nft.name, + asset.indexedMetadata.nft.symbol, this.signer, asset.services[0].files, asset, @@ -92,7 +98,7 @@ } public async publishAlgo(args: string[]) { - let algoAsset; + let algoAsset: Asset; try { algoAsset = JSON.parse(fs.readFileSync(args[1], "utf8")); } catch (e) { @@ -104,8 +110,8 @@ // add some more checks try { const algoDid = await createAssetUtil( - algoAsset.nft.name, - algoAsset.nft.symbol, + algoAsset.indexedMetadata.nft.name, + algoAsset.indexedMetadata.nft.symbol, this.signer, algoAsset.services[0].files, algoAsset, @@ -236,7 +242,7 @@ } } - public async computeStart(args: string[]) { + public async initializeCompute(args: string[]) { const inputDatasetsString = args[1]; let inputDatasets = []; @@ -310,17 +316,6 @@ return; } - const datatoken = new Datatoken(
this.signer, - (await this.signer.provider.getNetwork()).chainId, - this.config - ); - - const mytime = new Date(); - const computeMinutes = 5; - mytime.setMinutes(mytime.getMinutes() + computeMinutes); - const computeValidUntil = Math.floor(mytime.getTime() / 1000); - const computeEnvID = args[3]; // NO chainId needed anymore (is not part of ComputeEnvironment spec anymore) // const chainComputeEnvs = computeEnvs[computeEnvID]; // was algoDdo.chainId @@ -367,18 +362,96 @@ export class Commands { serviceId: ddos[dataDdo].services[0].id, }); } - - console.log("Starting compute job using provider: ", providerURI); + const maxJobDuration = Number(args[4]) + if (!maxJobDuration) { + console.error( + "Error initializing Provider for the compute job using dataset DID " + + args[1] + + " and algorithm DID " + + args[2] + + " because maxJobDuration was not provided." + ); + return; + } + if (maxJobDuration < 0) { + console.error( + "Error initializing Provider for the compute job using dataset DID " + + args[1] + + " and algorithm DID " + + args[2] + + " because maxJobDuration is less than 0. It should be in seconds." + ); + return; + } + let supportedMaxJobDuration: number = maxJobDuration; + if (maxJobDuration > computeEnv.maxJobDuration) { + supportedMaxJobDuration = computeEnv.maxJobDuration; + } + const paymentToken = args[5] + if (!paymentToken) { + console.error( + "Error initializing Provider for the compute job using dataset DID " + + args[1] + + " and algorithm DID " + + args[2] + + " because paymentToken was not provided." + ); + return; + } + const chainId = await this.signer.getChainId() + if (!Object.keys(computeEnv.fees).includes(chainId.toString())) { + console.error( + "Error starting paid compute using dataset DID " + + args[1] + + " and algorithm DID " + + args[2] + + " because chainId is not supported by compute environment. " + + args[3] + + ". Supported chain IDs: " + + computeEnv.fees.keys() + ); + return; + } + let found: boolean = false; + for (const fee of computeEnv.fees[chainId.toString()]) { + if (fee.feeToken.toLowerCase() === paymentToken.toLowerCase()) { + found = true; + break; + } + } + if (found === false) { + console.error( + "Error initializing Provider for the compute job using dataset DID " + + args[1] + + " and algorithm DID " + + args[2] + + " because paymentToken is not supported by this environment " + + args[3] + ); + return; + } + const resources = args[6] // resources object should be stringified in cli when calling initializeCompute + if (!resources) { + console.error( + "Error initializing Provider for the compute job using dataset DID " + + args[1] + + " and algorithm DID " + + args[2] + + " because resources for compute were not provided." 
+ ); + return; + } + const parsedResources = JSON.parse(resources); const providerInitializeComputeJob = await ProviderInstance.initializeCompute( assets, algo, computeEnv.id, - null, - computeValidUntil, + paymentToken, + supportedMaxJobDuration, providerURI, - this.signer, - null + this.signer, // V1 was this.signer.getAddress() + parsedResources ); if ( !providerInitializeComputeJob || @@ -386,23 +459,153 @@ export class Commands { ) { console.error( "Error initializing Provider for the compute job using dataset DID " + - args[1] + - " and algorithm DID " + - args[2] + args[1] + + " and algorithm DID " + + args[2] + ); + return; + } + console.log(chalk.yellow('\n--- Payment Details ---')); + console.log(JSON.stringify(providerInitializeComputeJob, null, 2)); + return providerInitializeComputeJob; + + } + + public async computeStart(args: string[]) { + const inputDatasetsString = args[1]; + let inputDatasets = []; + + if ( + inputDatasetsString.includes("[") && + inputDatasetsString.includes("]") + ) { + const processedInput = inputDatasetsString + .replaceAll("]", "") + .replaceAll("[", ""); + if (processedInput.indexOf(",") > -1) { + inputDatasets = processedInput.split(","); + } + } else { + inputDatasets.push(inputDatasetsString); + } + + const ddos = []; + + for (const dataset in inputDatasets) { + const dataDdo = await this.aquarius.waitForIndexer( + inputDatasets[dataset], + null, + null, + this.indexingParams.retryInterval, + this.indexingParams.maxRetries + ); + if (!dataDdo) { + console.error( + "Error fetching DDO " + dataset[1] + ". Does this asset exists?" + ); + return; + } else { + ddos.push(dataDdo); + } + } + if ( + inputDatasets.length > 0 && + (ddos.length <= 0 || ddos.length != inputDatasets.length) + ) { + console.error("Not all the data ddos are available."); + return; + } + let providerURI = this.oceanNodeUrl; + if (ddos.length > 0) { + providerURI = ddos[0].services[0].serviceEndpoint; + } + const algoDdo = await this.aquarius.waitForIndexer( + args[2], + null, + null, + this.indexingParams.retryInterval, + this.indexingParams.maxRetries + ); + if (!algoDdo) { + console.error( + "Error fetching DDO " + args[1] + ". Does this asset exists?" + ); + return; + } + + const computeEnvs = await ProviderInstance.getComputeEnvironments( + this.oceanNodeUrl + ); + + if (!computeEnvs || computeEnvs.length < 1) { + console.error( + "Error fetching compute environments. No compute environments available." ); return; } + const computeEnvID = args[3]; + // NO chainId needed anymore (is not part of ComputeEnvironment spec anymore) + // const chainComputeEnvs = computeEnvs[computeEnvID]; // was algoDdo.chainId + let computeEnv = null; // chainComputeEnvs[0]; + if (computeEnvID && computeEnvID.length > 1) { + for (const index in computeEnvs) { + if (computeEnvID == computeEnvs[index].id) { + computeEnv = computeEnvs[index]; + break; + } + } + } + if (!computeEnv || !computeEnvID) { + console.error( + "Error fetching compute environment. 
No compute environment matches id: ", + computeEnvID + ); + return; + } + + const algo: ComputeAlgorithm = { + documentId: algoDdo.id, + serviceId: algoDdo.services[0].id, + meta: algoDdo.metadata.algorithm, + }; + + const assets = []; + for (const dataDdo in ddos) { + const canStartCompute = isOrderable( + ddos[dataDdo], + ddos[dataDdo].services[0].id, + algo, + algoDdo + ); + if (!canStartCompute) { + console.error( + "Error Cannot start compute job using the datasets DIDs & algorithm DID provided" + ); + return; + } + assets.push({ + documentId: ddos[dataDdo].id, + serviceId: ddos[dataDdo].services[0].id, + }); + } + const providerInitializeComputeJob = args[4]; // provider fees + payment + const parsedProviderInitializeComputeJob = fixAndParseProviderFees(providerInitializeComputeJob) console.log("Ordering algorithm: ", args[2]); + const datatoken = new Datatoken( + this.signer, + (await this.signer.provider.getNetwork()).chainId, + this.config + ); algo.transferTxId = await handleComputeOrder( - providerInitializeComputeJob.algorithm, + parsedProviderInitializeComputeJob?.algorithm, algoDdo, this.signer, computeEnv.consumerAddress, 0, datatoken, this.config, - providerInitializeComputeJob?.algorithm?.providerFee, + parsedProviderInitializeComputeJob?.algorithm?.providerFee, providerURI ); if (!algo.transferTxId) { @@ -413,17 +616,18 @@ export class Commands { ); return; } + console.log("Ordering assets: ", args[1]); for (let i = 0; i < ddos.length; i++) { assets[i].transferTxId = await handleComputeOrder( - providerInitializeComputeJob.datasets[i], + parsedProviderInitializeComputeJob?.datasets[i], ddos[i], this.signer, computeEnv.consumerAddress, 0, datatoken, this.config, - providerInitializeComputeJob?.datasets[i].providerFee, + parsedProviderInitializeComputeJob?.datasets[i].providerFee, providerURI ); if (!assets[i].transferTxId) { @@ -435,6 +639,113 @@ export class Commands { return; } } + // payment check + const maxJobDuration = Number(args[5]) + if (!maxJobDuration) { + console.error( + "Error initializing Provider for the compute job using dataset DID " + + args[1] + + " and algorithm DID " + + args[2] + + " because maxJobDuration was not provided." + ); + return; + } + if (maxJobDuration < 0) { + console.error( + "Error starting paid compute using dataset DID " + + args[1] + + " and algorithm DID " + + args[2] + + " because maxJobDuration is less than 0. It should be in seconds." + ); + return; + } + let supportedMaxJobDuration: number = maxJobDuration; + if (maxJobDuration > computeEnv.maxJobDuration) { + supportedMaxJobDuration = computeEnv.maxJobDuration; + } + const chainId = await this.signer.getChainId() + const paymentToken = args[6] + if (!paymentToken) { + console.error( + "Error starting paid compute using dataset DID " + + args[1] + + " and algorithm DID " + + args[2] + + " because paymentToken was not provided." + ); + return; + } + if (!Object.keys(computeEnv.fees).includes(chainId.toString())) { + console.error( + "Error starting paid compute using dataset DID " + + args[1] + + " and algorithm DID " + + args[2] + + " because chainId is not supported by compute environment. " + + args[3] + + ". 
Supported chain IDs: " + + computeEnv.fees.keys() + ); + return; + } + let found: boolean = false; + for (const fee of computeEnv.fees[chainId.toString()]) { + if (fee.feeToken.toLowerCase() === paymentToken.toLowerCase()) { + found = true; + break; + } + } + if (found === false) { + console.error( + "Error starting paid compute using dataset DID " + + args[1] + + " and algorithm DID " + + args[2] + + " because paymentToken is not supported by this environment " + + args[3] + ); + return; + } + const resources = args[7] // resources object should be stringified in cli when calling initializeCompute + if (!resources) { + console.error( + "Error starting paid compute using dataset DID " + + args[1] + + " and algorithm DID " + + args[2] + + " because resources for compute were not provided." + ); + return; + } + + const escrow = new EscrowContract( + ethers.utils.getAddress(parsedProviderInitializeComputeJob.payment.escrowAddress), + this.signer + ) + console.log("Verifying payment..."); + const validationEscrow = await escrow.verifyFundsForEscrowPayment( + paymentToken, + computeEnv.consumerAddress, + await unitsToAmount(this.signer, paymentToken, parsedProviderInitializeComputeJob.payment.amount), + parsedProviderInitializeComputeJob.payment.amount.toString(), + parsedProviderInitializeComputeJob.payment.minLockSeconds.toString(), + '10' + ) + if (validationEscrow.isValid === false) { + console.error( + "Error starting compute job dataset DID " + + args[1] + + " and algorithm DID " + + args[2] + + " because escrow funds check failed: " + + validationEscrow.message + ); + return; + } + + console.log("Starting compute job using provider: ", providerURI); const additionalDatasets = assets.length > 1 ? assets.slice(1) : null; if (assets.length > 0) { @@ -462,17 +773,17 @@ export class Commands { const output: ComputeOutput = { metadataUri: await getMetadataURI(), }; - + const computeJobs = await ProviderInstance.computeStart( providerURI, this.signer, computeEnv.id, assets, // assets[0] // only c2d v1, algo, - null, - null, - null, - null, + supportedMaxJobDuration, + paymentToken, + JSON.parse(resources), + await this.signer.getChainId(), // additionalDatasets, only c2d v1 output, ); @@ -480,9 +791,9 @@ export class Commands { console.log("compute jobs: ", computeJobs); if (computeJobs && computeJobs[0]) { - const { jobId, agreementId } = computeJobs[0]; + const { jobId, payment } = computeJobs[0]; console.log("Compute started. 
JobID: " + jobId); - console.log("Agreement ID: " + agreementId); + console.log("Agreement ID: " + payment.lockTx); } else { console.log("Error while starting the compute job: ", computeJobs); } @@ -707,7 +1018,8 @@ export class Commands { ); return; } - console.log("Exiting compute environments: ", computeEnvs); + + console.log("Exiting compute environments: ", JSON.stringify(computeEnvs)); } public async computeStreamableLogs(args: string[]) { diff --git a/src/helpers.ts b/src/helpers.ts index d12d34a..7cc8195 100644 --- a/src/helpers.ts +++ b/src/helpers.ts @@ -3,6 +3,7 @@ import fetch from "cross-fetch"; import { promises as fs } from "fs"; import * as path from "path"; import * as sapphire from '@oasisprotocol/sapphire-paratime'; +import { Asset, DDO } from '@oceanprotocol/ddo-js'; import { AccesslistFactory, Aquarius, @@ -25,7 +26,7 @@ import { } from "@oceanprotocol/lib"; import { hexlify } from "ethers/lib/utils"; import ERC20Template from '@oceanprotocol/contracts/artifacts/contracts/templates/ERC20Template.sol/ERC20Template.json'; -import { DDO, Asset } from '@oceanprotocol/ddo-js'; + export async function downloadFile( @@ -151,7 +152,7 @@ export async function createAssetUtil( export async function updateAssetMetadata( owner: Signer, - updatedDdo: DDO, + updatedDdo: Asset, oceanNodeUrl: string, aquariusInstance: Aquarius, encryptDDO: boolean = true @@ -378,4 +379,30 @@ export function getIndexingWaitSettings(): IndexerWaitParams { } return indexingParams +} + +export function fixAndParseProviderFees(rawString: string) { + // Remove surrounding quotes if present + if (rawString.startsWith('"') && rawString.endsWith('"')) { + rawString = rawString.slice(1, -1).replace(/\\"/g, '"'); + } + + const fixed = rawString + .replace(/([{,])(\s*)([a-zA-Z0-9_]+)\s*:/g, '$1"$3":') + .replace(/:\s*(did:[^,}\]]+)/g, ':"$1"') + .replace(/:\s*(0x[a-fA-F0-9]+)/g, ':"$1"') + .replace(/providerData:\s*([^,}\]]+)/g, 'providerData:"$1"') + .replace(/:false/g, ':false') + .replace(/:true/g, ':true'); + + return JSON.parse(fixed); +} + +export function toBoolean(value) { + if (typeof value === 'boolean') return value; + if (typeof value === 'string') { + const val = value.trim().toLowerCase(); + return val === 'true' || val === '1' || val === 'yes' || val === 'y'; + } + return Boolean(value); } \ No newline at end of file diff --git a/src/publishAsset.ts b/src/publishAsset.ts index 7b28de8..8fc72ec 100644 --- a/src/publishAsset.ts +++ b/src/publishAsset.ts @@ -4,8 +4,8 @@ import { Config, Aquarius, } from '@oceanprotocol/lib'; -import { createAssetUtil, updateAssetMetadata } from './helpers.js'; import { Asset } from '@oceanprotocol/ddo-js'; +import { createAssetUtil, updateAssetMetadata } from './helpers.js'; export interface PublishAssetParams { title: string; @@ -56,20 +56,20 @@ export async function publishAsset(aquarius: Aquarius, params: PublishAssetParam event: undefined, purgatory: undefined, stats: [ - { - orders: 0, - prices: [{ - price: params.price, - contract: '0x282d8efCe846A88B159800bd4130ad77443Fa1A1', - token: params.token, - type: params.isCharged === false ? 'dispenser' : 'fixedrate' - }], - datatokenAddress: '', - name: 'access', - serviceId: 'access', - symbol: '' - } - ] + { + orders: 0, + prices: [{ + price: params.price, + contract: '0x282d8efCe846A88B159800bd4130ad77443Fa1A1', + token: params.token, + type: params.isCharged === false ? 
'dispenser' : 'fixedrate'
+        }],
+        datatokenAddress: '',
+        name: 'access',
+        serviceId: 'access',
+        symbol: ''
+    }
+]
},
services: [
    {
diff --git a/test/consumeFlow.test.ts b/test/consumeFlow.test.ts
index ee09e0d..9219b03 100644
--- a/test/consumeFlow.test.ts
+++ b/test/consumeFlow.test.ts
@@ -1,15 +1,10 @@
import { expect } from "chai";
-import { exec } from "child_process";
import path from "path";
import fs from "fs";
import crypto from "crypto";
import https from "https";
-import { dirname } from 'path'
-import { fileURLToPath } from 'url'
-
-const __filename = fileURLToPath(import.meta.url)
-const __dirname = dirname(__filename)
+import { projectRoot, runCommand } from "./util.js";

describe("Ocean CLI Publishing", function() {
    this.timeout(200000); // Set a longer timeout to allow the command to execute
@@ -19,7 +14,6 @@ describe("Ocean CLI Publishing", function() {
    let jsAlgoDid: string;
    let pythonAlgoDid: string;

-    const projectRoot = path.resolve(__dirname, "..");

    // Function to compute hash of a file
    const computeFileHash = (filePath: string): string => {
@@ -43,15 +37,13 @@
        });
    };

-
-    it("should publish a dataset using 'npm run cli publish'", function(done) {
+    it("should publish a dataset using 'npm run cli publish'", async function() {
        const metadataFile = path.resolve(projectRoot, "metadata/simpleDownloadDataset.json");

        // Ensure the metadata file exists
        if (!fs.existsSync(metadataFile)) {
-            done(new Error("Metadata file not found: " + metadataFile));
-            return;
+            throw new Error("Metadata file not found: " + metadataFile);
        }

        process.env.PRIVATE_KEY = "0x1d751ded5a32226054cd2e71261039b65afb9ee1c746d055dd699b1150a5befc";
@@ -60,159 +52,181 @@
        process.env.NODE_URL = "http://127.0.0.1:8001";
        process.env.ADDRESS_FILE = path.join(process.env.HOME || "", ".ocean/ocean-contracts/artifacts/address.json");

-        exec(`npm run cli publish ${metadataFile}`, { cwd: projectRoot }, (error, stdout) => {
-            try {
-                const match = stdout.match(/did:op:[a-f0-9]{64}/);
-                if (match) {
-                    downloadDatasetDid = match[0];
-                    console.log('download asset: ', downloadDatasetDid)
-                }
-                expect(stdout).to.contain("Asset published. ID:");
-                done()
-            } catch (assertionError) {
-                done(assertionError);
-            }
-        });
+        const output = await runCommand(`npm run cli publish ${metadataFile}`);
+
+        const didMatch = output.match(/did:op:[a-f0-9]{64}/);
+        if (!didMatch) {
+            console.error("Raw output:", output);
+            throw new Error("Could not find the dataset DID in the output");
+        }
+
+        downloadDatasetDid = didMatch[0];
+        console.log('download asset: ', downloadDatasetDid)
    });

-    it("should publish a compute dataset using 'npm run cli publish'", function(done) {
+    it("should publish a compute dataset using 'npm run cli publish'", async function() {
        const metadataFile = path.resolve(projectRoot, "metadata/simpleComputeDataset.json");

        // Ensure the metadata file exists
        if (!fs.existsSync(metadataFile)) {
-            done(new Error("Metadata file not found: " + metadataFile));
-            return;
+            throw new Error("Metadata file not found: " + metadataFile);
        }

-        exec(`npm run cli publish ${metadataFile}`, { cwd: projectRoot }, (error, stdout) => {
-            try {
-                const match = stdout.match(/did:op:[a-f0-9]{64}/);
-                if (match) {
-                    computeDatasetDid = match[0];
-                }
-                expect(stdout).to.contain("Asset published. ID:");
-                done()
-            } catch (assertionError) {
-                done(assertionError);
-            }
-        });
+        const output = await runCommand(`npm run cli publish ${metadataFile}`);
+
+        const didMatch = output.match(/did:op:[a-f0-9]{64}/);
+        if (!didMatch) {
+            console.error("Raw output:", output);
+            throw new Error("Could not find the compute dataset DID in the output");
+        }
+
+        computeDatasetDid = didMatch[0];
    });

-    it("should publish a js Algorithm using 'npm run cli publishAlgo'", function(done) {
+    it("should publish a js Algorithm using 'npm run cli publishAlgo'", async function() {
        const filePath = path.resolve(projectRoot, "metadata/jsAlgo.json");

        // Ensure the metadata file exists
        if (!fs.existsSync(filePath)) {
-            done(new Error("Metadata file not found: " + filePath));
-            return;
+            throw new Error("Metadata file not found: " + filePath);
        }
-
-        exec(`npm run cli publishAlgo ${filePath}`, { cwd: projectRoot }, (error, stdout) => {
-            try {
-                expect(stdout).to.contain("Algorithm published. DID:");
-                const match = stdout.match(/did:op:[a-f0-9]{64}/);
-                if (match) {
-                    jsAlgoDid = match[0];
-                }
-                done()
-            } catch (assertionError) {
-                done(assertionError);
-            }
-        });
+
+        const output = await runCommand(`npm run cli publishAlgo ${filePath}`);
+        const didMatch = output.match(/did:op:[a-f0-9]{64}/);
+        if (!didMatch) {
+            console.error("Raw output:", output);
+            throw new Error("Could not find the algorithm DID in the output");
+        }
+
+        jsAlgoDid = didMatch[0];
    });

-    it("should publish a python Algorithm using 'npm run cli publishAlgo'", function(done) {
+    it("should publish a python Algorithm using 'npm run cli publishAlgo'", async function() {
        const filePath = path.resolve(projectRoot, "metadata/pythonAlgo.json");

        // Ensure the metadata file exists
        if (!fs.existsSync(filePath)) {
-            done(new Error("Metadata file not found: " + filePath));
-            return;
+            throw new Error("Metadata file not found: " + filePath);
        }

-        exec(`npm run cli publishAlgo ${filePath}`, { cwd: projectRoot }, (error, stdout) => {
-            try {
-                expect(stdout).to.contain("Algorithm published. DID:");
-                const match = stdout.match(/did:op:[a-f0-9]{64}/);
-                if (match) {
-                    pythonAlgoDid = match[0];
-                }
-                done()
-            } catch (assertionError) {
-                done(assertionError);
-            }
-        });
+        const output = await runCommand(`npm run cli publishAlgo ${filePath}`);
+        const didMatch = output.match(/did:op:[a-f0-9]{64}/);
+        if (!didMatch) {
+            console.error("Raw output:", output);
+            throw new Error("Could not find the algorithm DID in the output");
+        }
+
+        pythonAlgoDid = didMatch[0];
    });

-    it("should get DDO using 'npm run cli getDDO' for download dataset", function(done) {
-
-        exec(`npm run cli getDDO ${downloadDatasetDid}`, { cwd: projectRoot }, (error, stdout) => {
-            expect(stdout).to.contain(`${downloadDatasetDid}`);
-            expect(stdout).to.contain("https://w3id.org/did/v1");
-            done()
-        });
+    it("should get DDO using 'npm run cli getDDO' for download dataset", async function() {
+        const output = await runCommand(`npm run cli getDDO ${downloadDatasetDid}`);
+
+        expect(output).to.contain(`Resolving Asset with DID: ${downloadDatasetDid}`)
    });

-    it("should get DDO using 'npm run cli getDDO' for compute dataset", function(done) {
-        exec(`npm run cli getDDO ${computeDatasetDid}`, { cwd: projectRoot }, (error, stdout) => {
-            expect(stdout).to.contain(`${computeDatasetDid}`);
-            expect(stdout).to.contain("https://w3id.org/did/v1");
-            done()
-        });
+    it("should get DDO using 'npm run cli getDDO' for compute dataset", async function() {
+        const output = await runCommand(`npm run cli getDDO ${computeDatasetDid}`);
+
+        expect(output).to.contain(`Resolving Asset with DID: ${computeDatasetDid}`)
    });

-    it("should get DDO using 'npm run cli getDDO' for JS algorithm", function(done) {
-        exec(`npm run cli getDDO ${jsAlgoDid}`, { cwd: projectRoot }, (error, stdout) => {
-            expect(stdout).to.contain(`${jsAlgoDid}`);
-            expect(stdout).to.contain("https://w3id.org/did/v1");
-            done()
-        });
+    it("should get DDO using 'npm run cli getDDO' for JS algorithm", async function() {
+        const output = await runCommand(`npm run cli getDDO ${jsAlgoDid}`);
+
+        expect(output).to.contain(`Resolving Asset with DID: ${jsAlgoDid}`)
    });

-    it("should get DDO using 'npm run cli getDDO' for python algorithm", function(done) {
-        exec(`npm run cli getDDO ${pythonAlgoDid}`, { cwd: projectRoot }, (error, stdout) => {
-            expect(stdout).to.contain(`${pythonAlgoDid}`);
-            expect(stdout).to.contain("https://w3id.org/did/v1");
-            done()
-        });
+    it("should get DDO using 'npm run cli getDDO' for python algorithm", async function() {
+        const output = await runCommand(`npm run cli getDDO ${pythonAlgoDid}`);
+
+        expect(output).to.contain(`Resolving Asset with DID: ${pythonAlgoDid}`)
    });

-    it("should download the download dataset", function(done) {
+    it("should download the download dataset", async function() {
        this.timeout(10000); // Increase timeout if needed
+        const output = await runCommand(`npm run cli download ${downloadDatasetDid} .`);

-        (async () => {
-            try {
-                const { stdout } = await new Promise<{ stdout: string, error: Error | null }>((resolve, reject) => {
-                    exec(`npm run cli download ${downloadDatasetDid} .`, { cwd: projectRoot }, (error, stdout) => {
-                        if (error) {
-                            reject(error);
-                        } else {
-                            resolve({ stdout, error: null });
-                        }
-                    });
-                });
-
-                expect(stdout).to.contain("File downloaded successfully");
-
-                // Path to the downloaded file
-                const downloadedFilePath = './LICENSE';
-
-                // Verify the downloaded file content hash matches the original file hash
-                const downloadedFileHash = computeFileHash(downloadedFilePath);
-                const originalFilePath = './metadata/LICENSE';
-
-                await downloadFile("https://raw.githubusercontent.com/oceanprotocol/ocean-node/refs/heads/main/LICENSE", originalFilePath);
-                const originalFileHash = computeFileHash(originalFilePath);
-
-                expect(downloadedFileHash).to.equal(originalFileHash);
-
-                // Clean up downloaded original file
-                fs.unlinkSync(originalFilePath);
+        expect(output).to.contain("File downloaded successfully");
+
+        // Path to the downloaded file
+        const downloadedFilePath = './LICENSE';
+
+        // Verify the downloaded file content hash matches the original file hash
+        const downloadedFileHash = computeFileHash(downloadedFilePath);
+        const originalFilePath = './metadata/LICENSE';
+
+        await downloadFile("https://raw.githubusercontent.com/oceanprotocol/ocean-node/refs/heads/main/LICENSE", originalFilePath);
+        const originalFileHash = computeFileHash(originalFilePath);
+
+        expect(downloadedFileHash).to.equal(originalFileHash);
+
+        // Clean up downloaded original file
+        fs.unlinkSync(originalFilePath);

-            done()
-        } catch (err) {
-            done(err);
-        }
-        })();
    });
-
});
diff --git a/test/paidComputeFlow.test.ts b/test/paidComputeFlow.test.ts
new file mode 100644
index 0000000..dec76c2
--- /dev/null
+++ b/test/paidComputeFlow.test.ts
@@ -0,0 +1,235 @@
+import { expect } from "chai";
+import path from "path";
+import fs from "fs";
+import { homedir } from 'os'
+import {
+    ProviderInstance
+} from "@oceanprotocol/lib";
+import { projectRoot, runCommand } from "./util.js";
+
+describe("Ocean CLI Paid Compute", function() {
+    this.timeout(600000); // Set a longer timeout to allow the command to execute
+
+    let computeDatasetDid: string;
+    let jsAlgoDid: string;
+    let computeEnvId: string;
+    let resources: any;
+    let computeJobId: string;
+    let agreementId: string;
+
+    const getAddresses = () => {
+        const data = JSON.parse(
+            fs.readFileSync(
+                process.env.ADDRESS_FILE ||
+                    `${homedir()}/.ocean/ocean-contracts/artifacts/address.json`,
+                'utf8'
+            )
+        )
+        return data.development
+    };
+
+    it("should publish a compute dataset using 'npm run cli publish'", async function() {
+        const metadataFile = path.resolve(projectRoot, "metadata/simpleComputeDataset.json");
+        // Ensure the metadata file exists
+        if (!fs.existsSync(metadataFile)) {
+            throw new Error("Metadata file not found: " + metadataFile);
+        }
+
+        process.env.PRIVATE_KEY = "0x1d751ded5a32226054cd2e71261039b65afb9ee1c746d055dd699b1150a5befc";
+        // Using this account: 0x529043886F21D9bc1AE0feDb751e34265a246e47
+        process.env.RPC = "http://127.0.0.1:8545";
+        process.env.NODE_URL = "http://127.0.0.1:8001";
+        process.env.ADDRESS_FILE = path.join(process.env.HOME || "", ".ocean/ocean-contracts/artifacts/address.json");
+
+        const output = await runCommand(`npm run cli publish ${metadataFile}`);
+
+        const didMatch = output.match(/did:op:[a-f0-9]{64}/);
+        if (!didMatch) {
+            console.error("Raw output:", output);
+            throw new Error("Could not find the compute dataset DID in the output");
+        }
+
+        computeDatasetDid = didMatch[0];
+    });
+
+    it("should publish a js Algorithm using 'npm run cli publishAlgo'", async function() {
+        const filePath = path.resolve(projectRoot, "metadata/jsAlgo.json");
+
+        // Ensure the metadata file exists
+        if (!fs.existsSync(filePath)) {
+            throw new Error("Metadata file not found: " + filePath);
+        }
+
+        const output = await runCommand(`npm run cli publishAlgo ${filePath}`);
+
+        const didMatch = output.match(/did:op:[a-f0-9]{64}/);
+        if (!didMatch) {
+            console.error("Raw output:", output);
+            throw new Error("Could not find the algorithm DID in the output");
+        }
+
+        jsAlgoDid = didMatch[0];
+    });
+
+    it("should get DDO using 'npm run cli getDDO' for compute dataset", async function() {
+        const output = await runCommand(`npm run cli getDDO ${computeDatasetDid}`);
+
+        expect(output).to.contain(`Resolving Asset with DID: ${computeDatasetDid}`)
+    });
+
+    it("should get DDO using 'npm run cli getDDO' for JS algorithm", async function() {
+        const output = await runCommand(`npm run cli getDDO ${jsAlgoDid}`);
+
+        expect(output).to.contain(`Resolving Asset with DID: ${jsAlgoDid}`)
+    });
+
+    it("should get compute environments using 'npm run cli getComputeEnvironments'", async function() {
+        const output = await runCommand(`npm run cli getComputeEnvironments`);
+
+        const jsonMatch = output.match(/Exiting compute environments:\s*([\s\S]*)/);
+        if (!jsonMatch) {
+            console.error("Raw output:", output);
+            throw new Error("Could not find compute environments in the output");
+        }
+        const match = jsonMatch[0].match(/Exiting compute environments:\s*(.*)/s);
+        const result = match ? match[1].trim() : null;
+        if (!result) {
+            console.error("Raw output:", output);
+            throw new Error("Could not find compute environments in the output");
+        }
+
+        let environments;
+        try {
+            environments = eval(result);
+        } catch (error) {
+            console.error(`Extracted output: ${jsonMatch[0]} and final result: ${result}`);
+            throw new Error("Failed to parse the extracted output:\n" + error);
+        }
+
+        expect(environments).to.be.an("array").that.is.not.empty;
+
+        const firstEnv = environments[0];
+
+        expect(firstEnv).to.have.property("id").that.is.a("string");
+        expect(firstEnv).to.have.property("consumerAddress").that.is.a("string");
+        expect(firstEnv).to.have.property("resources").that.is.an("array");
+
+        computeEnvId = firstEnv.id;
+
+        console.log(`Fetched Compute Env ID: ${computeEnvId}`);
+    });
+
+    it("should start paid compute on compute dataset and algorithm", async function() {
+        const computeEnvs = await ProviderInstance.getComputeEnvironments('http://127.0.0.1:8001');
+        const env = computeEnvs[0];
+        expect(env).to.be.an('object').and.to.not.be.null.and.to.not.be.undefined;
+
+        resources = [
+            {
+                id: 'cpu',
+                amount: env.resources[0].max - env.resources[0].inUse - 1
+            },
+            {
+                id: 'ram',
+                amount: env.resources[1].max - env.resources[1].inUse - 1000
+            },
+            {
+                id: 'disk',
+                amount: 0
+            }
+        ]
+        const paymentToken = getAddresses().Ocean
+        const output = await runCommand(`npm run cli -- startCompute ${computeDatasetDid} ${jsAlgoDid} ${computeEnvId} 900 ${paymentToken} '${JSON.stringify(resources)}' --accept true`);
+        const jobIdMatch = output.match(/JobID:\s*([^\s]+)/);
+        const agreementIdMatch = output.match(/Agreement ID:\s*([^\s]+)/);
+
+        if (!jobIdMatch) {
+            console.error("Raw output:", output);
+            throw new Error("Could not find Job ID in the output");
+        }
+
+        if (!agreementIdMatch) {
+            console.error("Raw output for finding agreement:", output);
+            throw new Error("Could not find Agreement ID in the output");
+        }
+
+        computeJobId = jobIdMatch[1];
+        agreementId = agreementIdMatch[1];
+
+        expect(computeJobId).to.be.a("string");
+        expect(agreementId).to.be.a("string");
+
+        console.log(`jobId: ${computeJobId}`);
+        console.log(`agreementId: ${agreementId}`);
+    });
+
+    it('should delay for compute job', (done) => {
+        setTimeout(() => done(), 10000)
+    }).timeout(10200)
+
+    it("should get job status", async () => {
+        const output = await runCommand(`npm run cli getJobStatus ${computeDatasetDid} ${computeJobId} ''`);
+        expect(output).to.contain(computeJobId);
+        expect(output.toLowerCase()).to.match(/status/);
+        console.log(`Job status retrieved for jobId: ${computeJobId}`);
+    });
+
+    it("should download compute job results", async () => {
+        const destFolder = path.join(projectRoot, "test-results", computeJobId);
+        fs.mkdirSync(destFolder, { recursive: true });
+
+        const output = await runCommand(`npm run cli downloadJobResults ${computeJobId} 1 ${destFolder}`);
+
+        expect(output.toLowerCase()).to.match(/download(ed)?/);
+
+        const files = fs.readdirSync(destFolder);
+        expect(files.length).to.be.greaterThan(0, "No result files downloaded");
+        console.log(`Downloaded results to: ${destFolder}`);
+        fs.rmSync(path.join(projectRoot, "test-results"), { recursive: true })
+    });
+
+});
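Two details of this new test file are worth flagging. First, the startCompute test reads env.resources by position (index 0 for cpu, 1 for ram), which silently breaks if the node ever returns the resources in a different order; looking them up by id is more robust. A sketch under that assumption (the EnvResource type mirrors only the fields the test itself asserts; it is not the @oceanprotocol/lib type, and pickResource is a hypothetical helper):

// Hypothetical id-based lookup instead of positional indexing.
type EnvResource = { id: string; max: number; inUse: number };

export const pickResource = (resources: EnvResource[], id: string): EnvResource => {
    const found = resources.find((r) => r.id === id);
    if (!found) {
        throw new Error(`Compute env does not advertise resource '${id}'`);
    }
    return found;
};

// Inside the test, the positional reads would become:
//   const cpu = pickResource(env.resources, 'cpu');
//   const ram = pickResource(env.resources, 'ram');
//   resources = [
//       { id: 'cpu', amount: cpu.max - cpu.inUse - 1 },
//       { id: 'ram', amount: ram.max - ram.inUse - 1000 },
//       { id: 'disk', amount: 0 }
//   ];

Second, eval(result) in the getComputeEnvironments test executes whatever text the CLI printed; if the printed environments are valid JSON, JSON.parse(result) would be the safer parser.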
diff --git a/test/setup.test.ts b/test/setup.test.ts
index 57aec50..b1c7dc4 100644
--- a/test/setup.test.ts
+++ b/test/setup.test.ts
@@ -35,13 +35,13 @@ describe("Ocean CLI Setup", function() {
        expect(stdout).to.contain("Downloads an asset into specified folder");
        expect(stdout).to.contain("allowAlgo [options] ");
        expect(stdout).to.contain("Approves an algorithm to run on a dataset");
-        expect(stdout).to.contain("startCompute [options] ");
+        expect(stdout).to.contain("startCompute [options] ");
        expect(stdout).to.contain("Starts a compute job");
        expect(stdout).to.contain("startFreeCompute [options] ");
        expect(stdout).to.contain("Starts a FREE compute job");
-        expect(stdout).to.contain("stopCompute [options] [agreementId]");
+        expect(stdout).to.contain("stopCompute [options] ");
        expect(stdout).to.contain("Stops a compute job");
-        expect(stdout).to.contain("getJobStatus [options] [agreementId]");
+        expect(stdout).to.contain("getJobStatus [options] ");
        expect(stdout).to.contain("Displays the compute job status");
        expect(stdout).to.contain("downloadJobResults [destinationFolder]");
        expect(stdout).to.contain("Downloads compute job results");
diff --git a/test/tsconfig.json b/test/tsconfig.json
index 3db26ec..a448a4e 100644
--- a/test/tsconfig.json
+++ b/test/tsconfig.json
@@ -1,14 +1,20 @@
{
    "compilerOptions": {
-        "lib": ["es6", "es7", "dom"],
-        "module": "ES2022",
-        "target": "ES2022",
-        "noUnusedLocals": false,
+        "resolveJsonModule": true,
+        "moduleResolution": "node",
        "esModuleInterop": true,
-        "allowSyntheticDefaultImports": true
+        "allowSyntheticDefaultImports": true,
+        "lib": ["ESNext", "dom"],
+        "declaration": true,
+        "module": "ES2020",
+        "target": "ES2020",
+        "removeComments": false,
+        "experimentalDecorators": true,
+        "preserveConstEnums": true,
+        "outDir": "./dist",
+        "sourceMap": true,
+        "skipLibCheck": true
    },
-    "ts-node": {
-        "esm": true,
-        "experimentalSpecifierResolution": "node"
-    }
-}
+    "include": ["src/**/*", "src/tests/**/*"],
+    "exclude": ["node_modules"]
+}
\ No newline at end of file
diff --git a/test/util.ts b/test/util.ts
new file mode 100644
index 0000000..d2fb9d3
--- /dev/null
+++ b/test/util.ts
@@ -0,0 +1,26 @@
+import { exec } from "child_process";
+import path from "path";
+import util from "util";
+
+import { dirname } from 'path'
+import { fileURLToPath } from 'url'
+
+export const execPromise = util.promisify(exec);
+
+export const __filename = fileURLToPath(import.meta.url)
+export const __dirname = dirname(__filename)
+
+export const projectRoot = path.resolve(__dirname, "..");
+export const runCommand = async (command: string): Promise<string> => {
+    console.log(`\n[CMD]: ${command}`);
+    try {
+        const { stdout } = await execPromise(command, { cwd: projectRoot });
+        console.log(`[OUTPUT]:\n${stdout}`);
+        return stdout;
+    } catch (error: any) {
+        console.error(`[ERROR]:\n${error.stderr || error.message}`);
+        throw error;
+    }
+};
\ No newline at end of file
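For reference, this is how a new spec file would consume the shared helpers from test/util.ts; a minimal sketch (the file name and the `npm run cli h` help invocation are assumptions, not part of this patch):

// test/example.test.ts (hypothetical): minimal usage of the shared helpers.
import { expect } from "chai";
import { runCommand } from "./util.js";

describe("Ocean CLI smoke", function () {
    this.timeout(60000); // CLI calls shell out to npm and can be slow

    it("lists the available commands", async function () {
        // runCommand executes in projectRoot and resolves with stdout.
        const output = await runCommand("npm run cli h");
        expect(output).to.contain("startCompute");
    });
});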