Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -260,7 +260,7 @@ jobs:
DB_TYPE: 'elasticsearch'
MAX_REQ_PER_MINUTE: 320
MAX_CONNECTIONS_PER_MINUTE: 320
DOCKER_COMPUTE_ENVIRONMENTS: '[{"socketPath":"/var/run/docker.sock","resources":[{"id":"disk","total":1000000000}],"storageExpiry":604800,"maxJobDuration":3600,"fees":{"8996":[{"prices":[{"id":"cpu","price":1}]}]},"free":{"maxJobDuration":60,"maxJobs":3,"resources":[{"id":"cpu","max":1},{"id":"ram","max":1000000000},{"id":"disk","max":1000000000}]}}]'
DOCKER_COMPUTE_ENVIRONMENTS: '[{"socketPath":"/var/run/docker.sock","resources":[{"id":"disk","total":10}],"storageExpiry":604800,"maxJobDuration":3600,"fees":{"8996":[{"prices":[{"id":"cpu","price":1}]}]},"free":{"maxJobDuration":60,"maxJobs":3,"resources":[{"id":"cpu","max":1},{"id":"ram","max":1},{"id":"disk","max":1}]}}]'
- name: Check Ocean Node is running
run: |
for i in $(seq 1 90); do
Expand Down
48 changes: 24 additions & 24 deletions docs/GPU.md
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@ Here is the full definition of DOCKER_COMPUTE_ENVIRONMENTS:
}
}
},
{ "id": "disk", "total": 1000000000 }
{ "id": "disk", "total": 1 }
],
"storageExpiry": 604800,
"maxJobDuration": 3600,
Expand All @@ -102,8 +102,8 @@ Here is the full definition of DOCKER_COMPUTE_ENVIRONMENTS:
"maxJobs": 3,
"resources": [
{ "id": "cpu", "max": 1 },
{ "id": "ram", "max": 1000000000 },
{ "id": "disk", "max": 1000000000 },
{ "id": "ram", "max": 1 },
{ "id": "disk", "max": 1 },
{ "id": "myGPU", "max": 1 }
]
}
Expand All @@ -122,7 +122,7 @@ root@gpu-1:/repos/ocean/ocean-node# curl http://localhost:8000/api/services/comp
{
"id": "0xd6b10b27aab01a72070a5164c07d0517755838b9cb9857e2d5649287ec3aaaa2-0x66073c81f833deaa2f8e2a508f69cf78f8a99b17ba1a64f369af921750f93914",
"runningJobs": 0,
"consumerAddress": "0x4fb80776C8eb4cAbe7730dcBCdb1fa6ecD3c460E",
"consumerAddress": "0x00",
"platform": { "architecture": "x86_64", "os": "Ubuntu 22.04.3 LTS" },
"fees": {
"1": [
Expand All @@ -141,9 +141,9 @@ root@gpu-1:/repos/ocean/ocean-node# curl http://localhost:8000/api/services/comp
{ "id": "cpu", "total": 8, "max": 8, "min": 1, "inUse": 0 },
{
"id": "ram",
"total": 24888963072,
"max": 24888963072,
"min": 1000000000,
"total": 23,
"max": 23,
"min": 1,
"inUse": 0
},
{
Expand All @@ -162,15 +162,15 @@ root@gpu-1:/repos/ocean/ocean-node# curl http://localhost:8000/api/services/comp
"min": 0,
"inUse": 0
},
{ "id": "disk", "total": 1000000000, "max": 1000000000, "min": 0, "inUse": 0 }
{ "id": "disk", "total": 1, "max": 1, "min": 0, "inUse": 0 }
],
"free": {
"maxJobDuration": 60,
"maxJobs": 3,
"resources": [
{ "id": "cpu", "max": 1, "inUse": 0 },
{ "id": "ram", "max": 1000000000, "inUse": 0 },
{ "id": "disk", "max": 1000000000, "inUse": 0 },
{ "id": "ram", "max": 1, "inUse": 0 },
{ "id": "disk", "max": 1, "inUse": 0 },
{ "id": "myGPU", "max": 1, "inUse": 0 }
]
},
Expand All @@ -194,7 +194,7 @@ Start a free job using:
"rawcode": "import tensorflow as tf\nsess = tf.compat.v1.Session(config=tf.compat.v1.ConfigProto(log_device_placement=True))\nprint(\"Num GPUs Available: \", len(tf.config.list_physical_devices('GPU')))\ngpus = tf.config.list_physical_devices('GPU')\nfor gpu in gpus:\n\tprint('Name:', gpu.name, ' Type:', gpu.device_type)"
}
},
"consumerAddress": "0xC7EC1970B09224B317c52d92f37F5e1E4fF6B687",
"consumerAddress": "0x00",
"signature": "123",
"nonce": 1,
"environment": "0xd6b10b27aab01a72070a5164c07d0517755838b9cb9857e2d5649287ec3aaaa2-0x66073c81f833deaa2f8e2a508f69cf78f8a99b17ba1a64f369af921750f93914",
Expand Down Expand Up @@ -259,7 +259,7 @@ Then define DOCKER_COMPUTE_ENVIRONMENTS with
},
{
"id": "disk",
"total": 1000000000
"total": 1
}
],
"storageExpiry": 604800,
Expand Down Expand Up @@ -291,11 +291,11 @@ Then define DOCKER_COMPUTE_ENVIRONMENTS with
},
{
"id": "ram",
"max": 1000000000
"max": 1
},
{
"id": "disk",
"max": 1000000000
"max": 1
},
{
"id": "myGPU",
Expand All @@ -311,7 +311,7 @@ aka

```bash
export DOCKER_COMPUTE_ENVIRONMENTS="[{\"socketPath\":\"/var/run/docker.sock\",\"resources\":[{\"id\":\"myGPU\",\"description\":\"AMD Radeon RX 9070 XT\",\"type\":\"gpu\",\"total\":1,\"init\":{\"advanced\":{
\"IpcMode\":\"host\",\"CapAdd\":[\"CAP_SYS_PTRACE\"],\"Devices\":[\"/dev/dxg\",\"/dev/dri/card0\"],\"Binds\":[\"/usr/lib/wsl/lib/libdxcore.so:/usr/lib/libdxcore.so\",\"/opt/rocm/lib/libhsa-runtime64.so.1:/opt/rocm/lib/libhsa-runtime64.so.1\"],\"SecurityOpt\":{\"seccomp\":\"unconfined\"}}}},{\"id\":\"disk\",\"total\":1000000000}],\"storageExpiry\":604800,\"maxJobDuration\":3600,\"fees\":{\"1\":[{\"feeToken\":\"0x123\",\"prices\":[{\"id\":\"cpu\",\"price\":1},{\"id\":\"myGPU\",\"price\":3}]}]},\"free\":{\"maxJobDuration\":60,\"maxJobs\":3,\"resources\":[{\"id\":\"cpu\",\"max\":1},{\"id\":\"ram\",\"max\":1000000000},{\"id\":\"disk\",\"max\":1000000000},{\"id\":\"myGPU\",\"max\":1}]}}]"
\"IpcMode\":\"host\",\"CapAdd\":[\"CAP_SYS_PTRACE\"],\"Devices\":[\"/dev/dxg\",\"/dev/dri/card0\"],\"Binds\":[\"/usr/lib/wsl/lib/libdxcore.so:/usr/lib/libdxcore.so\",\"/opt/rocm/lib/libhsa-runtime64.so.1:/opt/rocm/lib/libhsa-runtime64.so.1\"],\"SecurityOpt\":{\"seccomp\":\"unconfined\"}}}},{\"id\":\"disk\",\"total\":10}],\"storageExpiry\":604800,\"maxJobDuration\":3600,\"fees\":{\"1\":[{\"feeToken\":\"0x123\",\"prices\":[{\"id\":\"cpu\",\"price\":1},{\"id\":\"myGPU\",\"price\":3}]}]},\"free\":{\"maxJobDuration\":60,\"maxJobs\":3,\"resources\":[{\"id\":\"cpu\",\"max\":1},{\"id\":\"ram\",\"max\":1},{\"id\":\"disk\",\"max\":1},{\"id\":\"myGPU\",\"max\":1}]}}]"
```

you should have it in your compute envs:
Expand All @@ -325,7 +325,7 @@ root@gpu-1:/repos/ocean/ocean-node# curl http://localhost:8000/api/services/comp
{
"id": "0xbb5773e734e1b188165dac88d9a3dc8ac28bc9f5624b45fa8bbd8fca043de7c1-0x2c2761f938cf186eeb81f71dee06ad7edb299493e39c316c390d0c0691e6585c",
"runningJobs": 0,
"consumerAddress": "0x4fb80776C8eb4cAbe7730dcBCdb1fa6ecD3c460E",
"consumerAddress": "0x00",
"platform": {
"architecture": "x86_64",
"os": "Ubuntu 24.04.2 LTS"
Expand Down Expand Up @@ -359,9 +359,9 @@ root@gpu-1:/repos/ocean/ocean-node# curl http://localhost:8000/api/services/comp
},
{
"id": "ram",
"total": 33617674240,
"max": 33617674240,
"min": 1000000000,
"total": 31,
"max": 31,
"min": 1,
"inUse": 0
},
{
Expand Down Expand Up @@ -389,8 +389,8 @@ root@gpu-1:/repos/ocean/ocean-node# curl http://localhost:8000/api/services/comp
},
{
"id": "disk",
"total": 1000000000,
"max": 1000000000,
"total": 10,
"max": 10,
"min": 0,
"inUse": 0
}
Expand All @@ -406,12 +406,12 @@ root@gpu-1:/repos/ocean/ocean-node# curl http://localhost:8000/api/services/comp
},
{
"id": "ram",
"max": 1000000000,
"max": 1,
"inUse": 0
},
{
"id": "disk",
"max": 1000000000,
"max": 1,
"inUse": 0
},
{
Expand Down Expand Up @@ -450,7 +450,7 @@ Start a free job with
"rawcode": "import tensorflow as tf\nsess = tf.compat.v1.Session(config=tf.compat.v1.ConfigProto(log_device_placement=True))\nprint(\"Num GPUs Available: \", len(tf.config.list_physical_devices('GPU')))\ngpus = tf.config.list_physical_devices('GPU')\nfor gpu in gpus:\n\tprint('Name:', gpu.name, ' Type:', gpu.device_type)"
}
},
"consumerAddress": "0xC7EC1970B09224B317c52d92f37F5e1E4fF6B687",
"consumerAddress": "0x00",
"signature": "123",
"nonce": 1,
"environment": "0xbb5773e734e1b188165dac88d9a3dc8ac28bc9f5624b45fa8bbd8fca043de7c1-0x2c2761f938cf186eeb81f71dee06ad7edb299493e39c316c390d0c0691e6585c",
Expand Down
6 changes: 3 additions & 3 deletions docs/env.md
Original file line number Diff line number Diff line change
Expand Up @@ -130,7 +130,7 @@ The `DOCKER_COMPUTE_ENVIRONMENTS` environment variable should be a JSON array of
"resources": [
{
"id": "disk",
"total": 1000000000
"total": 10
}
],
"storageExpiry": 604800,
Expand Down Expand Up @@ -158,11 +158,11 @@ The `DOCKER_COMPUTE_ENVIRONMENTS` environment variable should be a JSON array of
},
{
"id": "ram",
"max": 1000000000
"max": 1
},
{
"id": "disk",
"max": 1000000000
"max": 1
}
]
}
Expand Down
2 changes: 1 addition & 1 deletion scripts/ocean-node-quickstart.sh
Original file line number Diff line number Diff line change
Expand Up @@ -142,7 +142,7 @@ fi
# Set default compute environments if not already defined
if [ -z "$DOCKER_COMPUTE_ENVIRONMENTS" ]; then
echo "Setting default DOCKER_COMPUTE_ENVIRONMENTS configuration"
export DOCKER_COMPUTE_ENVIRONMENTS="[{\"socketPath\":\"/var/run/docker.sock\",\"resources\":[{\"id\":\"disk\",\"total\":1000000000}],\"storageExpiry\":604800,\"maxJobDuration\":36000,\"fees\":{\"1\":[{\"feeToken\":\"0x123\",\"prices\":[{\"id\":\"cpu\",\"price\":1}]}]},\"free\":{\"maxJobDuration\":360000,\"maxJobs\":3,\"resources\":[{\"id\":\"cpu\",\"max\":1},{\"id\":\"ram\",\"max\":1000000000},{\"id\":\"disk\",\"max\":1000000000}]}}]"
export DOCKER_COMPUTE_ENVIRONMENTS="[{\"socketPath\":\"/var/run/docker.sock\",\"resources\":[{\"id\":\"disk\",\"total\":10}],\"storageExpiry\":604800,\"maxJobDuration\":36000,\"fees\":{\"1\":[{\"feeToken\":\"0x123\",\"prices\":[{\"id\":\"cpu\",\"price\":1}]}]},\"free\":{\"maxJobDuration\":360000,\"maxJobs\":3,\"resources\":[{\"id\":\"cpu\",\"max\":1},{\"id\":\"ram\",\"max\":1},{\"id\":\"disk\",\"max\":1}]}}]"
fi

cat <<EOF > docker-compose.yml
Expand Down
2 changes: 1 addition & 1 deletion scripts/ocean-node-update.sh
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
#!/bin/bash

DEFAULT_DOCKER_ENVIRONMENTS='[{"socketPath":"/var/run/docker.sock","resources":[{"id":"disk","total":1000000000}],"storageExpiry":604800,"maxJobDuration":36000,"fees":{"1":[{"feeToken":"0x123","prices":[{"id":"cpu","price":1}]}]},"free":{"maxJobDuration":360000,"maxJobs":3,"resources":[{"id":"cpu","max":1},{"id":"ram","max":1000000000},{"id":"disk","max":1000000000}]}}]'
DEFAULT_DOCKER_ENVIRONMENTS='[{"socketPath":"/var/run/docker.sock","resources":[{"id":"disk","total":10}],"storageExpiry":604800,"maxJobDuration":36000,"fees":{"1":[{"feeToken":"0x123","prices":[{"id":"cpu","price":1}]}]},"free":{"maxJobDuration":360000,"maxJobs":3,"resources":[{"id":"cpu","max":1},{"id":"ram","max":1},{"id":"disk","max":1}]}}]'

check_prerequisites() {
if [ ! -f "docker-compose.yml" ]; then
Expand Down
36 changes: 33 additions & 3 deletions src/@types/C2D/C2D.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { MetadataAlgorithm } from '@oceanprotocol/ddo-js'
import { MetadataAlgorithm, ConsumerParameter } from '@oceanprotocol/ddo-js'
import type { BaseFileObject } from '../fileObject.js'
export enum C2DClusterType {
// eslint-disable-next-line no-unused-vars
Expand Down Expand Up @@ -145,6 +145,11 @@ export interface ComputeResult {
export type DBComputeJobMetadata = {
[key: string]: string | number | boolean
}

export interface ComputeJobTerminationDetails {
OOMKilled: boolean
exitCode: number
}
export interface ComputeJob {
owner: string
did?: string
Expand All @@ -160,6 +165,7 @@ export interface ComputeJob {
agreementId?: string
environment?: string
metadata?: DBComputeJobMetadata
terminationDetails?: ComputeJobTerminationDetails
}

export interface ComputeOutput {
Expand All @@ -181,15 +187,27 @@ export interface ComputeAsset {
transferTxId?: string
userdata?: { [key: string]: any }
}

export interface ExtendedMetadataAlgorithm extends MetadataAlgorithm {
container: {
// retain existing properties
entrypoint: string
image: string
tag: string
checksum: string
dockerfile?: string // optional
additionalDockerFiles?: { [key: string]: any }
consumerParameters?: ConsumerParameter[]
}
}
export interface ComputeAlgorithm {
documentId?: string
serviceId?: string
fileObject?: BaseFileObject
meta?: MetadataAlgorithm
meta?: ExtendedMetadataAlgorithm
transferTxId?: string
algocustomdata?: { [key: string]: any }
userdata?: { [key: string]: any }
envs?: { [key: string]: any }
}

export interface AlgoChecksums {
Expand Down Expand Up @@ -236,6 +254,10 @@ export enum C2DStatusNumber {
// eslint-disable-next-line no-unused-vars
PullImageFailed = 11,
// eslint-disable-next-line no-unused-vars
BuildImage = 12,
// eslint-disable-next-line no-unused-vars
BuildImageFailed = 13,
// eslint-disable-next-line no-unused-vars
ConfiguringVolumes = 20,
// eslint-disable-next-line no-unused-vars
VolumeCreationFailed = 21,
Expand All @@ -254,6 +276,8 @@ export enum C2DStatusNumber {
// eslint-disable-next-line no-unused-vars
AlgorithmFailed = 41,
// eslint-disable-next-line no-unused-vars
DiskQuotaExceeded = 42,
// eslint-disable-next-line no-unused-vars
FilteringResults = 50,
// eslint-disable-next-line no-unused-vars
PublishingResults = 60,
Expand All @@ -272,6 +296,10 @@ export enum C2DStatusText {
// eslint-disable-next-line no-unused-vars
PullImageFailed = 'Pulling algorithm image failed',
// eslint-disable-next-line no-unused-vars
BuildImage = 'Building algorithm image',
// eslint-disable-next-line no-unused-vars
BuildImageFailed = 'Building algorithm image failed',
// eslint-disable-next-line no-unused-vars
ConfiguringVolumes = 'Configuring volumes',
// eslint-disable-next-line no-unused-vars
VolumeCreationFailed = 'Volume creation failed',
Expand All @@ -290,6 +318,8 @@ export enum C2DStatusText {
// eslint-disable-next-line no-unused-vars
AlgorithmFailed = 'Failed to run algorithm',
// eslint-disable-next-line no-unused-vars
DiskQuotaExceeded = 'Error: disk quota exceeded',
// eslint-disable-next-line no-unused-vars
FilteringResults = 'Filtering results',
// eslint-disable-next-line no-unused-vars
PublishingResults = 'Publishing results',
Expand Down
Loading
Loading