Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions src/components/core/compute/getStatus.ts
Original file line number Diff line number Diff line change
Expand Up @@ -53,9 +53,13 @@ export class ComputeGetStatusHandler extends CommandHandler {
}
} else {
engines = await this.getOceanNode().getC2DEngines().getAllEngines()
CORE_LOGGER.logMessage(
'ComputeGetStatusCommand: No jobId provided, querying all C2D clusters'
)
}

for (const engine of engines) {
CORE_LOGGER.logMessage(`ComputeGetStatusCommand: Querying engine`)
const jobs = await engine.getComputeJobStatus(
task.consumerAddress,
task.agreementId,
Expand Down
50 changes: 49 additions & 1 deletion src/components/core/compute/initialize.ts
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ import { C2DEngineDocker, getAlgorithmImage } from '../../c2d/compute_engine_doc
import { Credentials, DDOManager } from '@oceanprotocol/ddo-js'
import { areKnownCredentialTypes, checkCredentials } from '../../../utils/credentials.js'
import { PolicyServer } from '../../policyServer/index.js'
import { getAlgoChecksums, validateAlgoForDataset } from './utils.js'

export class ComputeInitializeHandler extends CommandHandler {
validate(command: ComputeInitializeCommand): ValidateParams {
Expand Down Expand Up @@ -88,6 +89,28 @@ export class ComputeInitializeHandler extends CommandHandler {
}
}
}

const algoChecksums = await getAlgoChecksums(
task.algorithm.documentId,
task.algorithm.serviceId,
node
)

const isRawCodeAlgorithm = task.algorithm.meta?.rawcode
const hasValidChecksums = algoChecksums.container && algoChecksums.files

if (!isRawCodeAlgorithm && !hasValidChecksums) {
const errorMessage =
'Failed to retrieve algorithm checksums. Both container and files checksums are required.'
CORE_LOGGER.error(errorMessage)
return {
stream: null,
status: {
httpStatus: 500,
error: errorMessage
}
}
}
if (engine === null) {
return {
stream: null,
Expand Down Expand Up @@ -201,7 +224,8 @@ export class ComputeInitializeHandler extends CommandHandler {
const {
chainId: ddoChainId,
nftAddress,
credentials
credentials,
metadata
} = ddoInstance.getDDOFields()
const isOrdable = isOrderingAllowedForAsset(ddo)
if (!isOrdable.isOrdable) {
Expand All @@ -214,6 +238,30 @@ export class ComputeInitializeHandler extends CommandHandler {
}
}
}
if (metadata.type !== 'algorithm') {
const index = task.datasets.findIndex(
(d) => d.documentId === ddoInstance.getDid()
)
const safeIndex = index === -1 ? 0 : index
const validAlgoForDataset = await validateAlgoForDataset(
task.algorithm.documentId,
algoChecksums,
ddoInstance,
task.datasets[safeIndex].serviceId,
node
)
if (!validAlgoForDataset) {
return {
stream: null,
status: {
httpStatus: 400,
error: `Algorithm ${
task.algorithm.documentId
} not allowed to run on the dataset: ${ddoInstance.getDid()}`
}
}
}
}
// check credentials (DDO level)
let accessGrantedDDOLevel: boolean
if (credentials) {
Expand Down
82 changes: 80 additions & 2 deletions src/test/data/assets.ts
Original file line number Diff line number Diff line change
Expand Up @@ -190,8 +190,14 @@ export const computeAssetWithCredentials = {
compute: {
allowRawAlgorithm: false,
allowNetworkAccess: true,
publisherTrustedAlgorithmPublishers: [] as any,
publisherTrustedAlgorithms: [] as any
publisherTrustedAlgorithmPublishers: ['*'] as any,
publisherTrustedAlgorithms: [
{
did: '*',
filesChecksum: '*',
containerSectionChecksum: '*'
}
] as any
}
}
],
Expand Down Expand Up @@ -287,6 +293,78 @@ export const algoAssetWithCredentials = {
}

// Test fixture: an Ocean Protocol V4 DDO for a compute-type dataset.
// Empty-string fields (id, nftAddress, datatokenAddress, nft.address, owner, ...)
// are placeholders — presumably filled in by the test harness after the asset is
// deployed/published on the local chain; verify against the consuming tests.
export const computeAsset = {
  '@context': ['https://w3id.org/did/v1'],
  id: '', // DID — placeholder, assigned after publish
  nftAddress: '', // data NFT contract address — placeholder
  version: '4.1.0', // DDO schema version
  chainId: 8996, // NOTE(review): looks like the local barge/dev chain id — confirm
  metadata: {
    created: '2021-12-20T14:35:20Z',
    updated: '2021-12-20T14:35:20Z',
    type: 'dataset', // 'dataset' (not 'algorithm') — relevant for compute validation paths
    name: 'cli fixed asset',
    description: 'asset published using ocean.js cli tool',
    tags: ['test'],
    author: 'oceanprotocol',
    license: 'https://market.oceanprotocol.com/terms',
    additionalInformation: {
      termsAndConditions: true
    }
  },
  services: [
    {
      // Single compute service exposing one downloadable test file.
      id: '1155995dda741e93afe4b1c6ced2d01734a6ec69865cc0997daf1f4db7259a36',
      type: 'compute',
      files: {
        files: [
          {
            type: 'url',
            url: 'https://raw.githubusercontent.com/oceanprotocol/testdatasets/main/shs_dataset_test.txt',
            method: 'GET'
          }
        ]
      },
      datatokenAddress: '', // placeholder — set after datatoken deployment
      serviceEndpoint: 'https://v4.provider.oceanprotocol.com',
      timeout: 86400, // service timeout in seconds (24h)
      compute: {
        allowRawAlgorithm: false,
        allowNetworkAccess: true,
        // '*' wildcards presumably mean "trust any publisher / any algorithm,
        // any checksum" — TODO confirm against the algo-validation logic.
        // `as any` silences the stricter DDO compute-options type; kept as-is
        // because the exact declared type is not visible here.
        publisherTrustedAlgorithmPublishers: ['*'] as any,
        publisherTrustedAlgorithms: [
          {
            did: '*',
            filesChecksum: '*',
            containerSectionChecksum: '*'
          }
        ] as any
      }
    }
  ],
  event: {},
  nft: {
    address: '', // placeholder
    name: 'Ocean Data NFT',
    symbol: 'OCEAN-NFT',
    state: 5, // NOTE(review): nft state 5 (not 0/active) — verify this is the intended state for an orderable fixture
    tokenURI: '',
    owner: '',
    created: ''
  },
  purgatory: {
    state: false // not flagged in purgatory
  },
  datatokens: [] as any,
  stats: {
    allocated: 0,
    orders: 0,
    price: {
      value: '0'
    }
  }
}

export const computeAssetWithNoAccess = {
'@context': ['https://w3id.org/did/v1'],
id: '',
nftAddress: '',
Expand Down
Loading
Loading