808 changes: 9 additions & 799 deletions package-lock.json

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion package.json
@@ -74,7 +74,7 @@
"@libp2p/websockets": "^8.1.1",
"@multiformats/multiaddr": "^10.2.0",
"@oceanprotocol/contracts": "^2.4.0",
"@oceanprotocol/ddo-js": "^0.1.2",
"@oceanprotocol/ddo-js": "^0.1.4",
"@types/lodash.clonedeep": "^4.5.7",
"aws-sdk": "^2.1591.0",
"axios": "^1.8.4",
1 change: 1 addition & 0 deletions src/@types/C2D/C2D.ts
@@ -194,6 +194,7 @@ export interface ComputeAlgorithm {
export interface AlgoChecksums {
files: string
container: string
+ serviceId?: string
}

export interface DBComputeJobPayment {
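For reference, the `AlgoChecksums` type now optionally carries the algorithm's service ID alongside the two checksums. A minimal sketch of the resulting shape, with illustrative placeholder values (not taken from the repository):

```ts
// Extended AlgoChecksums shape after this change (src/@types/C2D/C2D.ts).
export interface AlgoChecksums {
  files: string
  container: string
  serviceId?: string
}

// Illustrative instance; the real values are sha256 hex digests computed by
// getAlgoChecksums, and serviceId is the algorithm service the checksums were
// computed for (placeholders below, not actual values).
const example: AlgoChecksums = {
  files: '<sha256-of-algorithm-files-section>',
  container: '<sha256-of-entrypoint-plus-image-checksum>',
  serviceId: '<algorithm-service-id>'
}
```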
2 changes: 1 addition & 1 deletion src/components/core/compute/initialize.ts
@@ -257,7 +257,7 @@ export class ComputeInitializeHandler extends CommandHandler {
httpStatus: 400,
error: `Algorithm ${
task.algorithm.documentId
- } not allowed to run on the dataset: ${ddoInstance.getDid()}`
+ } with serviceId ${task.algorithm.serviceId} not allowed to run on the dataset: ${ddoInstance.getDid()} with serviceId: ${task.datasets[safeIndex].serviceId}`
}
}
}
2 changes: 1 addition & 1 deletion src/components/core/compute/startCompute.ts
@@ -351,7 +351,7 @@ export class PaidComputeStartHandler extends CommandHandler {
httpStatus: 400,
error: `Algorithm ${
task.algorithm.documentId
- } not allowed to run on the dataset: ${ddoInstance.getDid()}`
+ } with serviceId ${task.algorithm.serviceId} not allowed to run on the dataset: ${ddoInstance.getDid()} with serviceId: ${task.datasets[safeIndex].serviceId}`
}
}
}
22 changes: 14 additions & 8 deletions src/components/core/compute/utils.ts
@@ -21,7 +21,8 @@ export async function getAlgoChecksums(
): Promise<AlgoChecksums> {
const checksums: AlgoChecksums = {
files: '',
- container: ''
+ container: '',
+ serviceId: algoServiceId
}
try {
const algoDDO = await new FindDdoHandler(oceanNode).findAndFormatDdo(algoDID)
@@ -54,7 +55,6 @@
metadata.algorithm.container.entrypoint + metadata.algorithm.container.checksum
)
.digest('hex')
- CORE_LOGGER.info(`Algorithm checksums: ${JSON.stringify(checksums)}`)
return checksums
} catch (error) {
CORE_LOGGER.error(`Fetching algorithm checksums failed: ${error.message}`)
@@ -67,6 +67,7 @@ export async function validateAlgoForDataset(
algoChecksums: {
files: string
container: string
+ serviceId?: string
},
ddoInstance: VersionedDDO,
datasetServiceId: string,
@@ -96,11 +97,6 @@
if (!hasTrustedPublishers && !hasTrustedAlgorithms) return false

if (algoDID) {
- CORE_LOGGER.info(`Validating algorithm...`)
- CORE_LOGGER.info(`Algorithm DID: ${algoDID}`)
- CORE_LOGGER.info(`Algorithm checksums: ${JSON.stringify(algoChecksums)}`)
- CORE_LOGGER.info(`Trusted publishers: ${JSON.stringify(publishers)}`)
- CORE_LOGGER.info(`Trusted algorithms: ${JSON.stringify(algorithms)}`)
// Check if algorithm is explicitly trusted
const isAlgoTrusted =
hasTrustedAlgorithms &&
@@ -111,6 +107,17 @@
const containerMatch =
algo.containerSectionChecksum === '*' ||
algo.containerSectionChecksum === algoChecksums.container
+ if ('serviceId' in algo) {
+   const serviceIdMatch =
+     algo.serviceId === '*' || algo.serviceId === algoChecksums.serviceId
+   CORE_LOGGER.info(
+     `didMatch: ${didMatch}, filesMatch: ${filesMatch}, containerMatch: ${containerMatch}, serviceIdMatch: ${serviceIdMatch}`
+   )
+   return didMatch && filesMatch && containerMatch && serviceIdMatch
+ }
+ CORE_LOGGER.info(
+   `didMatch: ${didMatch}, filesMatch: ${filesMatch}, containerMatch: ${containerMatch}`
+ )
return didMatch && filesMatch && containerMatch
})

@@ -128,7 +135,6 @@
.includes(nftAddress?.toLowerCase())
}
}

return isAlgoTrusted && isPublisherTrusted
}

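To illustrate the new rule in `validateAlgoForDataset`: a trusted-algorithm entry that declares a `serviceId` (or the `'*'` wildcard) must now also match the service ID recorded in the algorithm checksums, while entries without a `serviceId` keep the previous behaviour. Below is a rough, self-contained sketch; only `containerSectionChecksum` and `serviceId` appear in this diff, so the `did`/`filesChecksum` comparisons and the sample entry are assumptions for illustration:

```ts
// Local copies of the relevant shapes, for a self-contained sketch.
interface AlgoChecksums {
  files: string
  container: string
  serviceId?: string
}

interface TrustedAlgorithm {
  did: string
  filesChecksum: string            // assumed field name; not shown in this diff
  containerSectionChecksum: string // shown in the diff
  serviceId?: string               // optional: new per-service restriction
}

// Sketch of the per-entry check: '*' acts as a wildcard, and serviceId is
// only enforced when the trusted entry declares it (backwards compatible).
function isTrusted(
  algo: TrustedAlgorithm,
  algoDID: string,
  checksums: AlgoChecksums
): boolean {
  const didMatch = algo.did === algoDID // did comparison assumed; not shown in this diff
  const filesMatch =
    algo.filesChecksum === '*' || algo.filesChecksum === checksums.files
  const containerMatch =
    algo.containerSectionChecksum === '*' ||
    algo.containerSectionChecksum === checksums.container
  if ('serviceId' in algo) {
    const serviceIdMatch =
      algo.serviceId === '*' || algo.serviceId === checksums.serviceId
    return didMatch && filesMatch && containerMatch && serviceIdMatch
  }
  return didMatch && filesMatch && containerMatch
}
```

Under this sketch, a dataset publisher could pin an algorithm to a single service by setting `serviceId` on the trusted entry, or keep `'*'` to accept any service of that algorithm.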
1 change: 0 additions & 1 deletion src/components/core/handler/fileInfoHandler.ts
@@ -34,7 +34,6 @@ async function formatMetadata(file: ArweaveFileObject | IpfsFileObject | UrlFile
'get',
false
)
- CORE_LOGGER.logMessage(`Metadata for file: ${contentLength} ${contentType}`)

return {
valid: true,
5 changes: 0 additions & 5 deletions src/test/integration/algorithmsAccess.test.ts
@@ -235,11 +235,6 @@ describe('Trusted algorithms Flow', () => {
console.log(resp)
assert(resp, 'Failed to get response')
assert(resp.status.httpStatus === 400, 'Failed to get 400 response')
- assert(
-   resp.status.error ===
-     `Algorithm ${publishedAlgoDataset.ddo.id} not allowed to run on the dataset: ${publishedComputeDataset.ddo.id}`,
-   'Inconsistent error message'
- )
assert(resp.stream === null, 'Failed to get stream')
})
