6 changes: 3 additions & 3 deletions .github/workflows/ci.yml
@@ -163,7 +163,7 @@ jobs:
ASSET_PURGATORY_URL: 'https://raw.githubusercontent.com/oceanprotocol/list-purgatory/main/list-assets.json'
ACCOUNT_PURGATORY_URL: 'https://raw.githubusercontent.com/oceanprotocol/list-purgatory/main/list-accounts.json'
- name: docker logs
run: docker logs ocean-ocean-contracts-1 && docker logs ocean-typesense-1
run: docker logs ocean-ocean-contracts-1 && docker logs ocean-kindcluster-1 && docker logs ocean-computetodata-1 && docker logs ocean-typesense-1
if: ${{ failure() }}
- uses: actions/upload-artifact@v4
with:
@@ -231,7 +231,7 @@ jobs:
done

- name: docker logs
run: docker logs ocean-ocean-contracts-1 && docker logs ocean-typesense-1
run: docker logs ocean-ocean-contracts-1 && docker logs ocean-kindcluster-1 && docker logs ocean-computetodata-1 && docker logs ocean-typesense-1
if: ${{ failure() }}

- name: Checkout Ocean Node
@@ -282,7 +282,7 @@ jobs:
with:
repository: 'oceanprotocol/ocean-cli'
path: 'ocean-cli'
ref: 'fix-tests-for-node'
ref: 'feature/fix-compute-dataset'
- name: Setup Ocean CLI
working-directory: ${{ github.workspace }}/ocean-cli
run: |
3 changes: 0 additions & 3 deletions src/components/core/compute/initialize.ts
@@ -336,7 +336,6 @@ export class ComputeInitializeHandler extends CommandHandler {
message: false
}
result.consumerAddress = env.consumerAddress
CORE_LOGGER.info(`elem: ${JSON.stringify(elem)}`)
if ('transferTxId' in elem && elem.transferTxId) {
// search for that compute env and see if it has access to dataset
const paymentValidation = await validateOrderTransaction(
@@ -349,8 +348,6 @@
service.timeout,
blockchain.getSigner()
)
CORE_LOGGER.info(`paymentValidation: ${JSON.stringify(paymentValidation)}`)

if (paymentValidation.isValid === true) {
// order is valid, so let's check providerFees
result.validOrder = elem.transferTxId
75 changes: 31 additions & 44 deletions src/components/core/compute/utils.ts
@@ -83,57 +83,44 @@ export async function validateAlgoForDataset(
if (datasetService.type !== 'compute' || !compute) {
throw new Error('Service not compute')
}
const publishers = compute.publisherTrustedAlgorithmPublishers || []
const algorithms = compute.publisherTrustedAlgorithms || []

// If no restrictions are set, deny by default
const hasTrustedPublishers = publishers.length > 0
const hasTrustedAlgorithms = algorithms.length > 0
if (!hasTrustedPublishers && !hasTrustedAlgorithms) return false

if (algoDID) {
if (
// if not set deny them all
(!Array.isArray(compute.publisherTrustedAlgorithms) ||
compute.publisherTrustedAlgorithms.length === 0) &&
(!Array.isArray(compute.publisherTrustedAlgorithmPublishers) ||
compute.publisherTrustedAlgorithmPublishers.length === 0)
) {
return false
}
// Check if algorithm is explicitly trusted
const isAlgoTrusted =
hasTrustedAlgorithms &&
algorithms.some((algo: any) => {
const didMatch = algo.did === '*' || algo.did === algoDID
const filesMatch =
algo.filesChecksum === '*' || algo.filesChecksum === algoChecksums.files
const containerMatch =
algo.containerSectionChecksum === '*' ||
algo.containerSectionChecksum === algoChecksums.container
return didMatch && filesMatch && containerMatch
})

if (
compute.publisherTrustedAlgorithms.includes('*') &&
compute.publisherTrustedAlgorithmPublishers.includes('*')
) {
return true
}
// Check if algorithm publisher is trusted
let isPublisherTrusted = true
if (hasTrustedPublishers) {
if (!publishers.includes('*')) {
const algoDDO = await new FindDdoHandler(oceanNode).findAndFormatDdo(algoDID)
if (!algoDDO) return false
const algoInstance = DDOManager.getDDOClass(algoDDO)
const { nftAddress } = algoInstance.getDDOFields()

if (
Array.isArray(compute.publisherTrustedAlgorithms) &&
compute.publisherTrustedAlgorithms.length > 0 &&
!compute.publisherTrustedAlgorithms.includes('*')
) {
const trustedAlgo = compute.publisherTrustedAlgorithms.find(
(algo: any) => algo.did === algoDID
)
if (trustedAlgo) {
return (
trustedAlgo.filesChecksum === algoChecksums.files &&
trustedAlgo.containerSectionChecksum === algoChecksums.container
)
}
return false
}
if (
Array.isArray(compute.publisherTrustedAlgorithmPublishers) &&
compute.publisherTrustedAlgorithmPublishers.length > 0 &&
!compute.publisherTrustedAlgorithmPublishers.includes('*')
) {
const algoDDO = await new FindDdoHandler(oceanNode).findAndFormatDdo(algoDID)
const algoInstance = DDOManager.getDDOClass(algoDDO)
const { nftAddress } = algoInstance.getDDOFields()
if (algoDDO) {
return compute.publisherTrustedAlgorithmPublishers
.map((address: string) => address?.toLowerCase())
isPublisherTrusted = publishers
.map((addr: string) => addr?.toLowerCase())
.includes(nftAddress?.toLowerCase())
}
return false
}
return true

return isAlgoTrusted && isPublisherTrusted
}

return compute.allowRawAlgorithm
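
For review, the block below is a simplified, self-contained sketch of the trust check that the reworked utils.ts logic implements: deny by default when neither trust list is configured, require the algorithm to match an entry in publisherTrustedAlgorithms (with '*' wildcards per field), and require the algorithm's NFT address to appear in publisherTrustedAlgorithmPublishers unless that list is empty or wildcarded. The interfaces and the isAlgoAllowed/algoNftAddress names are illustrative only; in the actual code the NFT address is resolved from the algorithm DDO via FindDdoHandler and DDOManager.

// Illustrative sketch only: these types stand in for the node's real DDO/compute interfaces.
interface TrustedAlgorithm {
  did: string
  filesChecksum: string
  containerSectionChecksum: string
}

interface ComputeOptions {
  publisherTrustedAlgorithmPublishers?: string[]
  publisherTrustedAlgorithms?: TrustedAlgorithm[]
  allowRawAlgorithm: boolean
}

interface AlgoChecksums {
  files: string
  container: string
}

// algoNftAddress is a hypothetical parameter; the real implementation resolves it
// from the algorithm DDO (FindDdoHandler + DDOManager) instead of taking it directly.
function isAlgoAllowed(
  compute: ComputeOptions,
  algoDID: string | undefined,
  algoChecksums: AlgoChecksums,
  algoNftAddress?: string
): boolean {
  const publishers = compute.publisherTrustedAlgorithmPublishers || []
  const algorithms = compute.publisherTrustedAlgorithms || []

  // Deny by default when neither trust list is configured.
  if (publishers.length === 0 && algorithms.length === 0) return false

  // Raw (DID-less) algorithms fall through to the allowRawAlgorithm flag.
  if (!algoDID) return compute.allowRawAlgorithm

  // The algorithm must match a trusted entry; each field may be wildcarded with '*'.
  const isAlgoTrusted =
    algorithms.length > 0 &&
    algorithms.some(
      (algo) =>
        (algo.did === '*' || algo.did === algoDID) &&
        (algo.filesChecksum === '*' || algo.filesChecksum === algoChecksums.files) &&
        (algo.containerSectionChecksum === '*' ||
          algo.containerSectionChecksum === algoChecksums.container)
    )

  // The publisher (algorithm NFT address) must be trusted, unless the list is
  // empty or contains the '*' wildcard.
  let isPublisherTrusted = true
  if (publishers.length > 0 && !publishers.includes('*')) {
    if (!algoNftAddress) return false
    isPublisherTrusted = publishers
      .map((addr) => addr.toLowerCase())
      .includes(algoNftAddress.toLowerCase())
  }

  return isAlgoTrusted && isPublisherTrusted
}

// Example: one trusted algorithm DID with wildcarded checksums, no publisher restriction
// isAlgoAllowed(
//   {
//     publisherTrustedAlgorithms: [
//       { did: 'did:op:abc', filesChecksum: '*', containerSectionChecksum: '*' }
//     ],
//     allowRawAlgorithm: false
//   },
//   'did:op:abc',
//   { files: '0x1', container: '0x2' }
// ) // => true
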
3 changes: 0 additions & 3 deletions src/components/database/ElasticSearchDatabase.ts
@@ -367,7 +367,6 @@ export class ElasticsearchOrderDatabase extends AbstractOrderDatabase {
did: string,
startOrderId?: string
) {
DATABASE_LOGGER.info(`index: ${this.getSchema().index}`)
try {
const document = {
orderId,
@@ -380,8 +379,6 @@ export class ElasticsearchOrderDatabase extends AbstractOrderDatabase {
did,
startOrderId
}
DATABASE_LOGGER.info(`document: ${JSON.stringify(document)}`)
DATABASE_LOGGER.info(`orderid: ${orderId}`)

await this.provider.index({
index: this.getSchema().index,