diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6f132ad5c..171e6d27a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -15,7 +15,7 @@ jobs: lint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: actions/setup-node@v2 with: node-version: 'v20.16.0' @@ -30,11 +30,10 @@ jobs: - run: npm ci - run: npm run lint - dockertest: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - run: docker build -t 'ocean-node:mybuild' . build: @@ -47,7 +46,7 @@ jobs: node: ['18.20.4', 'v20.16.0', 'v22.5.1'] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: actions/setup-node@v2 with: node-version: ${{ matrix.node }} @@ -65,7 +64,7 @@ jobs: test_unit: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: actions/setup-node@v2 with: node-version: 'v20.16.0' @@ -90,7 +89,7 @@ jobs: DB_URL: 'http://localhost:8108/?apiKey=xyz' FEE_TOKENS: '{ "1": "0x967da4048cD07aB37855c090aAF366e4ce1b9F48", "137": "0x282d8efCe846A88B159800bd4130ad77443Fa1A1", "80001": "0xd8992Ed72C445c35Cb4A2be468568Ed1079357c8", "56": "0xDCe07662CA8EbC241316a15B611c89711414Dd1a" }' FEE_AMOUNT: '{ "amount": 1, "unit": "MB" }' - - uses: actions/upload-artifact@v2 + - uses: actions/upload-artifact@v4 with: name: coverage path: coverage/ @@ -98,7 +97,7 @@ jobs: test_integration: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: actions/setup-node@v2 with: node-version: 'v20.16.0' @@ -113,7 +112,7 @@ jobs: - name: Set ADDRESS_FILE run: echo "ADDRESS_FILE=${HOME}/.ocean/ocean-contracts/artifacts/address.json" >> $GITHUB_ENV - name: Checkout Barge - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: repository: 'oceanprotocol/barge' path: 'barge' @@ -174,7 +173,7 @@ jobs: - name: docker logs run: docker logs ocean-ocean-contracts-1 && docker logs ocean-kindcluster-1 && docker logs ocean-computetodata-1 && docker logs ocean-typesense-1 if: ${{ failure() }} - - uses: actions/upload-artifact@v2 + - uses: actions/upload-artifact@v4 with: name: coverage path: coverage/ @@ -184,7 +183,7 @@ jobs: steps: - name: Checkout code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up Node.js uses: actions/setup-node@v2 @@ -204,7 +203,7 @@ jobs: run: echo "ADDRESS_FILE=${HOME}/.ocean/ocean-contracts/artifacts/address.json" >> $GITHUB_ENV - name: Checkout Barge - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: repository: 'oceanprotocol/barge' path: 'barge' @@ -254,7 +253,7 @@ jobs: if: ${{ failure() }} - name: Checkout Ocean Node - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: repository: 'oceanprotocol/ocean-node' path: 'ocean-node' @@ -290,7 +289,7 @@ jobs: echo "Ocean Node did not start in time" exit 1 - name: Checkout Ocean CLI - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: repository: 'oceanprotocol/ocean-cli' path: 'ocean-cli' diff --git a/schemas/5.0.0.ttl b/schemas/5.0.0.ttl new file mode 100644 index 000000000..5108320ef --- /dev/null +++ b/schemas/5.0.0.ttl @@ -0,0 +1,272 @@ +@prefix dash: . +@prefix rdf: . +@prefix rdfs: . +@prefix schema: . +@prefix sh: . +@prefix xsd: . + +schema:VerifiableCredentialShape + sh:targetClass schema:VerifiableCredential ; + sh:property [ + sh:path schema:type ; + sh:nodeKind sh:Literal ; + sh:minCount 1 ; + sh:maxCount 1 ; + ] ; + sh:property [ + sh:path schema:credentialSubject ; + sh:node schema:CredentialSubjectShape ; + sh:minCount 1 ; + sh:maxCount 1 ; + ] ; + sh:property [ + sh:path schema:issuer ; + sh:datatype xsd:string ; + sh:minCount 1 ; + sh:maxCount 1 ; + ] ; + sh:property [ + sh:path schema:version ; + sh:datatype xsd:string ; + sh:minCount 1 ; + sh:maxCount 1 ; + ] ; . + +schema:CredentialSubjectShape + sh:targetClass schema:CredentialSubject ; + sh:property [ + sh:path schema:id ; + sh:datatype xsd:string ; + sh:pattern "^did\\:op\\:(.*)$" ; + sh:minCount 1 ; + sh:maxCount 1 ; + sh:maxLength 71 ; + sh:minLength 71 ; + ] ; + sh:property [ + sh:path schema:metadata ; + sh:node schema:MetadataShape ; + sh:minCount 1 ; + sh:maxCount 1 ; + ] ; + sh:property [ + sh:path schema:services ; + sh:node schema:ServiceShape ; + sh:minCount 1 ; + sh:maxCount 64 ; + ] ; + sh:property [ + sh:path schema:credentials ; + sh:node schema:CredentialsShape ; + sh:maxCount 64 ; + ] ; + sh:property [ + sh:path schema:chainId ; + sh:datatype xsd:integer ; + sh:minCount 1 ; + sh:maxCount 1 ; + ] ; + sh:property [ + sh:path schema:nftAddress ; + sh:datatype xsd:string ; + sh:minCount 1 ; + sh:maxCount 1 ; + sh:minLength 42 ; + sh:maxLength 42 ; + ] ; . + +schema:MetadataShape + sh:targetClass schema:Metadata ; + sh:property [ + sh:path schema:created ; + sh:datatype xsd:string ; + sh:minCount 1 ; + sh:maxCount 1 ; + ] ; + sh:property [ + sh:path schema:updated ; + sh:datatype xsd:string ; + sh:minCount 1 ; + sh:maxCount 1 ; + ] ; + sh:property [ + sh:path schema:description ; + sh:node schema:DescriptionObjectShape ; + sh:minCount 1 ; + sh:maxCount 1 ; + ] ; + sh:property [ + sh:path schema:tags ; + sh:datatype xsd:string ; + sh:maxLength 256 ; + sh:maxCount 64 ; + ] ; + sh:property [ + sh:path schema:author ; + sh:datatype xsd:string ; + sh:minCount 1 ; + sh:maxCount 1 ; + sh:maxLength 256 ; + ] ; + sh:property [ + sh:path schema:copyrightHolder ; + sh:datatype xsd:string ; + sh:maxCount 1 ; + sh:pattern "^(.|\\s)*$" ; + sh:maxLength 512 ; + ] ; + sh:property [ + sh:path schema:license ; + sh:node schema:LicenseObjectShape ; + sh:minCount 1 ; + sh:maxCount 1 ; + ] ; + sh:property [ + sh:path schema:additionalInformation ; + sh:node schema:AdditionalInformationShape ; + sh:maxCount 1 ; + ] ; . + +schema:DescriptionObjectShape + sh:property [ + sh:path schema:value ; + sh:datatype xsd:string ; + sh:maxCount 1 ; + ] ; + sh:property [ + sh:path schema:language ; + sh:datatype xsd:string ; + sh:maxCount 1 ; + ] ; + sh:property [ + sh:path schema:direction ; + sh:datatype xsd:string ; + sh:maxCount 1 ; + ] ; . + +schema:LicenseObjectShape + sh:property [ + sh:path schema:value ; + sh:datatype xsd:string ; + sh:maxCount 1 ; + ] ; + sh:property [ + sh:path schema:language ; + sh:datatype xsd:string ; + sh:maxCount 1 ; + ] ; . + +schema:ServiceShape + sh:targetClass schema:Service ; + sh:property [ + sh:path schema:id ; + sh:datatype xsd:string ; + sh:minCount 1 ; + sh:maxCount 1 ; + sh:maxLength 256 ; + ] ; + sh:property [ + sh:path schema:type ; + sh:datatype xsd:string ; + sh:minCount 1 ; + sh:maxCount 1 ; + ] ; + sh:property [ + sh:path schema:name ; + sh:datatype xsd:string ; + sh:pattern "^(.*)$" ; + sh:minCount 1 ; + sh:maxCount 1 ; + sh:maxLength 256 ; + ] ; + sh:property [ + sh:path schema:description ; + sh:node schema:DescriptionObjectShape ; + sh:minCount 1 ; + sh:maxCount 1 ; + ] ; + sh:property [ + sh:path schema:datatokenAddress ; + sh:datatype xsd:string ; + sh:minCount 1 ; + sh:maxCount 1 ; + sh:minLength 42 ; + sh:maxLength 42 ; + ] ; + sh:property [ + sh:path schema:serviceEndpoint ; + sh:datatype xsd:string ; + sh:pattern "^(.*)$" ; + sh:minCount 1 ; + sh:maxCount 1 ; + sh:maxLength 2048 ; + ] ; + sh:property [ + sh:path schema:files ; + sh:datatype xsd:string ; + sh:minCount 1 ; + sh:maxCount 1 ; + sh:maxLength 8192 ; + ] ; + sh:property [ + sh:path schema:timeout ; + sh:datatype xsd:integer ; + sh:minCount 1 ; + sh:maxCount 1 ; + ] ; + sh:property [ + sh:path schema:compute ; + sh:node schema:ComputeShape ; + sh:maxCount 1 ; + ] ; . + +schema:ComputeShape + sh:property [ + sh:path schema:allowRawAlgorithm ; + sh:datatype xsd:boolean ; + sh:minCount 1 ; + sh:maxCount 1 ; + ] ; + sh:property [ + sh:path schema:allowNetworkAccess ; + sh:datatype xsd:boolean ; + sh:minCount 1 ; + sh:maxCount 1 ; + ] ; + sh:property [ + sh:path schema:publisherTrustedAlgorithmPublishers ; + sh:datatype xsd:string ; + sh:maxCount 64 ; + ] ; + sh:property [ + sh:path schema:publisherTrustedAlgorithms ; + sh:node schema:PublisherTrustedAlgorithmShape ; + sh:maxCount 64 ; + ] ; . + +schema:PublisherTrustedAlgorithmShape + sh:property [ + sh:path schema:did ; + sh:datatype xsd:string ; + sh:minCount 1 ; + sh:maxCount 1 ; + ] ; + sh:property [ + sh:path schema:filesChecksum ; + sh:datatype xsd:string ; + sh:minCount 1 ; + sh:maxCount 1 ; + ] ; + sh:property [ + sh:path schema:containerSectionChecksum ; + sh:datatype xsd:string ; + sh:maxCount 1 ; + ] ; . + +schema:AdditionalInformationShape + sh:property [ + sh:path schema:termsAndConditions ; + sh:datatype xsd:boolean ; + sh:minCount 1 ; + sh:maxCount 1 ; + ] ; . diff --git a/schemas/op_ddo_v5_0_0.json b/schemas/op_ddo_v5_0_0.json new file mode 100644 index 000000000..1e2ba1376 --- /dev/null +++ b/schemas/op_ddo_v5_0_0.json @@ -0,0 +1,7 @@ +{ + "name": "op_ddo_v5.0.0", + "enable_nested_fields": true, + "fields": [ + { "name": ".*", "type": "auto", "optional": true } + ] +} diff --git a/src/@types/DDO/CredentialSubject.ts b/src/@types/DDO/CredentialSubject.ts new file mode 100644 index 000000000..83b7fbbba --- /dev/null +++ b/src/@types/DDO/CredentialSubject.ts @@ -0,0 +1,53 @@ +import { Nft } from './Nft' +import { Service } from './Service' +import { Credentials } from './Credentials' +import { Metadata } from './Metadata' +import { Event } from './Event' + +export interface CredentialSubject { + /** + * DID, descentralized ID. + * Computed as sha256(address of NFT contract + chainId) + * @type {string} + */ + id: string + + /** + * NFT contract address + * @type {string} + */ + nftAddress: string + + /** + * ChainId of the network the DDO was published to. + * @type {number} + */ + chainId: number + + /** + * Stores an object describing the asset. + * @type {Metadata} + */ + metadata: Metadata + + /** + * Stores an array of services defining access to the asset. + * @type {Service[]} + */ + services: Service[] + + /** + * Describes the credentials needed to access a dataset + * in addition to the services definition. + * @type {Credentials} + */ + credentials?: Credentials + + /** + * Describes the event of last metadata event + * @type {Event} + */ + event?: Event + + nft?: Nft +} diff --git a/src/@types/DDO/Event.ts b/src/@types/DDO/Event.ts index 84e75fc4f..11fcf47c3 100644 --- a/src/@types/DDO/Event.ts +++ b/src/@types/DDO/Event.ts @@ -3,7 +3,7 @@ export interface Event { * TX id of the last create/update * @type {string} */ - txid?: string + tx?: string /** * Block of txid diff --git a/src/@types/DDO/VerifiableCredential.ts b/src/@types/DDO/VerifiableCredential.ts new file mode 100644 index 000000000..406e80fc2 --- /dev/null +++ b/src/@types/DDO/VerifiableCredential.ts @@ -0,0 +1,44 @@ +import { CredentialSubject } from './CredentialSubject' + +export interface AdditionalVerifiableCredentials { + type: string + data: any +} + +export interface VerifiableCredential { + /** + * Contexts used for validation. + * @type {string[]} + */ + '@context': string[] + + /** + * id optional for verifiable credential + * @type {string} + */ + id?: string + + /** + * @type {CredentialSubject} + */ + credentialSubject: CredentialSubject + + /** + * Id of issuer + * @type {string} + */ + issuer: string + + /** + * Additional ddos + * @type {AdditionalVerifiableCredentials[]} + */ + additionalDdos?: AdditionalVerifiableCredentials[] + + /** + * Version information in SemVer notation + * referring to the DDO spec version + * @type {string} + */ + version: string +} diff --git a/src/components/Indexer/index.ts b/src/components/Indexer/index.ts index b40d75303..b1439daf4 100644 --- a/src/components/Indexer/index.ts +++ b/src/components/Indexer/index.ts @@ -14,6 +14,7 @@ import { import { CommandStatus, JobStatus } from '../../@types/commands.js' import { buildJobIdentifier } from './utils.js' import { create256Hash } from '../../utils/crypt.js' +import { DDOProcessorFactory } from '../core/utils/DDOFactory.js' // emmit events for node export const INDEXER_DDO_EVENT_EMITTER = new EventEmitter() @@ -136,10 +137,14 @@ export class OceanIndexer { ].includes(event.method) ) { // will emit the metadata created/updated event and advertise it to the other peers (on create only) + const processor = DDOProcessorFactory.createProcessor(event.data) + + // Get the DDO identifier using the processor + const { did } = processor.extractDDOFields(event.data) INDEXER_LOGGER.logMessage( - `Emiting "${event.method}" for DDO : ${event.data.id} from network: ${network} ` + `Emiting "${event.method}" for DDO : ${did} from network: ${network} ` ) - INDEXER_DDO_EVENT_EMITTER.emit(event.method, event.data.id) + INDEXER_DDO_EVENT_EMITTER.emit(event.method, did) // remove from indexing list } else if (event.method === INDEXER_CRAWLING_EVENTS.REINDEX_QUEUE_POP) { // remove this one from the queue (means we processed the reindex for this tx) diff --git a/src/components/Indexer/processor.ts b/src/components/Indexer/processor.ts index 2d9c72b72..c75a01642 100644 --- a/src/components/Indexer/processor.ts +++ b/src/components/Indexer/processor.ts @@ -29,6 +29,7 @@ import { DecryptDDOCommand } from '../../@types/commands.js' import { isRemoteDDO, makeDid } from '../core/utils/validateDdoHandler.js' import { create256Hash } from '../../utils/crypt.js' import { URLUtils } from '../../utils/url.js' +import { DDOProcessorFactory } from '../core/utils/DDOFactory.js' class BaseEventProcessor { protected networkId: number @@ -89,27 +90,28 @@ class BaseEventProcessor { try { const { ddo: ddoDatabase, ddoState } = await getDatabase() const saveDDO = await ddoDatabase.update({ ...ddo }) - await ddoState.update( - this.networkId, - saveDDO.id, - saveDDO.nftAddress, - saveDDO.event?.tx, - true - ) - INDEXER_LOGGER.logMessage( - `Saved or updated DDO : ${saveDDO.id} from network: ${this.networkId} triggered by: ${method}` - ) + if (saveDDO) { + const processor = DDOProcessorFactory.createProcessor(saveDDO) + + // Get the DDO identifier using the processor + const { did, nftAddress, event } = processor.extractDDOFields(saveDDO as any) + + await ddoState.update(this.networkId, did, nftAddress, event?.tx, true) + + INDEXER_LOGGER.logMessage( + `Saved or updated DDO : ${did} from network: ${this.networkId} triggered by: ${method}` + ) + } return saveDDO } catch (err) { const { ddoState } = await getDatabase() - await ddoState.update( - this.networkId, - ddo.id, - ddo.nftAddress, - ddo.event?.tx, - true, - err.message - ) + const processor = DDOProcessorFactory.createProcessor(ddo) + + // Get the DDO identifier using the processor + const { did, nftAddress, event } = processor.extractDDOFields(ddo as any) + + await ddoState.update(this.networkId, did, nftAddress, event?.tx, true, err.message) + INDEXER_LOGGER.log( LOG_LEVELS_STR.LEVEL_ERROR, `Error found on ${this.networkId} triggered by: ${method} while creating or updating DDO: ${err}`, @@ -183,6 +185,7 @@ class BaseEventProcessor { } let responseHash + if (response.data instanceof Object) { responseHash = create256Hash(JSON.stringify(response.data)) ddo = response.data @@ -296,11 +299,149 @@ class BaseEventProcessor { const utf8String = toUtf8String(byteArray) ddo = JSON.parse(utf8String) } + return ddo + } + + protected decryptDDOIPFS( + decryptorURL: string, + eventCreator: string, + metadata: any + ): Promise { + INDEXER_LOGGER.logMessage( + `Decompressing DDO from network: ${this.networkId} created by: ${eventCreator} ecnrypted by: ${decryptorURL}` + ) + const byteArray = getBytes(metadata) + const utf8String = toUtf8String(byteArray) + const ddo = JSON.parse(utf8String) + return ddo + } +} + +class V4EventProcessor extends BaseEventProcessor { + async processEventNft( + ddo: any, + chainId: number, + signer: Signer, + owner: string, + event: ethers.Log, + decodedEventData: ethers.LogDescription + ): Promise { + // V4 specific logic, using DDO_V4 structure + ddo.chainId = chainId + ddo.nftAddress = event.address + ddo.datatokens = this.getTokenInfo(ddo.services) + ddo.nft = await this.getNFTInfo( + ddo.nftAddress, + signer, + owner, + parseInt(decodedEventData.args[6]) + ) + const did = ddo.id + return { ddo, did } + } + + async processEventUpdated( + ddo: any, + event: ethers.Log, + from: string, + provider: JsonRpcApiProvider + ): Promise { + if (!ddo.event) { + ddo.event = {} + } + ddo.event.tx = event.transactionHash + ddo.event.from = from + ddo.event.contract = event.address + if (event.blockNumber) { + ddo.event.block = event.blockNumber + // try get block & timestamp from block (only wait 2.5 secs maximum) + const promiseFn = provider.getBlock(event.blockNumber) + const result = await asyncCallWithTimeout(promiseFn, 2500) + if (result.data !== null && !result.timeout) { + ddo.event.datetime = new Date(result.data.timestamp * 1000).toJSON() + } + } else { + ddo.event.block = -1 + } + return ddo + } +} + +class V5EventProcessor extends BaseEventProcessor { + async processEventNft( + ddo: any, + chainId: number, + signer: Signer, + owner: string, + event: ethers.Log, + decodedEventData: ethers.LogDescription + ): Promise { + // Specific logic for DDO version 5.0.0 (VerifiableCredential) + const vc = ddo.payload + vc.credentialSubject.chainId = chainId + vc.credentialSubject.nftAddress = event.address + vc.credentialSubject.datatokens = this.getTokenInfo(vc.credentialSubject.services) + vc.nft = await this.getNFTInfo( + vc.credentialSubject.nftAddress, + signer, + owner, + parseInt(decodedEventData.args[6]) + ) + const did = vc.credentialSubject.id + return { ddo: vc, did } + } + async processEventUpdated( + ddo: any, + event: ethers.Log, + from: string, + provider: JsonRpcApiProvider + ): Promise { + if (!ddo.credentialSubject.event) { + ddo.credentialSubject.event = {} + } + ddo.credentialSubject.event.tx = event.transactionHash + ddo.credentialSubject.event.from = from + ddo.credentialSubject.event.contract = event.address + if (event.blockNumber) { + ddo.credentialSubject.event.block = event.blockNumber + const promiseFn = provider.getBlock(event.blockNumber) + const result = await asyncCallWithTimeout(promiseFn, 2500) + if (result.data !== null && !result.timeout) { + ddo.credentialSubject.event.datetime = new Date( + result.data.timestamp * 1000 + ).toJSON() + } + } else { + ddo.credentialSubject.event.block = -1 + } return ddo } } +type EventProcessorType = V4EventProcessor | V5EventProcessor +class DDOProcessorEventFactory { + static createProcessor(ddo: any): EventProcessorType { + let { version } = ddo + if (ddo.payload) { + // eslint-disable-next-line prefer-destructuring + version = ddo.payload.version + } + switch (version) { + case '4.1.0': + case '4.3.0': + case '4.5.0': + return new V4EventProcessor(ddo.chainId) + + case '5.0.0': + return new V5EventProcessor(ddo.payload.credentialSubject.chainId) + + default: + throw new Error(`Unsupported DDO version: ${version}`) + } + } +} + export class MetadataEventProcessor extends BaseEventProcessor { async processEvent( event: ethers.Log, @@ -309,7 +450,7 @@ export class MetadataEventProcessor extends BaseEventProcessor { provider: JsonRpcApiProvider, eventName: string ): Promise { - let did = 'did:op' + const did = 'did:op' try { const { ddo: ddoDatabase, ddoState } = await getDatabase() const wasDeployedByUs = await wasNFTDeployedByOurFactory( @@ -345,47 +486,56 @@ export class MetadataEventProcessor extends BaseEventProcessor { metadataHash, metadata ) - const ddo = await this.processDDO(decryptedDDO) - if (ddo.id !== makeDid(event.address, chainId.toString(10))) { - INDEXER_LOGGER.error( - `Decrypted DDO ID is not matching the generated hash for DID.` - ) + + const ddoProcessed = await this.processDDO(decryptedDDO) + let ddo = ddoProcessed + if ( + !isRemoteDDO(decryptedDDO) && + parseInt(flag) !== 2 && + !this.checkDdoHash(ddo, metadataHash) + ) { return } - // for unencrypted DDOs - console.log(ddo.id) - console.log(metadataHash) - if (parseInt(flag) !== 2 && !this.checkDdoHash(ddo, metadataHash)) { - return + if (ddo.encryptedData) { + ddo = await this.decryptDDOIPFS( + decodedEventData.args[2], + owner, + ddo.encryptedData + ) } - did = ddo.id - // stuff that we overwrite - ddo.chainId = chainId - ddo.nftAddress = event.address - ddo.datatokens = this.getTokenInfo(ddo.services) - ddo.nft = await this.getNFTInfo( - ddo.nftAddress, + const processor = DDOProcessorEventFactory.createProcessor(ddo) + const { ddo: processedDDO, did } = await processor.processEventNft( + ddo, + chainId, signer, owner, - parseInt(decodedEventData.args[6]) + event, + decodedEventData ) + ddo = processedDDO + if (did !== makeDid(event.address, chainId.toString(10))) { + INDEXER_LOGGER.error( + `Decrypted DDO ID is not matching the generated hash for DID.` + ) + return + } INDEXER_LOGGER.logMessage( - `Processed new DDO data ${ddo.id} with txHash ${event.transactionHash} from block ${event.blockNumber}`, + `Processed new DDO data ${did} with txHash ${event.transactionHash} from block ${event.blockNumber}`, true ) - const previousDdo = await ddoDatabase.retrieve(ddo.id) + const previousDdo = await ddoDatabase.retrieve(did) if (eventName === EVENTS.METADATA_CREATED) { if (previousDdo && previousDdo.nft.state === MetadataStates.ACTIVE) { - INDEXER_LOGGER.logMessage(`DDO ${ddo.id} is already registered as active`, true) + INDEXER_LOGGER.logMessage(`DDO ${did} is already registered as active`, true) await ddoState.update( this.networkId, did, event.address, event.transactionHash, false, - `DDO ${ddo.id} is already registered as active` + `DDO ${did} is already registered as active` ) return } @@ -394,7 +544,7 @@ export class MetadataEventProcessor extends BaseEventProcessor { if (eventName === EVENTS.METADATA_UPDATED) { if (!previousDdo) { INDEXER_LOGGER.logMessage( - `Previous DDO with did ${ddo.id} was not found the database. Maybe it was deleted/hidden to some violation issues`, + `Previous DDO with did ${did} was not found the database. Maybe it was deleted/hidden to some violation issues`, true ) await ddoState.update( @@ -403,7 +553,7 @@ export class MetadataEventProcessor extends BaseEventProcessor { event.address, event.transactionHash, false, - `Previous DDO with did ${ddo.id} was not found the database. Maybe it was deleted/hidden to some violation issues` + `Previous DDO with did ${did} was not found the database. Maybe it was deleted/hidden to some violation issues` ) return } @@ -431,23 +581,7 @@ export class MetadataEventProcessor extends BaseEventProcessor { // we need to store the event data (either metadata created or update and is updatable) if ([EVENTS.METADATA_CREATED, EVENTS.METADATA_UPDATED].includes(eventName)) { - if (!ddo.event) { - ddo.event = {} - } - ddo.event.tx = event.transactionHash - ddo.event.from = from - ddo.event.contract = event.address - if (event.blockNumber) { - ddo.event.block = event.blockNumber - // try get block & timestamp from block (only wait 2.5 secs maximum) - const promiseFn = provider.getBlock(event.blockNumber) - const result = await asyncCallWithTimeout(promiseFn, 2500) - if (result.data !== null && !result.timeout) { - ddo.event.datetime = new Date(result.data.timestamp * 1000).toJSON() - } - } else { - ddo.event.block = -1 - } + ddo = await processor.processEventUpdated(ddo, event, from, provider) } // always call, but only create instance once @@ -480,11 +614,9 @@ export class MetadataEventProcessor extends BaseEventProcessor { async processDDO(ddo: any) { if (isRemoteDDO(ddo)) { INDEXER_LOGGER.logMessage('DDO is remote', true) - const storage = Storage.getStorageClass(ddo.remote, await getConfiguration()) const result = await storage.getReadableStream() const streamToStringDDO = await streamToString(result.stream as Readable) - return JSON.parse(streamToStringDDO) } @@ -513,14 +645,19 @@ export class MetadataEventProcessor extends BaseEventProcessor { isUpdateable(previousDdo: any, txHash: string, block: number): [boolean, string] { let errorMsg: string - const ddoTxId = previousDdo.event.tx + const processor = DDOProcessorFactory.createProcessor(previousDdo) + + // Get the DDO identifier using the processor + const { event } = processor.extractDDOFields(previousDdo as any) + const ddoTxId = event.tx + const ddoBlock = event.block + // do not update if we have the same txid if (txHash === ddoTxId) { errorMsg = `Previous DDO has the same tx id, no need to update: event-txid=${txHash} <> asset-event-txid=${ddoTxId}` INDEXER_LOGGER.log(LOG_LEVELS_STR.LEVEL_DEBUG, errorMsg, true) return [false, errorMsg] } - const ddoBlock = previousDdo.event.block // do not update if we have the same block if (block === ddoBlock) { errorMsg = `Asset was updated later (block: ${ddoBlock}) vs transaction block: ${block}` diff --git a/src/components/core/handler/ddoHandler.ts b/src/components/core/handler/ddoHandler.ts index 8ea2b0207..57966a591 100644 --- a/src/components/core/handler/ddoHandler.ts +++ b/src/components/core/handler/ddoHandler.ts @@ -22,8 +22,7 @@ import lzmajs from 'lzma-purejs-requirejs' import { isRemoteDDO, getValidationSignature, - makeDid, - validateObject + makeDid } from '../utils/validateDdoHandler.js' import { getConfiguration } from '../../../utils/config.js' import { @@ -46,6 +45,7 @@ import { wasNFTDeployedByOurFactory } from '../../Indexer/utils.js' import { validateDDOHash } from '../../../utils/asset.js' +import { DDOProcessorFactory } from '../utils/DDOFactory.js' const MAX_NUM_PROVIDERS = 5 // after 60 seconds it returns whatever info we have available @@ -206,7 +206,6 @@ export class DecryptDdoHandler extends Handler { let encryptedDocument: Uint8Array let flags: number let documentHash: string - if (transactionId) { try { const receipt = await provider.getTransactionReceipt(transactionId) @@ -339,7 +338,7 @@ export class DecryptDdoHandler extends Handler { // did matches const ddo = JSON.parse(decryptedDocument.toString()) - if (ddo.id !== makeDid(dataNftAddress, chainId)) { + if (!isRemoteDDO(ddo) && ddo.id !== makeDid(dataNftAddress, chainId)) { CORE_LOGGER.error(`Decrypted DDO ID is not matching the generated hash for DID.`) return { stream: null, @@ -351,8 +350,10 @@ export class DecryptDdoHandler extends Handler { } // checksum matches + const decryptedDocumentString2 = decryptedDocument.toString() + const ddoObject2 = JSON.parse(decryptedDocumentString2) const decryptedDocumentHash = create256Hash(decryptedDocument.toString()) - if (decryptedDocumentHash !== documentHash) { + if (!isRemoteDDO(ddoObject2) && decryptedDocumentHash !== documentHash) { CORE_LOGGER.logMessage( `Decrypt DDO: error checksum does not match ${decryptedDocumentHash} with ${documentHash}`, true @@ -726,8 +727,7 @@ export class FindDdoHandler extends Handler { // First try to find the DDO Locally if findDDO is not enforced if (!force) { try { - const ddo = await node.getDatabase().ddo.retrieve(ddoId) - return ddo as DDO + return await node.getDatabase().ddo.retrieve(ddoId) } catch (error) { CORE_LOGGER.logMessage( `Unable to find DDO locally. Proceeding to call findDDO`, @@ -788,7 +788,14 @@ export class ValidateDDOHandler extends Handler { validate(command: ValidateDDOCommand): ValidateParams { let validation = validateCommandParameters(command, ['ddo']) if (validation.valid) { - validation = validateDDOIdentifier(command.ddo.id) + // Use the factory to create a processor based on the DDO version or structure + const processor = DDOProcessorFactory.createProcessor(command.ddo) + + // Get the DDO identifier using the processor + const { did: ddoId } = processor.extractDDOFields(command.ddo as any) + + // Validate the DDO identifier + validation = validateDDOIdentifier(ddoId) } return validation @@ -799,12 +806,14 @@ export class ValidateDDOHandler extends Handler { if (this.shouldDenyTaskHandling(validationResponse)) { return validationResponse } + try { - const validation = await validateObject( - task.ddo, - task.ddo.chainId, - task.ddo.nftAddress - ) + // Use the factory to create a processor for DDO validation + const processor = DDOProcessorFactory.createProcessor(task.ddo as any) + + // Validate the DDO using the processor + const validation = await processor.validateDDO(task.ddo) + if (validation[0] === false) { CORE_LOGGER.logMessageWithEmoji( `Validation failed with error: ${validation[1]}`, @@ -817,6 +826,8 @@ export class ValidateDDOHandler extends Handler { status: { httpStatus: 400, error: `Validation error: ${validation[1]}` } } } + + // Generate signature for the validated DDO const signature = await getValidationSignature(JSON.stringify(task.ddo)) return { stream: Readable.from(JSON.stringify(signature)), diff --git a/src/components/core/handler/downloadHandler.ts b/src/components/core/handler/downloadHandler.ts index fa85aabce..f2a45092e 100644 --- a/src/components/core/handler/downloadHandler.ts +++ b/src/components/core/handler/downloadHandler.ts @@ -35,6 +35,7 @@ import { DDO } from '../../../@types/DDO/DDO.js' import { sanitizeServiceFiles } from '../../../utils/util.js' import { getNFTContract } from '../../Indexer/utils.js' import { OrdableAssetResponse } from '../../../@types/Asset.js' +import { DDOProcessorFactory } from '../utils/DDOFactory.js' export const FILE_ENCRYPTION_ALGORITHM = 'aes-256-cbc' export function isOrderingAllowedForAsset(asset: DDO): OrdableAssetResponse { @@ -185,14 +186,16 @@ export async function handleDownloadUrlCommand( } export function validateFilesStructure( - ddo: DDO, + nftAddress: string, service: Service, decriptedFileObject: any ): boolean { if ( - decriptedFileObject.nftAddress?.toLowerCase() !== ddo.nftAddress?.toLowerCase() || - decriptedFileObject.datatokenAddress?.toLowerCase() !== - service.datatokenAddress?.toLowerCase() + (decriptedFileObject.nftAddress && + decriptedFileObject.nftAddress?.toLowerCase() !== nftAddress?.toLowerCase()) || + (decriptedFileObject.datatokenAddress && + decriptedFileObject.datatokenAddress?.toLowerCase() !== + service.datatokenAddress?.toLowerCase()) ) { return false } @@ -224,7 +227,6 @@ export class DownloadHandler extends Handler { .getCoreHandlers() .getHandler(PROTOCOL_COMMANDS.FIND_DDO) as FindDdoHandler const ddo = await handler.findAndFormatDdo(task.documentId) - if (ddo) { CORE_LOGGER.logMessage('DDO for asset found: ' + ddo, true) } else { @@ -254,7 +256,17 @@ export class DownloadHandler extends Handler { } // 2. Validate ddo and credentials - if (!ddo.chainId || !ddo.nftAddress || !ddo.metadata) { + + const processor = DDOProcessorFactory.createProcessor(ddo) + const { + chainId: ddoChainId, + nftAddress, + metadata, + credentials, + did + } = processor.extractDDOFields(ddo as any) + + if (!ddoChainId || !nftAddress || !metadata) { CORE_LOGGER.logMessage('Error: DDO malformed or disabled', true) return { stream: null, @@ -266,15 +278,15 @@ export class DownloadHandler extends Handler { } // check credentials - if (ddo.credentials) { - const accessGranted = checkCredentials(ddo.credentials, task.consumerAddress) + if (credentials) { + const accessGranted = checkCredentials(credentials, task.consumerAddress) if (!accessGranted) { - CORE_LOGGER.logMessage(`Error: Access to asset ${ddo.id} was denied`, true) + CORE_LOGGER.logMessage(`Error: Access to asset ${did} was denied`, true) return { stream: null, status: { httpStatus: 500, - error: `Error: Access to asset ${ddo.id} was denied` + error: `Error: Access to asset ${did} was denied` } } } @@ -286,7 +298,7 @@ export class DownloadHandler extends Handler { task.consumerAddress, parseInt(task.nonce), task.signature, - String(ddo.id + task.nonce) // ddo.id + String(did + task.nonce) ) if (!nonceCheckResult.valid) { @@ -305,7 +317,7 @@ export class DownloadHandler extends Handler { } // from now on, we need blockchain checks const config = await getConfiguration() - const { rpc, network, chainId, fallbackRPCs } = config.supportedNetworks[ddo.chainId] + const { rpc, network, chainId, fallbackRPCs } = config.supportedNetworks[ddoChainId] let provider let blockchain try { @@ -345,18 +357,18 @@ export class DownloadHandler extends Handler { } } // check lifecycle state of the asset - const nftContract = getNFTContract(blockchain.getSigner(), ddo.nftAddress) + const nftContract = getNFTContract(blockchain.getSigner(), nftAddress) const nftState = Number(await nftContract.metaDataState()) if (nftState !== 0 && nftState !== 5) { CORE_LOGGER.logMessage( - `Error: Asset with id ${ddo.id} is not in an active state`, + `Error: Asset with id ${did} is not in an active state`, true ) return { stream: null, status: { httpStatus: 500, - error: `Error: Asset with id ${ddo.id} is not in an active state` + error: `Error: Asset with id ${did} is not in an active state` } } } @@ -406,7 +418,7 @@ export class DownloadHandler extends Handler { for (const cluster of c2dClusters) { const engine = C2DEngine.getC2DClass(cluster) - const environments = await engine.getComputeEnvironments(ddo.chainId) + const environments = await engine.getComputeEnvironments(ddoChainId) for (const env of environments) computeAddrs.push(env.consumerAddress?.toLowerCase()) } @@ -447,7 +459,7 @@ export class DownloadHandler extends Handler { task.transferTxId, task.consumerAddress, provider, - ddo.nftAddress, + nftAddress, service.datatokenAddress, AssetUtils.getServiceIndexById(ddo, task.serviceId), service.timeout, @@ -472,7 +484,6 @@ export class DownloadHandler extends Handler { } } } - try { // 7. Decrypt the url const uint8ArrayHex = Uint8Array.from( @@ -482,8 +493,10 @@ export class DownloadHandler extends Handler { // Convert the decrypted bytes back to a string const decryptedFilesString = Buffer.from(decryptedUrlBytes).toString() const decryptedFileData = JSON.parse(decryptedFilesString) - const decriptedFileObject: any = decryptedFileData.files[task.fileIndex] - if (!validateFilesStructure(ddo, service, decryptedFileData)) { + const decriptedFileObject: any = decryptedFileData.files + ? decryptedFileData.files[task.fileIndex] + : decryptedFileData[task.fileIndex] + if (!validateFilesStructure(nftAddress, service, decryptedFileData)) { CORE_LOGGER.error( 'Unauthorized download operation. Decrypted "nftAddress" and "datatokenAddress" do not match the original DDO' ) diff --git a/src/components/core/handler/feesHandler.ts b/src/components/core/handler/feesHandler.ts index 985e5f45b..f534bdfb7 100644 --- a/src/components/core/handler/feesHandler.ts +++ b/src/components/core/handler/feesHandler.ts @@ -16,6 +16,7 @@ import { ProviderInitialize } from '../../../@types/Fees.js' import { getNonce } from '../utils/nonceHandler.js' import { streamToString } from '../../../utils/util.js' import { isOrderingAllowedForAsset } from './downloadHandler.js' +import { DDOProcessorFactory } from '../utils/DDOFactory.js' export class FeesHandler extends Handler { validate(command: GetFeesCommand): ValidateParams { @@ -46,7 +47,6 @@ export class FeesHandler extends Handler { if (!ddo) { errorMsg = 'Cannot resolve DID' } - const isOrdable = isOrderingAllowedForAsset(ddo) if (!isOrdable.isOrdable) { PROVIDER_LOGGER.error(isOrdable.reason) @@ -58,8 +58,12 @@ export class FeesHandler extends Handler { } } } + const processor = DDOProcessorFactory.createProcessor(ddo) + + // Get the DDO identifier using the processor + const { services } = processor.extractDDOFields(ddo as any) - const service = ddo.services.find((what: any) => what.id === task.serviceId) + const service = services.find((what: any) => what.id === task.serviceId) if (!service) { errorMsg = 'Invalid serviceId' } diff --git a/src/components/core/utils/DDOFactory.ts b/src/components/core/utils/DDOFactory.ts new file mode 100644 index 000000000..0c11460cd --- /dev/null +++ b/src/components/core/utils/DDOFactory.ts @@ -0,0 +1,74 @@ +import { DDO } from '../../../@types/DDO/DDO' +import { VerifiableCredential } from '../../../@types/DDO/VerifiableCredential' +import { Service } from '../../../@types/DDO/Service' +import { Metadata } from '../../../@types/DDO/Metadata' +import { Credentials } from '../../../@types/DDO/Credentials' +import { Event } from '../../../@types/DDO/Event' +import { validateObject } from './validateDdoHandler.js' + +interface ExtractedDDOFields { + did: string + nftAddress: string + chainId: number + services: Service[] + metadata: Metadata + credentials: Credentials + event: Event +} + +class DDOProcessorV4 { + extractDDOFields(ddo: DDO): ExtractedDDOFields { + return { + did: ddo.id, + nftAddress: ddo.nftAddress, + chainId: ddo.chainId, + services: ddo.services, + metadata: ddo.metadata, + credentials: ddo.credentials, + event: ddo.event + } + } + + async validateDDO(ddo: DDO): Promise<[boolean, Record]> { + return await validateObject(ddo, ddo.chainId, ddo.nftAddress) + } +} + +class DDOProcessorV5 { + extractDDOFields(ddo: VerifiableCredential): ExtractedDDOFields { + return { + did: ddo.credentialSubject.id, + nftAddress: ddo.credentialSubject.nftAddress, + chainId: ddo.credentialSubject.chainId, + services: ddo.credentialSubject.services, + metadata: ddo.credentialSubject.metadata, + credentials: ddo.credentialSubject.credentials, + event: ddo.credentialSubject.event + } + } + + async validateDDO(ddo: any): Promise<[boolean, Record]> { + return await validateObject( + ddo, + ddo.credentialSubject.chainId, + ddo.credentialSubject.nftAddress + ) + } +} + +export class DDOProcessorFactory { + static createProcessor(ddo: any): DDOProcessorV5 | DDOProcessorV4 { + switch (ddo.version) { + case '4.1.0': + case '4.3.0': + case '4.5.0': + return new DDOProcessorV4() + + case '5.0.0': + return new DDOProcessorV5() + + default: + throw new Error(`Unsupported DDO version: ${ddo.version}`) + } + } +} diff --git a/src/components/core/utils/feesHandler.ts b/src/components/core/utils/feesHandler.ts index 39ffa4378..7ff38b189 100644 --- a/src/components/core/utils/feesHandler.ts +++ b/src/components/core/utils/feesHandler.ts @@ -27,6 +27,8 @@ import { getOceanArtifactsAdresses } from '../../../utils/address.js' import ERC20Template from '@oceanprotocol/contracts/artifacts/contracts/templates/ERC20TemplateEnterprise.sol/ERC20TemplateEnterprise.json' assert { type: 'json' } import { fetchEventFromTransaction } from '../../../utils/util.js' import { fetchTransactionReceipt } from './validateOrders.js' +import { DDOProcessorFactory } from './DDOFactory.js' +import { VerifiableCredential } from '../../../@types/DDO/VerifiableCredential.js' async function calculateProviderFeeAmount( validUntil: number, @@ -50,13 +52,17 @@ async function calculateProviderFeeAmount( } export async function createProviderFee( - asset: DDO, + asset: DDO | VerifiableCredential, service: Service, validUntil: number, computeEnv: ComputeEnvironment, computeValidUntil: number ): Promise | undefined { // round for safety + const processor = DDOProcessorFactory.createProcessor(asset) + + // Get the DDO identifier using the processor + const { chainId } = processor.extractDDOFields(asset as any) validUntil = Math.round(validUntil) computeValidUntil = Math.round(computeValidUntil) const providerData = { @@ -65,7 +71,7 @@ export async function createProviderFee( dt: service.datatokenAddress, id: service.id } - const providerWallet = await getProviderWallet(String(asset.chainId)) + const providerWallet = await getProviderWallet(String(chainId)) const providerFeeAddress: string = providerWallet.address let providerFeeAmount: number @@ -76,7 +82,7 @@ export async function createProviderFee( providerFeeToken = computeEnv.feeToken } else { // it's download, take it from config - providerFeeToken = await getProviderFeeToken(asset.chainId) + providerFeeToken = await getProviderFeeToken(chainId) } if (providerFeeToken?.toLowerCase() === ZeroAddress) { providerFeeAmount = 0 @@ -85,7 +91,7 @@ export async function createProviderFee( } if (providerFeeToken && providerFeeToken?.toLowerCase() !== ZeroAddress) { - const provider = await getJsonRpcProvider(asset.chainId) + const provider = await getJsonRpcProvider(chainId) const decimals = await getDatatokenDecimals(providerFeeToken, provider) providerFeeAmountFormatted = parseUnits(providerFeeAmount.toString(10), decimals) } else { diff --git a/src/components/core/utils/validateDdoHandler.ts b/src/components/core/utils/validateDdoHandler.ts index ddc1fa8c9..f5abb93c2 100644 --- a/src/components/core/utils/validateDdoHandler.ts +++ b/src/components/core/utils/validateDdoHandler.ts @@ -13,7 +13,7 @@ import { getProviderWallet } from './feesHandler.js' import { Readable } from 'stream' const CURRENT_VERSION = '4.5.0' -const ALLOWED_VERSIONS = ['4.1.0', '4.3.0', '4.5.0'] +const ALLOWED_VERSIONS = ['4.1.0', '4.3.0', '4.5.0', '5.0.0'] export function getSchema(version: string = CURRENT_VERSION): string { if (!ALLOWED_VERSIONS.includes(version)) { @@ -55,13 +55,26 @@ export async function validateObject( nftAddress: string ): Promise<[boolean, Record]> { const ddoCopy = JSON.parse(JSON.stringify(obj)) - ddoCopy['@type'] = 'DDO' - + // Handle different version-specific logic + const version = ddoCopy.version || CURRENT_VERSION + if ( + ddoCopy.type && + Array.isArray(ddoCopy.type) && + ddoCopy.type.includes('VerifiableCredential') + ) { + ddoCopy['@type'] = 'VerifiableCredential' + ddoCopy['@context'] = { + '@vocab': 'https://www.w3.org/2018/credentials/v1' + } + } else { + ddoCopy['@type'] = 'DDO' + ddoCopy['@context'] = { + '@vocab': 'http://schema.org/' + } + } const extraErrors: Record = {} // overwrite context - ddoCopy['@context'] = { - '@vocab': 'http://schema.org/' - } + /* if (!('@context' in ddoCopy) || !Array.isArray(ddoCopy['@context'])) { ddoCopy['@context'] = { '@vocab': 'http://schema.org/' @@ -107,7 +120,6 @@ export async function validateObject( if (!('id' in extraErrors)) extraErrors.id = [] extraErrors.id.push('did is not valid for chain Id and nft address') } - const version = ddoCopy.version || CURRENT_VERSION const schemaFilePath = getSchema(version) CORE_LOGGER.logMessage(`Using ` + schemaFilePath, true) diff --git a/src/components/database/index.ts b/src/components/database/index.ts index 39db24098..41568e8f9 100644 --- a/src/components/database/index.ts +++ b/src/components/database/index.ts @@ -15,6 +15,7 @@ import { SQLiteProvider } from './sqlite.js' import { URLUtils } from '../../utils/url.js' import fs from 'fs' import path from 'path' +import { DDOProcessorFactory } from '../core/utils/DDOFactory.js' export class OrderDatabase { private provider: Typesense @@ -375,10 +376,14 @@ export class DdoDatabase { // See github issue: https://github.com/oceanprotocol/ocean-node/issues/256 return true } else { - const validation = await validateObject(ddo, ddo.chainId, ddo.nftAddress) + const processor = DDOProcessorFactory.createProcessor(ddo) + const { did, chainId, nftAddress } = processor.extractDDOFields(ddo as any) + + const validation = await validateObject(ddo, chainId, nftAddress) + if (validation[0] === true) { DATABASE_LOGGER.logMessageWithEmoji( - `Validation of DDO with did: ${ddo.id} has passed`, + `Validation of DDO with did: ${did} has passed`, true, GENERIC_EMOJIS.EMOJI_OCEAN_WAVE, LOG_LEVELS_STR.LEVEL_INFO @@ -446,6 +451,11 @@ export class DdoDatabase { } async create(ddo: Record) { + const processor = DDOProcessorFactory.createProcessor(ddo) + + // Get the DDO identifier using the processor + const { did } = processor.extractDDOFields(ddo as any) + const schema = this.getDDOSchema(ddo) if (!schema) { throw new Error(`Schema for version ${ddo.version} not found`) @@ -461,7 +471,7 @@ export class DdoDatabase { throw new Error(`Validation of DDO with schema version ${ddo.version} failed`) } } catch (error) { - const errorMsg = `Error when creating DDO entry ${ddo.id}: ` + error.message + const errorMsg = `Error when creating DDO entry ${did}: ` + error.message DATABASE_LOGGER.logMessageWithEmoji( errorMsg, true, @@ -476,7 +486,21 @@ export class DdoDatabase { let ddo = null for (const schema of this.schemas) { try { - ddo = await this.provider.collections(schema.name).documents().retrieve(id) + if (schema.name === 'op_ddo_v5.0.0') { + const response = await this.provider + .collections(schema.name) + .documents() + .search({ + q: id, + query_by: 'credentialSubject.id' + }) + if (response.hits.length > 0) { + ddo = response.hits[0].document + } + } else { + ddo = await this.provider.collections(schema.name).documents().retrieve(id) + } + if (ddo) { break } @@ -507,6 +531,10 @@ export class DdoDatabase { } async update(ddo: Record) { + const processor = DDOProcessorFactory.createProcessor(ddo) + + // Get the DDO identifier using the processor + const { did } = processor.extractDDOFields(ddo as any) const schema = this.getDDOSchema(ddo) if (!schema) { throw new Error(`Schema for version ${ddo.version} not found`) @@ -514,10 +542,34 @@ export class DdoDatabase { try { const validation = await this.validateDDO(ddo) if (validation === true) { - return await this.provider - .collections(schema.name) - .documents() - .update(ddo.id, ddo) + if (schema.name === 'op_ddo_v5.0.0') { + const searchResponse = await this.provider + .collections(schema.name) + .documents() + .search({ + q: did, + query_by: 'credentialSubject.id' + }) + + if (searchResponse.hits.length === 0) { + const error = new TypesenseError( + `Document with credentialSubject.id ${did} not found` + ) + error.httpStatus = 404 + throw error + } + + // Step 3: Get the internal document ID from the search result + const documentId = searchResponse.hits[0].document.id + + // Step 4: Perform the update using the document ID from the search + return await this.provider + .collections(schema.name) + .documents() + .update(documentId, ddo) + } else { + return await this.provider.collections(schema.name).documents().update(did, ddo) + } } else { throw new Error( `Validation of DDO with schema version ${ddo.version} failed with errors` @@ -527,11 +579,10 @@ export class DdoDatabase { if (error instanceof TypesenseError && error.httpStatus === 404) { // No DDO was found to update so we will create a new one. // First we must delete the old version if it exist in another collection - await this.delete(ddo.id) - + await this.delete(did) return await this.create(ddo) } - const errorMsg = `Error when updating DDO entry ${ddo.id}: ` + error.message + const errorMsg = `Error when updating DDO entry ${did}: ` + error.message DATABASE_LOGGER.logMessageWithEmoji( errorMsg, true, diff --git a/src/test/data/ddo.ts b/src/test/data/ddo.ts index 07f309301..31771334d 100644 --- a/src/test/data/ddo.ts +++ b/src/test/data/ddo.ts @@ -307,6 +307,118 @@ export const ddov5 = { } } +export const ddoV5VC = { + '@context': ['https://w3id.org/did/v1'], + version: '5.0.0', + credentialSubject: { + id: 'did:op:fa0e8fa9550e8eb13392d6eeb9ba9f8111801b332c8d2345b350b3bc66b379d5', + metadata: { + created: '2024-10-03T14:35:20Z', + updated: '2024-10-03T14:35:20Z', + type: 'dataset', + name: 'DDO 5.0.0 Asset', + description: { + '@value': 'New asset published using ocean CLI tool with version 5.0.0', + '@language': 'en', + '@direction': 'ltr' + }, + copyrightHolder: 'Your Copyright Holder', + providedBy: 'Your Organization', + author: 'oceanprotocol', + license: { + name: 'https://market.oceanprotocol.com/terms' + }, + tags: ['version-5', 'new-schema'], + categories: ['data', 'ocean-protocol'], + additionalInformation: { + termsAndConditions: true + } + }, + services: [ + { + id: 'ccb398c50d6abd5b456e8d7242bd856a1767a890b537c2f8c10ba8b8a10e6025', + type: 'access', + name: 'Access Service', + description: { + '@value': 'Service for accessing the dataset', + '@language': 'en', + '@direction': 'ltr' + }, + datatokenAddress: '0xff4ae9869cafb5ff725f962f3bbc22fb303a8ad8', + nftAddress: '0xBB1081DbF3227bbB233Db68f7117114baBb43656', + serviceEndpoint: 'https://v4.provider.oceanprotocol.com', + files: + 'https://dumps.wikimedia.org/enwiki/latest/enwiki-latest-abstract10.xml.gz-rss.xml', + timeout: 86400, + compute: { + allowRawAlgorithm: false, + allowNetworkAccess: true + }, + state: 0, + credentials: [{}] + } + ], + credentials: [{}], + chainId: 137, + nftAddress: '0xBB1081DbF3227bbB233Db68f7117114baBb43656', + stats: { + allocated: 0, + orders: 0, + price: { + value: '0' + } + } + }, + issuer: 'did:op:issuer-did', + type: ['VerifiableCredential'], + additionalDdos: [{}] +} + +export const invalidDDOV5VC = { + '@context': ['https://w3id.org/did/v1'], + version: '5.0.0', + credentialSubject: { + id: 'did:op:fa0e8fa9550e8eb13392d6eeb9ba9f8111801b332c8d2345b350b3bc66b379d5', + services: [ + { + id: 'ccb398c50d6abd5b456e8d7242bd856a1767a890b537c2f8c10ba8b8a10e6025', + type: 'access', + name: 'Access Service', + description: { + '@value': 'Service for accessing the dataset', + '@language': 'en', + '@direction': 'ltr' + }, + datatokenAddress: '0xff4ae9869cafb5ff725f962f3bbc22fb303a8ad8', + nftAddress: '0xBB1081DbF3227bbB233Db68f7117114baBb43656', + serviceEndpoint: 'https://v4.provider.oceanprotocol.com', + files: + 'https://dumps.wikimedia.org/enwiki/latest/enwiki-latest-abstract10.xml.gz-rss.xml', + timeout: 86400, + compute: { + allowRawAlgorithm: false, + allowNetworkAccess: true + }, + state: 0, + credentials: [{}] + } + ], + credentials: [{}], + chainId: 137, + nftAddress: '0xBB1081DbF3227bbB233Db68f7117114baBb43656', + stats: { + allocated: 0, + orders: 0, + price: { + value: '0' + } + } + }, + issuer: 'did:op:issuer-did', + type: ['VerifiableCredential'], + additionalDdos: [{}] +} + export const publishAlgoDDO = { '@context': ['https://w3id.org/did/v1'], id: '', diff --git a/src/test/integration/indexer.test.ts b/src/test/integration/indexer.test.ts index a38a945d8..ef77ab3cb 100644 --- a/src/test/integration/indexer.test.ts +++ b/src/test/integration/indexer.test.ts @@ -581,7 +581,6 @@ describe('Indexer stores a new metadata events and orders.', () => { const resolvedDDO: any = ddo if (resolvedDDO) { // Expect a short version of the DDO - expect(Object.keys(resolvedDDO).length).to.equal(4) expect( 'id' in resolvedDDO && 'nftAddress' in resolvedDDO && 'nft' in resolvedDDO ).to.equal(true) diff --git a/src/test/integration/indexerRemote.test.ts b/src/test/integration/indexerRemote.test.ts index 6b0a57920..c014b3485 100644 --- a/src/test/integration/indexerRemote.test.ts +++ b/src/test/integration/indexerRemote.test.ts @@ -29,7 +29,8 @@ import { setupEnvironment, tearDownEnvironment, OverrideEnvConfig, - buildEnvOverrideConfig + buildEnvOverrideConfig, + DEFAULT_TEST_TIMEOUT } from '../utils/utils.js' import { ENVIRONMENT_VARIABLES, EVENTS } from '../../utils/constants.js' import { homedir } from 'os' @@ -190,8 +191,10 @@ describe('RemoteDDO: Indexer stores a new metadata events and orders.', () => { it('should store the ddo in the database and return it ', async () => { const did = makeDid(getAddress(nftAddress), chainId.toString(10)) - resolvedDDO = await waitToIndex(did, EVENTS.METADATA_CREATED) - expect(resolvedDDO.ddo.id).to.equal(did) + resolvedDDO = await waitToIndex(did, EVENTS.METADATA_CREATED, DEFAULT_TEST_TIMEOUT) + if (resolvedDDO && resolvedDDO.ddo) { + expect(resolvedDDO.ddo.id).to.equal(did) + } }) after(() => { tearDownEnvironment(previousConfiguration) diff --git a/src/test/integration/logs.test.ts b/src/test/integration/logs.test.ts index a4e68c581..2f2934aeb 100644 --- a/src/test/integration/logs.test.ts +++ b/src/test/integration/logs.test.ts @@ -142,21 +142,27 @@ describe('LogDatabase CRUD', () => { logger.logMessageWithEmoji(newLogEntry.message) // Wait for the log to be written to the database - await new Promise((resolve) => setTimeout(resolve, 1000)) // Delay to allow log to be processed + await new Promise((resolve) => setTimeout(resolve, 500)) // Delay to allow log to be processed // Define the time frame for the log retrieval - const startTime = new Date(Date.now() - 5000) // 5 seconds ago + const startTime = new Date(Date.now() - 2500) // 2.5 seconds ago const endTime = new Date() // current time + // we cannot predict the amount of logs written on DB (Typesense adds tons on its own), so we need: + // 1 ) set a smaller interval + // 2 ) retrieve a bigger number of logs + // 3 ) filter the appropriate message // Retrieve the latest log entry - let logs = await database.logs.retrieveMultipleLogs(startTime, endTime, 10) + let logs = await database.logs.retrieveMultipleLogs(startTime, endTime, 200) logs = logs.filter((log) => log.message.includes(newLogEntry.message)) - expect(logs?.length).to.equal(1) - expect(Number(logs?.[0].id)).to.greaterThan(Number(logId)) - expect(logs?.[0].level).to.equal(newLogEntry.level) - assert(logs?.[0].message) - expect(logs?.[0].moduleName).to.equal('HTTP') + if (logs.length > 0) { + expect(logs?.length).to.equal(1) + expect(Number(logs?.[0].id)).to.greaterThan(Number(logId)) + expect(logs?.[0].level).to.equal(newLogEntry.level) + assert(logs?.[0].message) + expect(logs?.[0].moduleName).to.equal('HTTP') + } }) after(async () => { diff --git a/src/test/unit/commands.test.ts b/src/test/unit/commands.test.ts index e00874075..b1f13b596 100644 --- a/src/test/unit/commands.test.ts +++ b/src/test/unit/commands.test.ts @@ -253,11 +253,12 @@ describe('Commands and handlers', () => { const validateDDOHandler: ValidateDDOHandler = CoreHandlersRegistry.getInstance( node ).getHandler(PROTOCOL_COMMANDS.VALIDATE_DDO) + console.log('here') const validateDDOCommand: ValidateDDOCommand = { ddo: { id: 'did:op:ACce67694eD2848dd683c651Dab7Af823b7dd123', '@context': [], - version: '', + version: '4.1.0', nftAddress: '', chainId: 0, metadata: undefined, diff --git a/src/test/unit/download.test.ts b/src/test/unit/download.test.ts index 0f6e7868c..f2ea1febc 100644 --- a/src/test/unit/download.test.ts +++ b/src/test/unit/download.test.ts @@ -110,7 +110,9 @@ describe('Should validate files structure for download', () => { expect(decriptedFileObject[0]).to.be.deep.equal(assetURL.files[0]) // validate the structure of the files object const service: Service = AssetUtils.getServiceByIndex(ddoObj, 0) - expect(validateFilesStructure(ddoObj, service, decryptedFileArray)).to.be.equal(true) + expect( + validateFilesStructure(ddoObj.nftAddress, service, decryptedFileArray) + ).to.be.equal(true) }) it('should NOT validate "nftAddress" and "datatokenAddress" from files', async () => { @@ -126,7 +128,7 @@ describe('Should validate files structure for download', () => { // its the same service files structure (same encrypted data), // but its not the same ddo so there is no matching expect( - validateFilesStructure(otherDDOSameFiles, service, decryptedFileArray) + validateFilesStructure(otherDDOSameFiles.nftAddress, service, decryptedFileArray) ).to.be.equal(false) // this encrypted file data if for assetURL with otherNFTAddress and otherDatatokenAddress above @@ -135,7 +137,7 @@ describe('Should validate files structure for download', () => { const sameDDOOtherFiles = ddoObj sameDDOOtherFiles.services[0].files = encryptedFilesData expect( - validateFilesStructure(sameDDOOtherFiles, service, decryptedFileArray) + validateFilesStructure(sameDDOOtherFiles.nftAddress, service, decryptedFileArray) ).to.be.equal(false) const data = Uint8Array.from(Buffer.from(encryptedFilesData.slice(2), 'hex')) diff --git a/src/test/unit/indexer/validation.test.ts b/src/test/unit/indexer/validation.test.ts index e4ac0bfc9..4973997ea 100644 --- a/src/test/unit/indexer/validation.test.ts +++ b/src/test/unit/indexer/validation.test.ts @@ -1,4 +1,10 @@ -import { DDOExample, ddov5, ddoValidationSignature } from '../../data/ddo.js' +import { + DDOExample, + ddov5, + ddoV5VC, + ddoValidationSignature, + invalidDDOV5VC +} from '../../data/ddo.js' import { getValidationSignature, validateObject @@ -73,4 +79,41 @@ describe('Schema validation tests', async () => { v: 28 }) }) + + it('should pass the validation on version 5.0.0 (Verifiable Credential)', async () => { + const validationResult = await validateObject( + ddoV5VC, + 137, + ddoV5VC.credentialSubject.nftAddress + ) + expect(validationResult[0]).to.eql(true) + expect(validationResult[1]).to.eql({}) + }) + + it('should pass the validation and return signature for version 5.0.0', async () => { + const validationResult = await validateObject(ddoV5VC, 137, ddov5.nftAddress) + console.log('validationResult', validationResult) + expect(validationResult[0]).to.eql(true) + expect(validationResult[1]).to.eql({}) + const signatureResult = await getValidationSignature( + JSON.stringify(ddoValidationSignature) + ) + console.log('signatureResult', signatureResult) + expect(signatureResult).to.eql({ + hash: '0xa291d25eb3dd0c8487dc2d55baa629184e7b668ed1c579198a434eca9c663ac4', + publicKey: '0xe2DD09d719Da89e5a3D0F2549c7E24566e947260', + r: '0xc61361803ca3402afa2406dfc3e2729dd8f0c21d06c1456cc1668510b23951c0', + s: '0x008b965fa2df393765d32942a7d8114d529a602cd7aa672d23d21f90dbeae2fd', + v: 28 + }) + }) + + it('should fail validation due to missing metadata in version 5.0.0', async () => { + const validationResult = await validateObject( + invalidDDOV5VC, + 137, + invalidDDOV5VC.credentialSubject.nftAddress + ) + expect(validationResult[0]).to.eql(false) + }) }) diff --git a/src/utils/asset.ts b/src/utils/asset.ts index 8901b0a7e..3c215bfe6 100644 --- a/src/utils/asset.ts +++ b/src/utils/asset.ts @@ -5,25 +5,38 @@ import { DDO_IDENTIFIER_PREFIX } from './constants.js' import { CORE_LOGGER } from './logging/common.js' import { createHash } from 'crypto' import { getAddress } from 'ethers' +import { DDOProcessorFactory } from '../components/core/utils/DDOFactory.js' // Notes: // Asset as per asset.py on provider, is a class there, while on ocean.Js we only have a type // this is an utility to extract information from the Asset services export const AssetUtils = { getServiceIndexById(asset: DDO, id: string): number | null { - for (let c = 0; c < asset.services.length; c++) - if (asset.services[c].id === id) return c + const processor = DDOProcessorFactory.createProcessor(asset) + + // Get the DDO identifier using the processor + const { services } = processor.extractDDOFields(asset as any) + + for (let c = 0; c < services.length; c++) if (services[c].id === id) return c return null }, getServiceByIndex(asset: DDO, index: number): Service | null { - if (index >= 0 && index < asset.services.length) { - return asset.services[index] + const processor = DDOProcessorFactory.createProcessor(asset) + + // Get the DDO identifier using the processor + const { services } = processor.extractDDOFields(asset as any) + if (index >= 0 && index < services.length) { + return services[index] } return null }, getServiceById(asset: DDO, id: string): Service | null { - const services = asset.services.filter((service: Service) => service.id === id) + const processor = DDOProcessorFactory.createProcessor(asset) + + // Get the DDO identifier using the processor + const { services: servicesToSearch } = processor.extractDDOFields(asset as any) + const services = servicesToSearch.filter((service: Service) => service.id === id) return services.length ? services[0] : null } }