feat(torii-indexer): eip 4906 update metadata processor (#2984)
* feat(torii-indexer): eip 4906 update metadata processor

* batch metadata update

* fix race condition batch update

* fmt

* f

* fmt

* fix fetch token metadata

* clippy

* add processor for erc721 and 1155

* fmt

* add erc4906 component and implement in 721 and 1155

* update metadata batch and single

* fix token id

* fmt

* add batch processor

* fmt

* fmt

* update test db
Larkooo authored Feb 18, 2025
1 parent 1065468 commit e5b93ac
Showing 14 changed files with 479 additions and 133 deletions.
6 changes: 6 additions & 0 deletions crates/torii/indexer/src/engine.rs
@@ -33,6 +33,8 @@ use crate::processors::erc1155_transfer_batch::Erc1155TransferBatchProcessor;
use crate::processors::erc1155_transfer_single::Erc1155TransferSingleProcessor;
use crate::processors::erc20_legacy_transfer::Erc20LegacyTransferProcessor;
use crate::processors::erc20_transfer::Erc20TransferProcessor;
use crate::processors::erc4906_batch_metadata_update::Erc4906BatchMetadataUpdateProcessor;
use crate::processors::erc4906_metadata_update::Erc4906MetadataUpdateProcessor;
use crate::processors::erc721_legacy_transfer::Erc721LegacyTransferProcessor;
use crate::processors::erc721_transfer::Erc721TransferProcessor;
use crate::processors::event_message::EventMessageProcessor;
@@ -107,13 +109,17 @@ impl<P: Provider + Send + Sync + std::fmt::Debug + 'static> Processors<P> {
vec![
Box::new(Erc721TransferProcessor) as Box<dyn EventProcessor<P>>,
Box::new(Erc721LegacyTransferProcessor) as Box<dyn EventProcessor<P>>,
Box::new(Erc4906MetadataUpdateProcessor) as Box<dyn EventProcessor<P>>,
Box::new(Erc4906BatchMetadataUpdateProcessor) as Box<dyn EventProcessor<P>>,
],
),
(
ContractType::ERC1155,
vec![
Box::new(Erc1155TransferBatchProcessor) as Box<dyn EventProcessor<P>>,
Box::new(Erc1155TransferSingleProcessor) as Box<dyn EventProcessor<P>>,
Box::new(Erc4906MetadataUpdateProcessor) as Box<dyn EventProcessor<P>>,
Box::new(Erc4906BatchMetadataUpdateProcessor) as Box<dyn EventProcessor<P>>,
],
),
(ContractType::UDC, vec![Box::new(ControllerProcessor) as Box<dyn EventProcessor<P>>]),
74 changes: 74 additions & 0 deletions crates/torii/indexer/src/processors/erc4906_batch_metadata_update.rs
@@ -0,0 +1,74 @@
use anyhow::Error;
use async_trait::async_trait;
use cainome::cairo_serde::{CairoSerde, U256 as U256Cainome};
use dojo_world::contracts::world::WorldContractReader;
use starknet::core::types::{Event, U256};
use starknet::providers::Provider;
use torii_sqlite::Sql;
use tracing::debug;

use super::{EventProcessor, EventProcessorConfig};
use crate::task_manager::{self, TaskId, TaskPriority};

pub(crate) const LOG_TARGET: &str = "torii_indexer::processors::erc4906_metadata_update_batch";

#[derive(Default, Debug)]
pub struct Erc4906BatchMetadataUpdateProcessor;

#[async_trait]
impl<P> EventProcessor<P> for Erc4906BatchMetadataUpdateProcessor
where
P: Provider + Send + Sync + std::fmt::Debug,
{
fn event_key(&self) -> String {
"BatchMetadataUpdate".to_string()
}

fn validate(&self, event: &Event) -> bool {
// Batch metadata update: [hash(BatchMetadataUpdate), from_token_id.low, from_token_id.high,
// to_token_id.low, to_token_id.high]
event.keys.len() == 5 && event.data.is_empty()
}

fn task_priority(&self) -> TaskPriority {
2
}

fn task_identifier(&self, _event: &Event) -> TaskId {
task_manager::TASK_ID_SEQUENTIAL
}

async fn process(
&self,
_world: &WorldContractReader<P>,
db: &mut Sql,
_block_number: u64,
_block_timestamp: u64,
_event_id: &str,
event: &Event,
_config: &EventProcessorConfig,
) -> Result<(), Error> {
let token_address = event.from_address;
let from_token_id = U256Cainome::cairo_deserialize(&event.keys, 1)?;
let from_token_id = U256::from_words(from_token_id.low, from_token_id.high);

let to_token_id = U256Cainome::cairo_deserialize(&event.keys, 3)?;
let to_token_id = U256::from_words(to_token_id.low, to_token_id.high);

let mut token_id = from_token_id;
while token_id <= to_token_id {
db.update_nft_metadata(token_address, token_id).await?;
token_id += U256::from(1u8);
}

debug!(
target: LOG_TARGET,
token_address = ?token_address,
from_token_id = ?from_token_id,
to_token_id = ?to_token_id,
"NFT metadata updated for token range"
);

Ok(())
}
}
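For reference, a minimal sketch (not part of this commit) showing how a BatchMetadataUpdate event's keys line up with the decoding in `process` above. It assumes `starknet::core::types::Event` can be constructed directly through its public fields, and the collection address and token ids are made up for illustration:

use cainome::cairo_serde::{CairoSerde, U256 as U256Cainome};
use starknet::core::types::{Event, Felt, U256};
use starknet::core::utils::get_selector_from_name;

fn main() -> anyhow::Result<()> {
    // keys = [hash(BatchMetadataUpdate), from_token_id.low, from_token_id.high,
    //         to_token_id.low, to_token_id.high]; no data.
    let event = Event {
        from_address: Felt::from(0xabcu32), // hypothetical collection address
        keys: vec![
            get_selector_from_name("BatchMetadataUpdate")?,
            Felt::from(1u32), Felt::ZERO, // from_token_id = 1
            Felt::from(3u32), Felt::ZERO, // to_token_id   = 3
        ],
        data: vec![],
    };

    // Mirrors `validate`: exactly five keys and no data.
    assert!(event.keys.len() == 5 && event.data.is_empty());

    // Mirrors `process`: the low/high pairs start at key offsets 1 and 3.
    let from = U256Cainome::cairo_deserialize(&event.keys, 1)?;
    let from = U256::from_words(from.low, from.high);
    let to = U256Cainome::cairo_deserialize(&event.keys, 3)?;
    let to = U256::from_words(to.low, to.high);
    assert_eq!((from, to), (U256::from(1u8), U256::from(3u8)));
    Ok(())
}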
64 changes: 64 additions & 0 deletions crates/torii/indexer/src/processors/erc4906_metadata_update.rs
@@ -0,0 +1,64 @@
use anyhow::Error;
use async_trait::async_trait;
use cainome::cairo_serde::{CairoSerde, U256 as U256Cainome};
use dojo_world::contracts::world::WorldContractReader;
use starknet::core::types::{Event, U256};
use starknet::providers::Provider;
use torii_sqlite::Sql;
use tracing::debug;

use super::{EventProcessor, EventProcessorConfig};
use crate::task_manager::{self, TaskId, TaskPriority};

pub(crate) const LOG_TARGET: &str = "torii_indexer::processors::erc4906_metadata_update";
#[derive(Default, Debug)]
pub struct Erc4906MetadataUpdateProcessor;

#[async_trait]
impl<P> EventProcessor<P> for Erc4906MetadataUpdateProcessor
where
P: Provider + Send + Sync + std::fmt::Debug,
{
fn event_key(&self) -> String {
"MetadataUpdate".to_string()
}

fn validate(&self, event: &Event) -> bool {
// Single token metadata update: [hash(MetadataUpdate), token_id.low, token_id.high]
event.keys.len() == 3 && event.data.is_empty()
}

fn task_priority(&self) -> TaskPriority {
2
}

fn task_identifier(&self, _event: &Event) -> TaskId {
task_manager::TASK_ID_SEQUENTIAL
}

async fn process(
&self,
_world: &WorldContractReader<P>,
db: &mut Sql,
_block_number: u64,
_block_timestamp: u64,
_event_id: &str,
event: &Event,
_config: &EventProcessorConfig,
) -> Result<(), Error> {
let token_address = event.from_address;
let token_id = U256Cainome::cairo_deserialize(&event.keys, 1)?;
let token_id = U256::from_words(token_id.low, token_id.high);

db.update_nft_metadata(token_address, token_id).await?;

debug!(
target: LOG_TARGET,
token_address = ?token_address,
token_id = ?token_id,
"NFT metadata updated for single token"
);

Ok(())
}
}
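Taken together, the two validate implementations distinguish the ERC-4906 variants purely by key count, since neither event carries any data. A small illustrative helper capturing that rule (MetadataKind and classify are hypothetical names, not part of the commit):

use starknet::core::types::Event;

#[derive(Debug, PartialEq)]
enum MetadataKind {
    Single, // keys = [selector, token_id.low, token_id.high]
    Batch,  // keys = [selector, from.low, from.high, to.low, to.high]
}

fn classify(event: &Event) -> Option<MetadataKind> {
    if !event.data.is_empty() {
        return None; // ERC-4906 metadata events put everything in keys
    }
    match event.keys.len() {
        3 => Some(MetadataKind::Single),
        5 => Some(MetadataKind::Batch),
        _ => None,
    }
}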
3 changes: 3 additions & 0 deletions crates/torii/indexer/src/processors/mod.rs
@@ -14,6 +14,8 @@ pub mod erc1155_transfer_batch;
pub mod erc1155_transfer_single;
pub mod erc20_legacy_transfer;
pub mod erc20_transfer;
pub mod erc4906_batch_metadata_update;
pub mod erc4906_metadata_update;
pub mod erc721_legacy_transfer;
pub mod erc721_transfer;
pub mod event_message;
@@ -28,6 +30,7 @@ pub mod store_update_member;
pub mod store_update_record;
pub mod upgrade_event;
pub mod upgrade_model;

#[derive(Clone, Debug, Default)]
pub struct EventProcessorConfig {
pub historical_events: HashSet<String>,
42 changes: 26 additions & 16 deletions crates/torii/sqlite/src/erc.rs
@@ -10,7 +10,7 @@ use starknet::providers::Provider;
use super::utils::{u256_to_sql_string, I256};
use super::{Sql, SQL_FELT_DELIMITER};
use crate::constants::TOKEN_TRANSFER_TABLE;
use crate::executor::erc::RegisterNftTokenQuery;
use crate::executor::erc::{RegisterNftTokenQuery, UpdateNftMetadataQuery};
use crate::executor::{
ApplyBalanceDiffQuery, Argument, QueryMessage, QueryType, RegisterErc20TokenQuery,
};
@@ -91,20 +91,19 @@ impl Sql {
block_number: u64,
) -> Result<()> {
// contract_address:id
let actual_token_id = token_id;
let token_id = felt_and_u256_to_sql_string(&contract_address, &token_id);
let token_exists: bool = self.local_cache.contains_token_id(&token_id).await;
let id = felt_and_u256_to_sql_string(&contract_address, &token_id);
let token_exists: bool = self.local_cache.contains_token_id(&id).await;

if !token_exists {
self.register_nft_token_metadata(contract_address, &token_id, actual_token_id).await?;
self.register_nft_token_metadata(&id, contract_address, token_id).await?;
}

self.store_erc_transfer_event(
contract_address,
from_address,
to_address,
amount,
&token_id,
&id,
block_timestamp,
event_id,
)?;
@@ -113,19 +112,16 @@
{
let mut erc_cache = self.local_cache.erc_cache.write().await;
if from_address != Felt::ZERO {
let from_balance_id = format!(
"{}{SQL_FELT_DELIMITER}{}",
felt_to_sql_string(&from_address),
&token_id
);
let from_balance_id =
format!("{}{SQL_FELT_DELIMITER}{}", felt_to_sql_string(&from_address), &id);
let from_balance =
erc_cache.entry((ContractType::ERC721, from_balance_id)).or_default();
*from_balance -= I256::from(amount);
}

if to_address != Felt::ZERO {
let to_balance_id =
format!("{}{SQL_FELT_DELIMITER}{}", felt_to_sql_string(&to_address), &token_id);
format!("{}{SQL_FELT_DELIMITER}{}", felt_to_sql_string(&to_address), &id);
let to_balance =
erc_cache.entry((ContractType::ERC721, to_balance_id)).or_default();
*to_balance += I256::from(amount);
Expand All @@ -142,6 +138,20 @@ impl Sql {
Ok(())
}

pub async fn update_nft_metadata(
&mut self,
contract_address: Felt,
token_id: U256,
) -> Result<()> {
self.executor.send(QueryMessage::new(
"".to_string(),
vec![],
QueryType::UpdateNftMetadata(UpdateNftMetadataQuery { contract_address, token_id }),
))?;

Ok(())
}

async fn register_erc20_token_metadata<P: Provider + Sync>(
&mut self,
contract_address: Felt,
@@ -222,25 +232,25 @@

async fn register_nft_token_metadata(
&mut self,
id: &str,
contract_address: Felt,
token_id: &str,
actual_token_id: U256,
) -> Result<()> {
self.executor.send(QueryMessage::new(
"".to_string(),
vec![],
QueryType::RegisterNftToken(RegisterNftTokenQuery {
token_id: token_id.to_string(),
id: id.to_string(),
contract_address,
actual_token_id,
token_id: actual_token_id,
}),
))?;

// optimistically add the token_id to cache
// this cache is used while applying the cache diff
// so we need to make sure that all RegisterErc*Token queries
// are applied before the cache diff is applied
self.local_cache.register_token_id(token_id.to_string()).await;
self.local_cache.register_token_id(id.to_string()).await;

Ok(())
}
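To tie the pieces together, a rough usage sketch (not in the diff) of the new entry point: db is the same torii_sqlite::Sql handle the processors receive, and update_nft_metadata only enqueues a QueryType::UpdateNftMetadata message, so the executor performs the actual metadata refresh. The loop mirrors the batch processor above.

use starknet::core::types::{Felt, U256};
use torii_sqlite::Sql;

// Hypothetical driver: one enqueued update per token id in the inclusive range.
async fn apply_metadata_update(
    db: &mut Sql,
    collection: Felt,
    from_token_id: U256,
    to_token_id: U256,
) -> anyhow::Result<()> {
    let mut token_id = from_token_id;
    while token_id <= to_token_id {
        db.update_nft_metadata(collection, token_id).await?;
        token_id += U256::from(1u8);
    }
    Ok(())
}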