Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(torii-indexer): eip 4906 update metadata processor #2984

Merged
merged 20 commits into from
Feb 18, 2025
Merged
Show file tree
Hide file tree
Changes from 7 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
116 changes: 116 additions & 0 deletions crates/torii/indexer/src/processors/erc4906_metadata_update.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,116 @@
use std::hash::{DefaultHasher, Hash, Hasher};

use anyhow::Error;
use async_trait::async_trait;
use cainome::cairo_serde::{CairoSerde, U256 as U256Cainome};
use dojo_world::contracts::world::WorldContractReader;
use starknet::core::types::{Event, U256};
use starknet::providers::Provider;
use torii_sqlite::Sql;
use tracing::{debug, warn};

use super::{EventProcessor, EventProcessorConfig};
use crate::task_manager::{TaskId, TaskPriority};

/// `tracing` target under which this processor emits its log records.
pub(crate) const LOG_TARGET: &str = "torii_indexer::processors::erc4906_metadata_update";

/// Processor for EIP-4906 metadata update events emitted by ERC721 contracts.
///
/// Handles both `MetadataUpdate` (single token) and `BatchMetadataUpdate`
/// (inclusive token-id range) and triggers a refresh of the stored token
/// metadata via [`Sql::update_erc721_metadata`]. Stateless — all inputs come
/// from the event being processed.
#[derive(Default, Debug)]
pub struct Erc4906MetadataUpdateProcessor;

#[async_trait]
impl<P> EventProcessor<P> for Erc4906MetadataUpdateProcessor
where
P: Provider + Send + Sync + std::fmt::Debug,
{
fn event_key(&self) -> String {
// We'll handle both event types in validate()
"MetadataUpdate".to_string()
}

fn validate(&self, event: &Event) -> bool {
// Single token metadata update: [hash(MetadataUpdate), token_id.low, token_id.high]
if event.keys.len() == 3 && event.data.is_empty() {
return true;
}

// Batch metadata update: [hash(BatchMetadataUpdate), from_token_id.low, from_token_id.high,
// to_token_id.low, to_token_id.high]
if event.keys.len() == 5 && event.data.is_empty() {
return true;
}

false
}

fn task_priority(&self) -> TaskPriority {
2 // Lower priority than transfers
}

fn task_identifier(&self, event: &Event) -> TaskId {
let mut hasher = DefaultHasher::new();
event.from_address.hash(&mut hasher); // Hash the contract address

// For single token updates
if event.keys.len() == 3 {
event.keys[1].hash(&mut hasher); // token_id.low
event.keys[2].hash(&mut hasher); // token_id.high
} else {
// For batch updates, we need to be more conservative
// Hash just the contract address to serialize all batch updates for the same contract
// This prevents race conditions with overlapping ranges
}

hasher.finish()
}

async fn process(
&self,
_world: &WorldContractReader<P>,
db: &mut Sql,
_block_number: u64,
_block_timestamp: u64,
_event_id: &str,
event: &Event,
_config: &EventProcessorConfig,
) -> Result<(), Error> {
let token_address = event.from_address;

if event.keys.len() == 3 {
// Single token metadata update
let token_id = U256Cainome::cairo_deserialize(&event.keys, 1)?;
let token_id = U256::from_words(token_id.low, token_id.high);

db.update_erc721_metadata(token_address, token_id).await?;

debug!(
target: LOG_TARGET,
token_address = ?token_address,
token_id = ?token_id,
"ERC721 metadata updated for single token"
);
} else {
// Batch metadata update
let from_token_id = U256Cainome::cairo_deserialize(&event.keys, 1)?;
let from_token_id = U256::from_words(from_token_id.low, from_token_id.high);

let to_token_id = U256Cainome::cairo_deserialize(&event.keys, 3)?;
let to_token_id = U256::from_words(to_token_id.low, to_token_id.high);

let mut token_id = from_token_id;
while token_id <= to_token_id {
db.update_erc721_metadata(token_address, token_id).await?;
token_id += U256::from(1u8);
}

debug!(
target: LOG_TARGET,
token_address = ?token_address,
from_token_id = ?from_token_id,
to_token_id = ?to_token_id,
"ERC721 metadata updated for token range"
);
}

Ok(())
}
Larkooo marked this conversation as resolved.
Show resolved Hide resolved
}
2 changes: 2 additions & 0 deletions crates/torii/indexer/src/processors/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ use crate::task_manager::{TaskId, TaskPriority};
pub mod controller;
pub mod erc20_legacy_transfer;
pub mod erc20_transfer;
pub mod erc4906_metadata_update;
pub mod erc721_legacy_transfer;
pub mod erc721_transfer;
pub mod event_message;
Expand All @@ -26,6 +27,7 @@ pub mod store_update_member;
pub mod store_update_record;
pub mod upgrade_event;
pub mod upgrade_model;

#[derive(Clone, Debug, Default)]
pub struct EventProcessorConfig {
pub historical_events: HashSet<String>,
Expand Down
36 changes: 26 additions & 10 deletions crates/torii/sqlite/src/erc.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ use starknet::providers::Provider;
use super::utils::{u256_to_sql_string, I256};
use super::{Sql, SQL_FELT_DELIMITER};
use crate::constants::TOKEN_TRANSFER_TABLE;
use crate::executor::erc::UpdateErc721MetadataQuery;
use crate::executor::{
ApplyBalanceDiffQuery, Argument, QueryMessage, QueryType, RegisterErc20TokenQuery,
RegisterErc721TokenQuery,
Expand Down Expand Up @@ -90,21 +91,19 @@ impl Sql {
block_number: u64,
) -> Result<()> {
// contract_address:id
let actual_token_id = token_id;
let token_id = felt_and_u256_to_sql_string(&contract_address, &token_id);
let token_exists: bool = self.local_cache.contains_token_id(&token_id).await;
let id = felt_and_u256_to_sql_string(&contract_address, &token_id);
let token_exists: bool = self.local_cache.contains_token_id(&id).await;

if !token_exists {
self.register_erc721_token_metadata(contract_address, &token_id, actual_token_id)
.await?;
self.register_erc721_token_metadata(&id, contract_address, token_id).await?;
}

self.store_erc_transfer_event(
contract_address,
from_address,
to_address,
U256::from(1u8),
&token_id,
&id,
block_timestamp,
event_id,
)?;
Expand Down Expand Up @@ -142,6 +141,23 @@ impl Sql {
Ok(())
}

/// Enqueue a metadata refresh for a single ERC721 token.
///
/// Sends an `UpdateErc721Metadata` query to the executor; the actual metadata
/// fetch and row update happen asynchronously on the executor side, so this
/// returns as soon as the message is queued.
pub async fn update_erc721_metadata(
    &mut self,
    contract_address: Felt,
    token_id: U256,
) -> Result<()> {
    let query = UpdateErc721MetadataQuery { contract_address, token_id };
    let message =
        QueryMessage::new(String::new(), Vec::new(), QueryType::UpdateErc721Metadata(query));
    self.executor.send(message)?;
    Ok(())
}

async fn register_erc20_token_metadata<P: Provider + Sync>(
&mut self,
contract_address: Felt,
Expand Down Expand Up @@ -222,25 +238,25 @@ impl Sql {

async fn register_erc721_token_metadata(
&mut self,
id: &str,
contract_address: Felt,
token_id: &str,
actual_token_id: U256,
) -> Result<()> {
self.executor.send(QueryMessage::new(
"".to_string(),
vec![],
QueryType::RegisterErc721Token(RegisterErc721TokenQuery {
token_id: token_id.to_string(),
id: id.to_string(),
contract_address,
actual_token_id,
token_id: actual_token_id,
}),
))?;

// optimistically add the token_id to cache
// this cache is used while applying the cache diff
// so we need to make sure that all RegisterErc*Token queries
// are applied before the cache diff is applied
self.local_cache.register_token_id(token_id.to_string()).await;
self.local_cache.register_token_id(id.to_string()).await;

Ok(())
}
Expand Down
Loading
Loading