diff --git a/components/chainhook-postgres/src/lib.rs b/components/chainhook-postgres/src/lib.rs
index 0db2c37b..8650249b 100644
--- a/components/chainhook-postgres/src/lib.rs
+++ b/components/chainhook-postgres/src/lib.rs
@@ -4,6 +4,11 @@ pub mod utils;
 use deadpool_postgres::{Manager, ManagerConfig, Object, Pool, RecyclingMethod, Transaction};
 use tokio_postgres::{Client, Config, NoTls, Row};
 
+/// Standard chunk size to use when batching multiple rows into a single SQL statement to save on DB round trips.
+/// This value stays well below Postgres' limit of 65535 bind parameters per statement, but the safe maximum depends on
+/// how many columns each row binds, so queries should use other custom chunk sizes as needed.
+pub const BATCH_QUERY_CHUNK_SIZE: usize = 500;
+
 /// A Postgres configuration for a single database.
 #[derive(Clone, Debug)]
 pub struct PgConnectionConfig {
diff --git a/components/ordhook-core/src/core/meta_protocols/brc20/brc20_pg.rs b/components/ordhook-core/src/core/meta_protocols/brc20/brc20_pg.rs
index 830520f9..62ae0f5b 100644
--- a/components/ordhook-core/src/core/meta_protocols/brc20/brc20_pg.rs
+++ b/components/ordhook-core/src/core/meta_protocols/brc20/brc20_pg.rs
@@ -2,7 +2,7 @@ use std::collections::HashMap;
 
 use chainhook_postgres::{
     types::{PgNumericU128, PgNumericU64},
-    utils, FromPgRow,
+    utils, FromPgRow, BATCH_QUERY_CHUNK_SIZE,
 };
 use chainhook_sdk::types::{
     BitcoinBlockData, Brc20BalanceData, Brc20Operation, Brc20TokenDeployData, Brc20TransferData,
@@ -79,24 +79,43 @@ pub async fn get_token_available_balance_for_address(
     Ok(Some(supply.0))
 }
 
-pub async fn get_unsent_token_transfer<T: GenericClient>(
-    ordinal_number: u64,
+pub async fn get_unsent_token_transfers<T: GenericClient>(
+    ordinal_numbers: &Vec<u64>,
     client: &T,
-) -> Result<Option<DbOperation>, String> {
-    let row = client
-        .query_opt(
-            "SELECT * FROM operations
-            WHERE ordinal_number = $1 AND operation = 'transfer'
-            AND NOT EXISTS (SELECT 1 FROM operations WHERE ordinal_number = $1 AND operation = 'transfer_send')
-            LIMIT 1",
-            &[&PgNumericU64(ordinal_number)],
-        )
-        .await
-        .map_err(|e| format!("get_unsent_token_transfer: {e}"))?;
-    let Some(row) = row else {
-        return Ok(None);
-    };
-    Ok(Some(DbOperation::from_pg_row(&row)))
+) -> Result<Vec<DbOperation>, String> {
+    if ordinal_numbers.is_empty() {
+        return Ok(vec![]);
+    }
+    let mut results = vec![];
+    // We can afford a larger chunk size here because the whole chunk is bound as a single array parameter.
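
The doc comment on `BATCH_QUERY_CHUNK_SIZE` ties the safe chunk size to the number of bind parameters each row contributes. A minimal, std-only sketch of that arithmetic, assuming a hypothetical 20-column row (the column count is illustrative, not taken from the schema):

```rust
// Postgres' extended protocol carries the bind-parameter count as a 16-bit integer,
// so a multi-row statement must keep `rows_per_chunk * params_per_row` within that ceiling.
const PG_MAX_PARAMS: usize = u16::MAX as usize; // 65_535

fn max_rows_per_statement(columns_per_row: usize) -> usize {
    PG_MAX_PARAMS / columns_per_row
}

fn main() {
    // A hypothetical 20-column row could batch up to 3_276 rows per statement,
    // so a conservative default of 500 leaves ample headroom.
    assert!(500 <= max_rows_per_statement(20));
    println!("max rows at 20 columns: {}", max_rows_per_statement(20));
}
```

The ordinal-number lookup below can use a much larger chunk (5,000) because the whole chunk is bound as one array parameter via `ANY($1)`.
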
+ for chunk in ordinal_numbers.chunks(5000) { + let mut wrapped = Vec::with_capacity(chunk.len()); + for n in chunk { + wrapped.push(PgNumericU64(*n)); + } + let mut params = vec![]; + for number in wrapped.iter() { + params.push(number); + } + let rows = client + .query( + "SELECT * + FROM operations o + WHERE operation = 'transfer' + AND o.ordinal_number = ANY($1) + AND NOT EXISTS ( + SELECT 1 FROM operations + WHERE ordinal_number = o.ordinal_number + AND operation = 'transfer_send' + ) + LIMIT 1", + &[¶ms], + ) + .await + .map_err(|e| format!("get_unsent_token_transfers: {e}"))?; + results.extend(rows.iter().map(|row| DbOperation::from_pg_row(row))); + } + Ok(results) } pub async fn insert_tokens( @@ -106,7 +125,7 @@ pub async fn insert_tokens( if tokens.len() == 0 { return Ok(()); } - for chunk in tokens.chunks(500) { + for chunk in tokens.chunks(BATCH_QUERY_CHUNK_SIZE) { let mut params: Vec<&(dyn ToSql + Sync)> = vec![]; for row in chunk.iter() { params.push(&row.ticker); @@ -148,7 +167,7 @@ pub async fn insert_operations( if operations.len() == 0 { return Ok(()); } - for chunk in operations.chunks(500) { + for chunk in operations.chunks(BATCH_QUERY_CHUNK_SIZE) { let mut params: Vec<&(dyn ToSql + Sync)> = vec![]; for row in chunk.iter() { params.push(&row.ticker); @@ -253,7 +272,11 @@ pub async fn update_address_operation_counts( if counts.len() == 0 { return Ok(()); } - for chunk in counts.keys().collect::>().chunks(500) { + for chunk in counts + .keys() + .collect::>() + .chunks(BATCH_QUERY_CHUNK_SIZE) + { let mut params: Vec<&(dyn ToSql + Sync)> = vec![]; let mut insert_rows = 0; for address in chunk { @@ -287,7 +310,11 @@ pub async fn update_token_operation_counts( if counts.len() == 0 { return Ok(()); } - for chunk in counts.keys().collect::>().chunks(500) { + for chunk in counts + .keys() + .collect::>() + .chunks(BATCH_QUERY_CHUNK_SIZE) + { let mut converted = HashMap::new(); for tick in chunk { converted.insert(*tick, counts.get(*tick).unwrap().to_string()); @@ -324,7 +351,11 @@ pub async fn update_token_minted_supplies( if supplies.len() == 0 { return Ok(()); } - for chunk in supplies.keys().collect::>().chunks(500) { + for chunk in supplies + .keys() + .collect::>() + .chunks(BATCH_QUERY_CHUNK_SIZE) + { let mut converted = HashMap::new(); for tick in chunk { converted.insert(*tick, supplies.get(*tick).unwrap().0.to_string()); diff --git a/components/ordhook-core/src/core/meta_protocols/brc20/cache.rs b/components/ordhook-core/src/core/meta_protocols/brc20/cache.rs index 64a22f9e..c07bf346 100644 --- a/components/ordhook-core/src/core/meta_protocols/brc20/cache.rs +++ b/components/ordhook-core/src/core/meta_protocols/brc20/cache.rs @@ -1,4 +1,7 @@ -use std::{collections::HashMap, num::NonZeroUsize}; +use std::{ + collections::{HashMap, HashSet}, + num::NonZeroUsize, +}; use chainhook_postgres::types::{PgBigIntU32, PgNumericU128, PgNumericU64, PgSmallIntU8}; use chainhook_sdk::types::{ @@ -144,30 +147,44 @@ impl Brc20MemoryCache { return Ok(None); } - pub async fn get_unsent_token_transfer( + pub async fn get_unsent_token_transfers( &mut self, - ordinal_number: u64, + ordinal_numbers: &Vec<&u64>, client: &T, - ) -> Result, String> { - // Use `get` instead of `contains` so we promote this value in the LRU. 
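
For reference, the `= ANY($1)` lookup in `get_unsent_token_transfers` above binds an entire chunk as one array parameter, so a single round trip covers many ordinal numbers. A minimal `tokio_postgres` sketch of the same pattern, assuming `ordinal_number` is stored as a plain `BIGINT` rather than the `NUMERIC` behind the crate's `PgNumericU64` wrapper:

```rust
use tokio_postgres::Client;

// Fetch the ordinal numbers that still have an unsent `transfer` row, in one query.
async fn fetch_pending_ordinals(
    client: &Client,
    ordinal_numbers: &[i64],
) -> Result<Vec<i64>, tokio_postgres::Error> {
    let rows = client
        .query(
            "SELECT ordinal_number FROM operations
             WHERE operation = 'transfer' AND ordinal_number = ANY($1)",
            // The slice is bound as a single Postgres array parameter.
            &[&ordinal_numbers],
        )
        .await?;
    Ok(rows.iter().map(|row| row.get::<_, i64>(0)).collect())
}
```
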
- if let Some(_) = self.ignored_inscriptions.get(&ordinal_number) { - return Ok(None); - } - if let Some(row) = self.unsent_transfers.get(&ordinal_number) { - return Ok(Some(row.clone())); + ) -> Result, String> { + let mut results = vec![]; + let mut cache_missed_ordinal_numbers = HashSet::new(); + for ordinal_number in ordinal_numbers.iter() { + // Use `get` instead of `contains` so we promote this value in the LRU. + if let Some(_) = self.ignored_inscriptions.get(*ordinal_number) { + continue; + } + if let Some(row) = self.unsent_transfers.get(*ordinal_number) { + results.push(row.clone()); + } else { + cache_missed_ordinal_numbers.insert(**ordinal_number); + } } - self.handle_cache_miss(client).await?; - match brc20_pg::get_unsent_token_transfer(ordinal_number, client).await? { - Some(row) => { - self.unsent_transfers.put(ordinal_number, row.clone()); - return Ok(Some(row)); + if !cache_missed_ordinal_numbers.is_empty() { + // Some ordinal numbers were not in cache, check DB. + self.handle_cache_miss(client).await?; + let pending_transfers = brc20_pg::get_unsent_token_transfers( + &cache_missed_ordinal_numbers.iter().cloned().collect(), + client, + ) + .await?; + for unsent_transfer in pending_transfers.into_iter() { + cache_missed_ordinal_numbers.remove(&unsent_transfer.ordinal_number.0); + self.unsent_transfers + .put(unsent_transfer.ordinal_number.0, unsent_transfer.clone()); + results.push(unsent_transfer); } - None => { - // Inscription is not relevant for BRC20. - self.ignore_inscription(ordinal_number); - return Ok(None); + // Ignore all irrelevant numbers. + for irrelevant_number in cache_missed_ordinal_numbers.iter() { + self.ignore_inscription(*irrelevant_number); } } + return Ok(results); } /// Marks an ordinal number as ignored so we don't bother computing its transfers for BRC20 purposes. @@ -454,12 +471,12 @@ impl Brc20MemoryCache { return Ok(transfer.clone()); } self.handle_cache_miss(client).await?; - let Some(transfer) = brc20_pg::get_unsent_token_transfer(ordinal_number, client).await? - else { + let transfers = brc20_pg::get_unsent_token_transfers(&vec![ordinal_number], client).await?; + let Some(transfer) = transfers.first() else { unreachable!("Invalid transfer ordinal number {}", ordinal_number) }; self.unsent_transfers.put(ordinal_number, transfer.clone()); - return Ok(transfer); + return Ok(transfer.clone()); } async fn handle_cache_miss(&mut self, client: &T) -> Result<(), String> { diff --git a/components/ordhook-core/src/core/meta_protocols/brc20/index.rs b/components/ordhook-core/src/core/meta_protocols/brc20/index.rs index dd0adf46..ba887480 100644 --- a/components/ordhook-core/src/core/meta_protocols/brc20/index.rs +++ b/components/ordhook-core/src/core/meta_protocols/brc20/index.rs @@ -3,8 +3,8 @@ use std::collections::HashMap; use deadpool_postgres::Transaction; use chainhook_sdk::{ types::{ - BitcoinBlockData, Brc20BalanceData, Brc20Operation, Brc20TokenDeployData, - Brc20TransferData, OrdinalOperation, + BitcoinBlockData, BlockIdentifier, Brc20BalanceData, Brc20Operation, Brc20TokenDeployData, + Brc20TransferData, OrdinalInscriptionTransferData, OrdinalOperation, TransactionIdentifier, }, utils::Context, }; @@ -15,10 +15,66 @@ use super::{ brc20_activation_height, cache::Brc20MemoryCache, parser::ParsedBrc20Operation, - verifier::{verify_brc20_operation, verify_brc20_transfer, VerifiedBrc20Operation}, + verifier::{verify_brc20_operation, verify_brc20_transfers, VerifiedBrc20Operation}, }; -/// Indexes BRC-20 operations in a Bitcoin block. 
Also writes the indexed data to DB. +/// Index ordinal transfers in a single Bitcoin block looking for BRC-20 transfers. +async fn index_unverified_brc20_transfers( + transfers: &Vec<(&TransactionIdentifier, &OrdinalInscriptionTransferData)>, + block_identifier: &BlockIdentifier, + timestamp: u32, + brc20_cache: &mut Brc20MemoryCache, + brc20_db_tx: &Transaction<'_>, + ctx: &Context, +) -> Result, String> { + if transfers.is_empty() { + return Ok(vec![]); + } + let mut results = vec![]; + let mut verified_brc20_transfers = + verify_brc20_transfers(transfers, brc20_cache, &brc20_db_tx, &ctx).await?; + // Sort verified transfers by tx_index to make sure they are applied in the order they came through. + verified_brc20_transfers.sort_by(|a, b| a.2.tx_index.cmp(&b.2.tx_index)); + + for (inscription_id, data, transfer, tx_identifier) in verified_brc20_transfers.into_iter() { + let Some(token) = brc20_cache.get_token(&data.tick, brc20_db_tx).await? else { + unreachable!(); + }; + results.push(( + transfer.tx_index, + Brc20Operation::TransferSend(Brc20TransferData { + tick: data.tick.clone(), + amt: u128_amount_to_decimals_str(data.amt, token.decimals.0), + sender_address: data.sender_address.clone(), + receiver_address: data.receiver_address.clone(), + inscription_id, + }), + )); + brc20_cache + .insert_token_transfer_send( + &data, + &transfer, + block_identifier, + timestamp, + &tx_identifier, + transfer.tx_index as u64, + brc20_db_tx, + ) + .await?; + try_info!( + ctx, + "BRC-20 transfer_send {} {} ({} -> {}) at block {}", + data.tick, + data.amt, + data.sender_address, + data.receiver_address, + block_identifier.index + ); + } + Ok(results) +} + +/// Indexes BRC-20 operations in a single Bitcoin block. Also writes indexed data to DB. pub async fn index_block_and_insert_brc20_operations( block: &mut BitcoinBlockData, brc20_operation_map: &mut HashMap, @@ -29,160 +85,128 @@ pub async fn index_block_and_insert_brc20_operations( if block.block_identifier.index < brc20_activation_height(&block.metadata.network) { return Ok(()); } + // Ordinal transfers may be BRC-20 transfers. We group them into a vector to minimize round trips to the db when analyzing + // them. We will always insert them correctly in between new BRC-20 operations. + let mut unverified_ordinal_transfers = vec![]; + let mut verified_brc20_transfers = vec![]; + + // Check every transaction in the block. Look for BRC-20 operations. for (tx_index, tx) in block.transactions.iter_mut().enumerate() { for op in tx.metadata.ordinal_operations.iter() { match op { OrdinalOperation::InscriptionRevealed(reveal) => { - if let Some(parsed_brc20_operation) = + let Some(parsed_brc20_operation) = brc20_operation_map.get(&reveal.inscription_id) - { - match verify_brc20_operation( - parsed_brc20_operation, - reveal, + else { + brc20_cache.ignore_inscription(reveal.ordinal_number); + continue; + }; + // First, verify any pending transfers as they may affect balances for the next operation. + verified_brc20_transfers.append( + &mut index_unverified_brc20_transfers( + &unverified_ordinal_transfers, &block.block_identifier, - &block.metadata.network, + block.timestamp, brc20_cache, - &brc20_db_tx, - &ctx, + brc20_db_tx, + ctx, ) - .await? 
- { - Some(VerifiedBrc20Operation::TokenDeploy(token)) => { - tx.metadata.brc20_operation = - Some(Brc20Operation::Deploy(Brc20TokenDeployData { - tick: token.tick.clone(), - max: u128_amount_to_decimals_str(token.max, token.dec), - lim: u128_amount_to_decimals_str(token.lim, token.dec), - dec: token.dec.to_string(), - address: token.address.clone(), - inscription_id: reveal.inscription_id.clone(), - self_mint: token.self_mint, - })); - brc20_cache.insert_token_deploy( - &token, + .await?, + ); + unverified_ordinal_transfers.clear(); + // Then continue with the new operation. + let Some(operation) = verify_brc20_operation( + parsed_brc20_operation, + reveal, + &block.block_identifier, + &block.metadata.network, + brc20_cache, + &brc20_db_tx, + &ctx, + ) + .await? + else { + brc20_cache.ignore_inscription(reveal.ordinal_number); + continue; + }; + match operation { + VerifiedBrc20Operation::TokenDeploy(token) => { + tx.metadata.brc20_operation = + Some(Brc20Operation::Deploy(Brc20TokenDeployData { + tick: token.tick.clone(), + max: u128_amount_to_decimals_str(token.max, token.dec), + lim: u128_amount_to_decimals_str(token.lim, token.dec), + dec: token.dec.to_string(), + address: token.address.clone(), + inscription_id: reveal.inscription_id.clone(), + self_mint: token.self_mint, + })); + brc20_cache.insert_token_deploy( + &token, + reveal, + &block.block_identifier, + block.timestamp, + &tx.transaction_identifier, + tx_index as u64, + )?; + try_info!( + ctx, + "BRC-20 deploy {} ({}) at block {}", + token.tick, + token.address, + block.block_identifier.index + ); + } + VerifiedBrc20Operation::TokenMint(balance) => { + let Some(token) = + brc20_cache.get_token(&balance.tick, brc20_db_tx).await? + else { + unreachable!(); + }; + tx.metadata.brc20_operation = + Some(Brc20Operation::Mint(Brc20BalanceData { + tick: balance.tick.clone(), + amt: u128_amount_to_decimals_str(balance.amt, token.decimals.0), + address: balance.address.clone(), + inscription_id: reveal.inscription_id.clone(), + })); + brc20_cache + .insert_token_mint( + &balance, reveal, &block.block_identifier, block.timestamp, &tx.transaction_identifier, tx_index as u64, - )?; - try_info!( - ctx, - "BRC-20 deploy {} ({}) at block {}", - token.tick, - token.address, - block.block_identifier.index - ); - } - Some(VerifiedBrc20Operation::TokenMint(balance)) => { - let Some(token) = - brc20_cache.get_token(&balance.tick, brc20_db_tx).await? - else { - unreachable!(); - }; - tx.metadata.brc20_operation = - Some(Brc20Operation::Mint(Brc20BalanceData { - tick: balance.tick.clone(), - amt: u128_amount_to_decimals_str( - balance.amt, - token.decimals.0, - ), - address: balance.address.clone(), - inscription_id: reveal.inscription_id.clone(), - })); - brc20_cache - .insert_token_mint( - &balance, - reveal, - &block.block_identifier, - block.timestamp, - &tx.transaction_identifier, - tx_index as u64, - brc20_db_tx, - ) - .await?; - try_info!( - ctx, - "BRC-20 mint {} {} ({}) at block {}", - balance.tick, - balance.amt, - balance.address, - block.block_identifier.index - ); - } - Some(VerifiedBrc20Operation::TokenTransfer(balance)) => { - let Some(token) = - brc20_cache.get_token(&balance.tick, brc20_db_tx).await? 
- else { - unreachable!(); - }; - tx.metadata.brc20_operation = - Some(Brc20Operation::Transfer(Brc20BalanceData { - tick: balance.tick.clone(), - amt: u128_amount_to_decimals_str( - balance.amt, - token.decimals.0, - ), - address: balance.address.clone(), - inscription_id: reveal.inscription_id.clone(), - })); - brc20_cache - .insert_token_transfer( - &balance, - reveal, - &block.block_identifier, - block.timestamp, - &tx.transaction_identifier, - tx_index as u64, - brc20_db_tx, - ) - .await?; - try_info!( - ctx, - "BRC-20 transfer {} {} ({}) at block {}", - balance.tick, - balance.amt, - balance.address, - block.block_identifier.index - ); - } - Some(VerifiedBrc20Operation::TokenTransferSend(_)) => { - unreachable!("BRC-20 token transfer send should never be generated on reveal") - } - None => { - brc20_cache.ignore_inscription(reveal.ordinal_number); - } + brc20_db_tx, + ) + .await?; + try_info!( + ctx, + "BRC-20 mint {} {} ({}) at block {}", + balance.tick, + balance.amt, + balance.address, + block.block_identifier.index + ); } - } else { - brc20_cache.ignore_inscription(reveal.ordinal_number); - } - } - OrdinalOperation::InscriptionTransferred(transfer) => { - match verify_brc20_transfer(transfer, brc20_cache, &brc20_db_tx, &ctx).await? { - Some(data) => { + VerifiedBrc20Operation::TokenTransfer(balance) => { let Some(token) = - brc20_cache.get_token(&data.tick, brc20_db_tx).await? - else { - unreachable!(); - }; - let Some(unsent_transfer) = brc20_cache - .get_unsent_token_transfer(transfer.ordinal_number, brc20_db_tx) - .await? + brc20_cache.get_token(&balance.tick, brc20_db_tx).await? else { unreachable!(); }; tx.metadata.brc20_operation = - Some(Brc20Operation::TransferSend(Brc20TransferData { - tick: data.tick.clone(), - amt: u128_amount_to_decimals_str(data.amt, token.decimals.0), - sender_address: data.sender_address.clone(), - receiver_address: data.receiver_address.clone(), - inscription_id: unsent_transfer.inscription_id, + Some(Brc20Operation::Transfer(Brc20BalanceData { + tick: balance.tick.clone(), + amt: u128_amount_to_decimals_str(balance.amt, token.decimals.0), + address: balance.address.clone(), + inscription_id: reveal.inscription_id.clone(), })); brc20_cache - .insert_token_transfer_send( - &data, - &transfer, + .insert_token_transfer( + &balance, + reveal, &block.block_identifier, block.timestamp, &tx.transaction_identifier, @@ -192,20 +216,260 @@ pub async fn index_block_and_insert_brc20_operations( .await?; try_info!( ctx, - "BRC-20 transfer_send {} {} ({} -> {}) at block {}", - data.tick, - data.amt, - data.sender_address, - data.receiver_address, + "BRC-20 transfer {} {} ({}) at block {}", + balance.tick, + balance.amt, + balance.address, block.block_identifier.index ); } - _ => {} + VerifiedBrc20Operation::TokenTransferSend(_) => { + unreachable!( + "BRC-20 token transfer send should never be generated on reveal" + ) + } } } + OrdinalOperation::InscriptionTransferred(transfer) => { + unverified_ordinal_transfers.push((&tx.transaction_identifier, transfer)); + } } } } + // Verify any dangling ordinal transfers and augment these results back to the block. 
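
The write-back step that follows relies on each verified transfer carrying the `tx_index` it originated from: results are sorted and then attached to the matching transaction in the block. A minimal, std-only sketch of that pattern (simplified structs, not the crate's `BitcoinBlockData` types):

```rust
struct Tx {
    brc20_operation: Option<String>,
}

struct Block {
    transactions: Vec<Tx>,
}

fn apply_verified(block: &mut Block, mut verified: Vec<(usize, String)>) {
    // Apply in transaction order so results land deterministically within the block.
    verified.sort_by_key(|(tx_index, _)| *tx_index);
    for (tx_index, op) in verified {
        if let Some(tx) = block.transactions.get_mut(tx_index) {
            tx.brc20_operation = Some(op);
        }
    }
}

fn main() {
    let mut block = Block {
        transactions: vec![Tx { brc20_operation: None }, Tx { brc20_operation: None }],
    };
    apply_verified(
        &mut block,
        vec![(1, "transfer_send".to_string()), (0, "transfer_send".to_string())],
    );
    assert!(block.transactions[0].brc20_operation.is_some());
    assert!(block.transactions[1].brc20_operation.is_some());
}
```

Here the sort is folded into the apply step; in the diff it happens earlier, in `index_unverified_brc20_transfers`, before the cache inserts.
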
+ verified_brc20_transfers.append( + &mut index_unverified_brc20_transfers( + &unverified_ordinal_transfers, + &block.block_identifier, + block.timestamp, + brc20_cache, + brc20_db_tx, + ctx, + ) + .await?, + ); + for (tx_index, verified_transfer) in verified_brc20_transfers.into_iter() { + block + .transactions + .get_mut(tx_index) + .unwrap() + .metadata + .brc20_operation = Some(verified_transfer); + } + // Write all changes to DB. brc20_cache.db_cache.flush(brc20_db_tx).await?; Ok(()) } + +#[cfg(test)] +mod test { + use std::collections::HashMap; + + use chainhook_postgres::{pg_begin, pg_pool_client}; + use chainhook_sdk::types::{ + Brc20BalanceData, Brc20Operation, Brc20TokenDeployData, Brc20TransferData, + OrdinalInscriptionTransferDestination, OrdinalOperation, + }; + + use crate::{ + core::{ + meta_protocols::brc20::{ + brc20_pg, + cache::Brc20MemoryCache, + index::index_block_and_insert_brc20_operations, + parser::{ + ParsedBrc20BalanceData, ParsedBrc20Operation, ParsedBrc20TokenDeployData, + }, + test_utils::{get_test_ctx, Brc20RevealBuilder, Brc20TransferBuilder}, + }, + test_builders::{TestBlockBuilder, TestTransactionBuilder}, + }, + db::{pg_test_clear_db, pg_test_connection, pg_test_connection_pool}, + }; + + #[tokio::test] + async fn test_full_block_indexing() -> Result<(), String> { + let ctx = get_test_ctx(); + let mut pg_client = pg_test_connection().await; + let _ = brc20_pg::migrate(&mut pg_client).await; + let result = { + let mut brc20_client = pg_pool_client(&pg_test_connection_pool()).await?; + let client = pg_begin(&mut brc20_client).await?; + + // Deploy a token, mint and transfer some balance. + let mut operation_map: HashMap = HashMap::new(); + operation_map.insert( + "01d6876703d25747bf5767f3d830548ebe09ffcade91d49e558eb9b6fd2d6d56i0".to_string(), + ParsedBrc20Operation::Deploy(ParsedBrc20TokenDeployData { + tick: "pepe".to_string(), + display_tick: "pepe".to_string(), + max: "100".to_string(), + lim: "1".to_string(), + dec: "0".to_string(), + self_mint: false, + }), + ); + operation_map.insert( + "2e72578e1259b7dab363cb422ae1979ea329ffc0978c4a7552af907238db354ci0".to_string(), + ParsedBrc20Operation::Mint(ParsedBrc20BalanceData { + tick: "pepe".to_string(), + amt: "1".to_string(), + }), + ); + operation_map.insert( + "a8494261df7d4980af988dfc0241bb7ec95051afdbb86e3bea9c3ab055e898f3i0".to_string(), + ParsedBrc20Operation::Transfer(ParsedBrc20BalanceData { + tick: "pepe".to_string(), + amt: "1".to_string(), + }), + ); + + let mut block = TestBlockBuilder::new() + .hash( + "00000000000000000000a646fc25f31be344cab3e6e31ec26010c40173ad4bd3".to_string(), + ) + .height(818000) + .add_transaction( + TestTransactionBuilder::new() + .add_ordinal_operation(OrdinalOperation::InscriptionRevealed( + Brc20RevealBuilder::new() + .inscription_number(0) + .ordinal_number(100) + .inscription_id("01d6876703d25747bf5767f3d830548ebe09ffcade91d49e558eb9b6fd2d6d56i0") + .inscriber_address(Some("19PFYXeUuArA3vRDHh2zz8tupAYNFqjBCP".to_string())) + .build(), + )) + .build(), + ) + .add_transaction( + TestTransactionBuilder::new() + .add_ordinal_operation(OrdinalOperation::InscriptionRevealed( + Brc20RevealBuilder::new() + .inscription_number(1) + .ordinal_number(200) + .inscription_id("2e72578e1259b7dab363cb422ae1979ea329ffc0978c4a7552af907238db354ci0") + .inscriber_address(Some("19PFYXeUuArA3vRDHh2zz8tupAYNFqjBCP".to_string())) + .build(), + )) + .build(), + ) + .add_transaction( + TestTransactionBuilder::new() + .add_ordinal_operation(OrdinalOperation::InscriptionRevealed( + 
Brc20RevealBuilder::new() + .inscription_number(2) + .ordinal_number(300) + .inscription_id("a8494261df7d4980af988dfc0241bb7ec95051afdbb86e3bea9c3ab055e898f3i0") + .inscriber_address(Some("19PFYXeUuArA3vRDHh2zz8tupAYNFqjBCP".to_string())) + .build(), + )) + .build(), + ) + .add_transaction( + TestTransactionBuilder::new() + .add_ordinal_operation(OrdinalOperation::InscriptionTransferred( + Brc20TransferBuilder::new() + .tx_index(3) + .ordinal_number(300) + .destination( + OrdinalInscriptionTransferDestination::Transferred("3Ezed1AvfdnXFTMZqhMdhdq9hBMTqfx8Yz".to_string() + )) + .build() + )) + .build(), + ) + .build(); + let mut cache = Brc20MemoryCache::new(10); + + let result = index_block_and_insert_brc20_operations( + &mut block, + &mut operation_map, + &mut cache, + &client, + &ctx, + ) + .await; + + assert_eq!( + block + .transactions + .get(0) + .unwrap() + .metadata + .brc20_operation + .as_ref() + .unwrap(), + &Brc20Operation::Deploy(Brc20TokenDeployData { + tick: "pepe".to_string(), + max: "100".to_string(), + lim: "1".to_string(), + dec: "0".to_string(), + self_mint: false, + address: "19PFYXeUuArA3vRDHh2zz8tupAYNFqjBCP".to_string(), + inscription_id: + "01d6876703d25747bf5767f3d830548ebe09ffcade91d49e558eb9b6fd2d6d56i0" + .to_string(), + }) + ); + assert_eq!( + block + .transactions + .get(1) + .unwrap() + .metadata + .brc20_operation + .as_ref() + .unwrap(), + &Brc20Operation::Mint(Brc20BalanceData { + tick: "pepe".to_string(), + amt: "1".to_string(), + address: "19PFYXeUuArA3vRDHh2zz8tupAYNFqjBCP".to_string(), + inscription_id: + "2e72578e1259b7dab363cb422ae1979ea329ffc0978c4a7552af907238db354ci0" + .to_string() + }) + ); + assert_eq!( + block + .transactions + .get(2) + .unwrap() + .metadata + .brc20_operation + .as_ref() + .unwrap(), + &Brc20Operation::Transfer(Brc20BalanceData { + tick: "pepe".to_string(), + amt: "1".to_string(), + address: "19PFYXeUuArA3vRDHh2zz8tupAYNFqjBCP".to_string(), + inscription_id: + "a8494261df7d4980af988dfc0241bb7ec95051afdbb86e3bea9c3ab055e898f3i0" + .to_string() + }) + ); + assert_eq!( + block + .transactions + .get(3) + .unwrap() + .metadata + .brc20_operation + .as_ref() + .unwrap(), + &Brc20Operation::TransferSend(Brc20TransferData { + tick: "pepe".to_string(), + amt: "1".to_string(), + sender_address: "19PFYXeUuArA3vRDHh2zz8tupAYNFqjBCP".to_string(), + receiver_address: "3Ezed1AvfdnXFTMZqhMdhdq9hBMTqfx8Yz".to_string(), + inscription_id: + "a8494261df7d4980af988dfc0241bb7ec95051afdbb86e3bea9c3ab055e898f3i0" + .to_string() + }) + ); + + result + }; + pg_test_clear_db(&mut pg_client).await; + result + } +} diff --git a/components/ordhook-core/src/core/meta_protocols/brc20/mod.rs b/components/ordhook-core/src/core/meta_protocols/brc20/mod.rs index f975bbd7..724fda47 100644 --- a/components/ordhook-core/src/core/meta_protocols/brc20/mod.rs +++ b/components/ordhook-core/src/core/meta_protocols/brc20/mod.rs @@ -53,6 +53,9 @@ pub fn decimals_str_amount_to_u128(amt: &String, decimals: u8) -> Result String { let num_str = amount.to_string(); + if decimals == 0 { + return num_str; + } let decimal_point = num_str.len() as i32 - decimals as i32; if decimal_point < 0 { let padding = "0".repeat(decimal_point.abs() as usize); diff --git a/components/ordhook-core/src/core/meta_protocols/brc20/test_utils.rs b/components/ordhook-core/src/core/meta_protocols/brc20/test_utils.rs index 36d96452..ee64cfc5 100644 --- a/components/ordhook-core/src/core/meta_protocols/brc20/test_utils.rs +++ b/components/ordhook-core/src/core/meta_protocols/brc20/test_utils.rs @@ 
-98,6 +98,7 @@ pub struct Brc20TransferBuilder { pub ordinal_number: u64, pub destination: OrdinalInscriptionTransferDestination, pub satpoint_post_transfer: String, + pub tx_index: usize, } impl Brc20TransferBuilder { @@ -107,7 +108,8 @@ impl Brc20TransferBuilder { destination: OrdinalInscriptionTransferDestination::Transferred( "bc1pls75sfwullhygkmqap344f5cqf97qz95lvle6fvddm0tpz2l5ffslgq3m0".to_string(), ), - satpoint_post_transfer: "e45957c419f130cd5c88cdac3eb1caf2d118aee20c17b15b74a611be395a065d:0:0".to_string() + satpoint_post_transfer: "e45957c419f130cd5c88cdac3eb1caf2d118aee20c17b15b74a611be395a065d:0:0".to_string(), + tx_index: 0, } } @@ -121,6 +123,11 @@ impl Brc20TransferBuilder { self } + pub fn tx_index(mut self, val: usize) -> Self { + self.tx_index = val; + self + } + pub fn build(self) -> OrdinalInscriptionTransferData { OrdinalInscriptionTransferData { ordinal_number: self.ordinal_number, @@ -128,7 +135,7 @@ impl Brc20TransferBuilder { satpoint_pre_transfer: "".to_string(), satpoint_post_transfer: self.satpoint_post_transfer, post_transfer_output_value: Some(500), - tx_index: 0, + tx_index: self.tx_index, } } } diff --git a/components/ordhook-core/src/core/meta_protocols/brc20/verifier.rs b/components/ordhook-core/src/core/meta_protocols/brc20/verifier.rs index 6fc9c0f8..dae58d05 100644 --- a/components/ordhook-core/src/core/meta_protocols/brc20/verifier.rs +++ b/components/ordhook-core/src/core/meta_protocols/brc20/verifier.rs @@ -1,6 +1,8 @@ +use std::collections::HashMap; + use chainhook_sdk::types::{ BitcoinNetwork, BlockIdentifier, OrdinalInscriptionRevealData, OrdinalInscriptionTransferData, - OrdinalInscriptionTransferDestination, + OrdinalInscriptionTransferDestination, TransactionIdentifier, }; use chainhook_sdk::utils::Context; use deadpool_postgres::Transaction; @@ -214,49 +216,73 @@ pub async fn verify_brc20_operation( }; } -pub async fn verify_brc20_transfer( - transfer: &OrdinalInscriptionTransferData, +/// Given a list of ordinal transfers, verify which of them are valid `transfer_send` BRC-20 operations we haven't yet processed. +/// Return verified transfer data for each valid operation. +pub async fn verify_brc20_transfers( + transfers: &Vec<(&TransactionIdentifier, &OrdinalInscriptionTransferData)>, cache: &mut Brc20MemoryCache, db_tx: &Transaction<'_>, ctx: &Context, -) -> Result, String> { - let Some(transfer_row) = cache - .get_unsent_token_transfer(transfer.ordinal_number, db_tx) - .await? - else { - try_debug!( - ctx, - "BRC-20: No BRC-20 transfer in ordinal {} or transfer already sent", - transfer.ordinal_number - ); - return Ok(None); - }; - match &transfer.destination { - OrdinalInscriptionTransferDestination::Transferred(receiver_address) => { - return Ok(Some(VerifiedBrc20TransferData { - tick: transfer_row.ticker.clone(), - amt: transfer_row.amount.0, - sender_address: transfer_row.address.clone(), - receiver_address: receiver_address.to_string(), - })); - } - OrdinalInscriptionTransferDestination::SpentInFees => { - return Ok(Some(VerifiedBrc20TransferData { - tick: transfer_row.ticker.clone(), - amt: transfer_row.amount.0, - sender_address: transfer_row.address.clone(), - receiver_address: transfer_row.address.clone(), // Return to sender - })); +) -> Result< + Vec<( + String, + VerifiedBrc20TransferData, + OrdinalInscriptionTransferData, + TransactionIdentifier, + )>, + String, +> { + try_debug!(ctx, "BRC-20 verifying {} ordinal transfers", transfers.len()); + + // Select ordinal numbers to analyze for pending BRC20 transfers. 
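
The verification below first dedupes the block's transfers by ordinal number, batch-fetches the ones still unsent, and then joins the returned rows back to the transfer that produced them. A minimal, std-only sketch of that shape, using plain `usize`/`u64` tuples instead of the crate's row and identifier types:

```rust
use std::collections::HashMap;

/// `transfers` pairs a tx index with the ordinal number it moves; `unsent_rows`
/// stands in for the ordinal numbers the batched DB lookup reports as still unsent.
fn join_unsent(transfers: &[(usize, u64)], unsent_rows: &[u64]) -> Vec<(usize, u64)> {
    // Key candidates by ordinal number so each number is queried only once.
    let mut candidates: HashMap<u64, (usize, u64)> = HashMap::new();
    for t in transfers {
        candidates.entry(t.1).or_insert(*t);
    }
    // Match rows coming back from the DB to the transfer that produced them.
    unsent_rows
        .iter()
        .filter_map(|n| candidates.get(n).copied())
        .collect()
}
```
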
+ let mut ordinal_numbers = vec![]; + let mut candidate_transfers = HashMap::new(); + for (tx_identifier, data) in transfers.iter() { + if !candidate_transfers.contains_key(&data.ordinal_number) { + ordinal_numbers.push(&data.ordinal_number); + candidate_transfers.insert(&data.ordinal_number, (*tx_identifier, *data)); } - OrdinalInscriptionTransferDestination::Burnt(_) => { - return Ok(Some(VerifiedBrc20TransferData { + } + // Check cache for said transfers. + let db_operations = cache + .get_unsent_token_transfers(&ordinal_numbers, db_tx) + .await?; + if db_operations.is_empty() { + return Ok(vec![]); + } + // Return any resulting `transfer_send` operations. + let mut results = vec![]; + for transfer_row in db_operations.into_iter() { + let (tx_identifier, data) = candidate_transfers + .get(&transfer_row.ordinal_number.0) + .unwrap(); + let verified = match &data.destination { + OrdinalInscriptionTransferDestination::Transferred(receiver_address) => { + VerifiedBrc20TransferData { + tick: transfer_row.ticker.clone(), + amt: transfer_row.amount.0, + sender_address: transfer_row.address.clone(), + receiver_address: receiver_address.to_string(), + } + } + OrdinalInscriptionTransferDestination::SpentInFees => { + VerifiedBrc20TransferData { + tick: transfer_row.ticker.clone(), + amt: transfer_row.amount.0, + sender_address: transfer_row.address.clone(), + receiver_address: transfer_row.address.clone(), // Return to sender + } + } + OrdinalInscriptionTransferDestination::Burnt(_) => VerifiedBrc20TransferData { tick: transfer_row.ticker.clone(), amt: transfer_row.amount.0, sender_address: transfer_row.address.clone(), receiver_address: "".to_string(), - })); - } - }; + }, + }; + results.push((transfer_row.inscription_id, verified, (*data).clone(), (*tx_identifier).clone())); + } + return Ok(results); } #[cfg(test)] @@ -282,7 +308,7 @@ mod test { db::{pg_test_clear_db, pg_test_connection, pg_test_connection_pool}, }; - use super::{verify_brc20_operation, verify_brc20_transfer, VerifiedBrc20TransferData}; + use super::{verify_brc20_operation, verify_brc20_transfers, VerifiedBrc20TransferData}; #[test_case( ParsedBrc20Operation::Deploy(ParsedBrc20TokenDeployData { @@ -1067,10 +1093,13 @@ mod test { 2, &client ).await?; - verify_brc20_transfer(&transfer, &mut cache, &client, &ctx).await + verify_brc20_transfers(&vec![(&tx, &transfer)], &mut cache, &client, &ctx).await? }; pg_test_clear_db(&mut pg_client).await; - result + let Some(result) = result.first() else { + return Ok(None); + }; + Ok(Some(result.1.clone())) } #[test_case( @@ -1176,9 +1205,12 @@ mod test { &client, ) .await?; - verify_brc20_transfer(&transfer, &mut cache, &client, &ctx).await + verify_brc20_transfers(&vec![(&tx, &transfer)], &mut cache, &client, &ctx).await? }; pg_test_clear_db(&mut pg_client).await; - result + let Some(result) = result.first() else { + return Ok(None); + }; + Ok(Some(result.1.clone())) } }
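
The destination handling above maps each transfer outcome to a receiver: a normal transfer credits the new owner, a transfer spent in fees returns the balance to the sender, and a burn credits an empty address. A minimal, std-only sketch of that mapping (a simplified enum standing in for the SDK's `OrdinalInscriptionTransferDestination`):

```rust
enum Destination {
    Transferred(String),
    SpentInFees,
    Burnt(String),
}

fn receiver_for(sender: &str, destination: &Destination) -> String {
    match destination {
        Destination::Transferred(receiver) => receiver.clone(),
        Destination::SpentInFees => sender.to_string(), // return to sender
        Destination::Burnt(_) => String::new(),         // burned balance goes to no one
    }
}

fn main() {
    assert_eq!(receiver_for("bc1qsender", &Destination::SpentInFees), "bc1qsender");
    assert_eq!(receiver_for("bc1qsender", &Destination::Burnt("data".into())), "");
    assert_eq!(
        receiver_for("bc1qsender", &Destination::Transferred("bc1qreceiver".into())),
        "bc1qreceiver"
    );
}
```
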