From 367a6f49a110e621148502e990ddb35c2231099b Mon Sep 17 00:00:00 2001 From: 0xaptosj <129789810+0xaptosj@users.noreply.github.com> Date: Sat, 18 Jan 2025 23:43:11 +0800 Subject: [PATCH] marketplace indexer (#4) * nit * define all tables * fix migration * only have 2 tables * done with marketplace db schema * storer wip * save * time to test non tradeport indexer * nit * tradeport wip * skip tls validation * Update .gitignore * nit * fix event struct * log * fix * nit * dedup order placed events * debug * Update ask_filled_event_storer.rs * debug * debug * Update ask_filled_event_storer.rs * debug * log * refactor wip * use default vaule when not writing to db * fmt * fix default value * wip * added all tradeport events * parse tradeport * nit * nit --- indexer/.gitignore | 2 +- indexer/Cargo.lock | 27 +++ indexer/Cargo.toml | 2 + indexer/Dockerfile | 7 +- indexer/configs/example.config.yaml | 21 -- ...ample.contract_upgrade_indexer_config.yaml | 3 + .../example.marketplace_indexer_config.yaml | 5 +- .../src/config/indexer_processor_config.rs | 22 +- indexer/src/config/processor_config.rs | 8 +- .../contract_upgrade_schema.rs | 54 ----- .../down.sql | 2 - .../2024-08-01-224558_ledger-infos/down.sql | 2 - .../2024-08-01-224558_ledger-infos/up.sql | 3 - .../diesel.toml | 2 +- .../down.sql | 0 .../up.sql | 0 .../down.sql | 2 + .../up.sql | 2 +- .../2024-07-18-202400_test-migration/down.sql | 0 .../2024-07-18-202400_test-migration/up.sql | 2 +- .../2024-08-01-224558_ledger-infos/down.sql | 2 + .../2024-08-01-224558_ledger-infos/up.sql | 3 + .../down.sql | 0 .../up.sql | 0 .../down.sql | 0 .../up.sql | 0 .../down.sql | 2 + .../up.sql | 47 ++++ .../down.sql | 16 ++ .../up.sql | 16 ++ .../down.sql | 2 + .../up.sql | 47 ++++ .../down.sql | 16 ++ .../up.sql | 16 ++ .../down.sql | 2 + .../up.sql | 39 ++++ .../down.sql | 10 + .../up.sql | 10 + .../down.sql | 2 + .../up.sql | 23 ++ indexer/src/db_migrations/schema.rs | 195 ++++++++++++++++ indexer/src/db_models/collection_bids.rs | 33 +++ .../db_models/contract_upgrade_indexer/mod.rs | 4 - .../src/db_models/filled_collection_bids.rs | 21 ++ .../ledger_info.rs | 5 +- indexer/src/db_models/mod.rs | 9 +- .../module_upgrade.rs | 2 +- indexer/src/db_models/nft_asks.rs | 37 +++ indexer/src/db_models/nft_bids.rs | 37 +++ .../package_upgrade.rs | 2 +- .../processor_status.rs | 5 +- .../contract_upgrade_indexer/extractor.rs | 10 +- .../contract_upgrade_indexer/processor.rs | 3 +- .../storers/upgrade_module_change_storer.rs | 4 +- .../storers/upgrade_package_change_storer.rs | 4 +- .../aptos_labs_contract_event_parser.rs | 131 +++++++++++ .../marketplace_indexer/event_parsers/mod.rs | 3 + .../tradeport_contract_v1_event_parser.rs | 190 ++++++++++++++++ .../tradeport_contract_v2_event_parser.rs | 174 ++++++++++++++ .../indexers/marketplace_indexer/extractor.rs | 193 ++++++++++++++++ .../src/indexers/marketplace_indexer/mod.rs | 5 + .../indexers/marketplace_indexer/processor.rs | 104 +++++++++ .../indexers/marketplace_indexer/storer.rs | 193 ++++++++++++++++ .../storers/ask_cancelled_event_storer.rs | 86 +++++++ .../storers/ask_filled_event_storer.rs | 87 +++++++ .../storers/ask_placed_event_storer.rs | 103 +++++++++ .../storers/bid_cancelled_event_storer.rs | 86 +++++++ .../storers/bid_filled_event_storer.rs | 87 +++++++ .../storers/bid_placed_event_storer.rs | 103 +++++++++ .../collection_bid_cancelled_event_storer.rs | 90 ++++++++ .../collection_bid_filled_event_storer.rs | 176 +++++++++++++++ .../collection_bid_placed_event_storer.rs | 111 +++++++++ 
.../marketplace_indexer/storers/mod.rs | 9 + indexer/src/indexers/mod.rs | 2 +- indexer/src/indexers/tradeport_indexer/mod.rs | 1 - indexer/src/lib.rs | 5 +- .../collection_bid_event.rs | 164 ++++++++++++++ .../aptos_labs_contract_events/mod.rs | 4 + .../nft_ask_event.rs | 169 ++++++++++++++ .../nft_bid_event.rs | 155 +++++++++++++ .../aptos_labs_contract_events/shared.rs | 102 +++++++++ indexer/src/onchain_events/mod.rs | 2 + .../collection_bid_event_v1.rs | 160 +++++++++++++ .../collection_bid_event_v2.rs | 155 +++++++++++++ .../tradeport_contract_events/mod.rs | 7 + .../nft_ask_event_v1.rs | 213 ++++++++++++++++++ .../nft_ask_event_v2.rs | 153 +++++++++++++ .../nft_bid_event_v1.rs | 156 +++++++++++++ .../nft_bid_event_v2.rs | 153 +++++++++++++ .../tradeport_contract_events/shared.rs | 53 +++++ indexer/src/utils/aptos_utils.rs | 23 ++ indexer/src/utils/chain_id.rs | 4 +- indexer/src/utils/database_connection.rs | 1 + indexer/src/utils/database_execution.rs | 19 ++ .../utils/latest_processed_version_tracker.rs | 5 +- indexer/src/utils/mod.rs | 2 + indexer/src/utils/starting_version.rs | 2 +- indexer/src/utils/time_utils.rs | 8 + 98 files changed, 4310 insertions(+), 129 deletions(-) delete mode 100644 indexer/configs/example.config.yaml delete mode 100644 indexer/src/db_migrations/contract_upgrade_indexer/contract_upgrade_schema.rs delete mode 100644 indexer/src/db_migrations/contract_upgrade_indexer/migrations/2024-07-18-194547_create-processor-status/down.sql delete mode 100644 indexer/src/db_migrations/contract_upgrade_indexer/migrations/2024-08-01-224558_ledger-infos/down.sql delete mode 100644 indexer/src/db_migrations/contract_upgrade_indexer/migrations/2024-08-01-224558_ledger-infos/up.sql rename indexer/src/db_migrations/{contract_upgrade_indexer => }/diesel.toml (82%) rename indexer/src/db_migrations/{contract_upgrade_indexer => }/migrations/00000000000000_diesel_initial_setup/down.sql (100%) rename indexer/src/db_migrations/{contract_upgrade_indexer => }/migrations/00000000000000_diesel_initial_setup/up.sql (100%) create mode 100644 indexer/src/db_migrations/migrations/2024-07-18-194547_create-processor-status/down.sql rename indexer/src/db_migrations/{contract_upgrade_indexer => }/migrations/2024-07-18-194547_create-processor-status/up.sql (86%) rename indexer/src/db_migrations/{contract_upgrade_indexer => }/migrations/2024-07-18-202400_test-migration/down.sql (100%) rename indexer/src/db_migrations/{contract_upgrade_indexer => }/migrations/2024-07-18-202400_test-migration/up.sql (54%) create mode 100644 indexer/src/db_migrations/migrations/2024-08-01-224558_ledger-infos/down.sql create mode 100644 indexer/src/db_migrations/migrations/2024-08-01-224558_ledger-infos/up.sql rename indexer/src/db_migrations/{contract_upgrade_indexer => }/migrations/2024-10-17-235031_add-module-upgrade-history-table/down.sql (100%) rename indexer/src/db_migrations/{contract_upgrade_indexer => }/migrations/2024-10-17-235031_add-module-upgrade-history-table/up.sql (100%) rename indexer/src/db_migrations/{contract_upgrade_indexer => }/migrations/2024-10-17-235032_add-package-upgrade-history-table/down.sql (100%) rename indexer/src/db_migrations/{contract_upgrade_indexer => }/migrations/2024-10-17-235032_add-package-upgrade-history-table/up.sql (100%) create mode 100644 indexer/src/db_migrations/migrations/2024-12-03-235031_add-nft-asks-table/down.sql create mode 100644 indexer/src/db_migrations/migrations/2024-12-03-235031_add-nft-asks-table/up.sql create mode 100644 
indexer/src/db_migrations/migrations/2024-12-10-014414_add-index-to-nft-asks-table/down.sql create mode 100644 indexer/src/db_migrations/migrations/2024-12-10-014414_add-index-to-nft-asks-table/up.sql create mode 100644 indexer/src/db_migrations/migrations/2024-12-10-015223_add-nft-bids-table/down.sql create mode 100644 indexer/src/db_migrations/migrations/2024-12-10-015223_add-nft-bids-table/up.sql create mode 100644 indexer/src/db_migrations/migrations/2024-12-10-195135_add-index-to-nft-bids-table/down.sql create mode 100644 indexer/src/db_migrations/migrations/2024-12-10-195135_add-index-to-nft-bids-table/up.sql create mode 100644 indexer/src/db_migrations/migrations/2024-12-18-214546_add-collection-bids-table/down.sql create mode 100644 indexer/src/db_migrations/migrations/2024-12-18-214546_add-collection-bids-table/up.sql create mode 100644 indexer/src/db_migrations/migrations/2024-12-18-214551_add-index-to-collection-bids-table/down.sql create mode 100644 indexer/src/db_migrations/migrations/2024-12-18-214551_add-index-to-collection-bids-table/up.sql create mode 100644 indexer/src/db_migrations/migrations/2024-12-19-195341_add-filled-collection-bids-table/down.sql create mode 100644 indexer/src/db_migrations/migrations/2024-12-19-195341_add-filled-collection-bids-table/up.sql create mode 100644 indexer/src/db_migrations/schema.rs create mode 100644 indexer/src/db_models/collection_bids.rs delete mode 100644 indexer/src/db_models/contract_upgrade_indexer/mod.rs create mode 100644 indexer/src/db_models/filled_collection_bids.rs rename indexer/src/db_models/{contract_upgrade_indexer => }/ledger_info.rs (80%) rename indexer/src/db_models/{contract_upgrade_indexer => }/module_upgrade.rs (90%) create mode 100644 indexer/src/db_models/nft_asks.rs create mode 100644 indexer/src/db_models/nft_bids.rs rename indexer/src/db_models/{contract_upgrade_indexer => }/package_upgrade.rs (97%) rename indexer/src/db_models/{contract_upgrade_indexer => }/processor_status.rs (88%) create mode 100644 indexer/src/indexers/marketplace_indexer/event_parsers/aptos_labs_contract_event_parser.rs create mode 100644 indexer/src/indexers/marketplace_indexer/event_parsers/mod.rs create mode 100644 indexer/src/indexers/marketplace_indexer/event_parsers/tradeport_contract_v1_event_parser.rs create mode 100644 indexer/src/indexers/marketplace_indexer/event_parsers/tradeport_contract_v2_event_parser.rs create mode 100644 indexer/src/indexers/marketplace_indexer/extractor.rs create mode 100644 indexer/src/indexers/marketplace_indexer/mod.rs create mode 100644 indexer/src/indexers/marketplace_indexer/processor.rs create mode 100644 indexer/src/indexers/marketplace_indexer/storer.rs create mode 100644 indexer/src/indexers/marketplace_indexer/storers/ask_cancelled_event_storer.rs create mode 100644 indexer/src/indexers/marketplace_indexer/storers/ask_filled_event_storer.rs create mode 100644 indexer/src/indexers/marketplace_indexer/storers/ask_placed_event_storer.rs create mode 100644 indexer/src/indexers/marketplace_indexer/storers/bid_cancelled_event_storer.rs create mode 100644 indexer/src/indexers/marketplace_indexer/storers/bid_filled_event_storer.rs create mode 100644 indexer/src/indexers/marketplace_indexer/storers/bid_placed_event_storer.rs create mode 100644 indexer/src/indexers/marketplace_indexer/storers/collection_bid_cancelled_event_storer.rs create mode 100644 indexer/src/indexers/marketplace_indexer/storers/collection_bid_filled_event_storer.rs create mode 100644 
indexer/src/indexers/marketplace_indexer/storers/collection_bid_placed_event_storer.rs create mode 100644 indexer/src/indexers/marketplace_indexer/storers/mod.rs delete mode 100644 indexer/src/indexers/tradeport_indexer/mod.rs create mode 100644 indexer/src/onchain_events/aptos_labs_contract_events/collection_bid_event.rs create mode 100644 indexer/src/onchain_events/aptos_labs_contract_events/mod.rs create mode 100644 indexer/src/onchain_events/aptos_labs_contract_events/nft_ask_event.rs create mode 100644 indexer/src/onchain_events/aptos_labs_contract_events/nft_bid_event.rs create mode 100644 indexer/src/onchain_events/aptos_labs_contract_events/shared.rs create mode 100644 indexer/src/onchain_events/mod.rs create mode 100644 indexer/src/onchain_events/tradeport_contract_events/collection_bid_event_v1.rs create mode 100644 indexer/src/onchain_events/tradeport_contract_events/collection_bid_event_v2.rs create mode 100644 indexer/src/onchain_events/tradeport_contract_events/mod.rs create mode 100644 indexer/src/onchain_events/tradeport_contract_events/nft_ask_event_v1.rs create mode 100644 indexer/src/onchain_events/tradeport_contract_events/nft_ask_event_v2.rs create mode 100644 indexer/src/onchain_events/tradeport_contract_events/nft_bid_event_v1.rs create mode 100644 indexer/src/onchain_events/tradeport_contract_events/nft_bid_event_v2.rs create mode 100644 indexer/src/onchain_events/tradeport_contract_events/shared.rs create mode 100644 indexer/src/utils/aptos_utils.rs create mode 100644 indexer/src/utils/time_utils.rs diff --git a/indexer/.gitignore b/indexer/.gitignore index c962f42..93163f0 100644 --- a/indexer/.gitignore +++ b/indexer/.gitignore @@ -21,4 +21,4 @@ local.config.yaml cloud.config.yaml config.yaml contract_upgrade_indexer_config.yaml -marketplace_indexer_config.yaml +*_marketplace_indexer_config.yaml diff --git a/indexer/Cargo.lock b/indexer/Cargo.lock index 369651d..652fad8 100644 --- a/indexer/Cargo.lock +++ b/indexer/Cargo.lock @@ -246,6 +246,12 @@ dependencies = [ "url", ] +[[package]] +name = "arrayref" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb" + [[package]] name = "arrayvec" version = "0.7.6" @@ -528,6 +534,19 @@ version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" +[[package]] +name = "blake3" +version = "1.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8ee0c1824c4dea5b5f81736aff91bae041d2c07ee1192bec91054e10e3e601e" +dependencies = [ + "arrayref", + "arrayvec", + "cc", + "cfg-if", + "constant_time_eq", +] + [[package]] name = "block-buffer" version = "0.10.4" @@ -651,6 +670,12 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0" +[[package]] +name = "constant_time_eq" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" + [[package]] name = "core-foundation" version = "0.9.4" @@ -1511,6 +1536,7 @@ dependencies = [ "aptos-indexer-processor-sdk", "aptos-indexer-processor-sdk-server-framework", "async-trait", + "blake3", "chrono", "clap", "diesel", @@ -1518,6 +1544,7 @@ dependencies = [ "diesel_migrations", "field_count", "futures-util", + "hex", "jemallocator", "native-tls", 
"num_cpus", diff --git a/indexer/Cargo.toml b/indexer/Cargo.toml index 2cfcb5c..8954184 100644 --- a/indexer/Cargo.toml +++ b/indexer/Cargo.toml @@ -16,6 +16,7 @@ aptos-indexer-processor-sdk-server-framework = { git = "https://github.com/aptos ahash = { version = "0.8.7", features = ["serde"] } anyhow = "1.0.86" async-trait = "0.1.80" +blake3 = "1.5.5" chrono = { version = "0.4.19", features = ["clock", "serde"] } clap = { version = "4.3.5", features = ["derive", "unstable-styles"] } # Do NOT enable the postgres feature here, it is conditionally enabled in a feature @@ -38,6 +39,7 @@ diesel-async = { git = "https://github.com/weiznich/diesel_async.git", rev = "d0 diesel_migrations = { version = "2.1.0", features = ["postgres"] } field_count = "0.1.1" futures-util = "0.3.21" +hex = "0.4.3" jemallocator = { version = "0.5.0", features = [ "profiling", "unprefixed_malloc_on_supported_platforms", diff --git a/indexer/Dockerfile b/indexer/Dockerfile index ae9d217..233f69e 100644 --- a/indexer/Dockerfile +++ b/indexer/Dockerfile @@ -39,10 +39,11 @@ WORKDIR /usr/src/app COPY --from=builder /usr/src/app/target/x86_64-unknown-linux-gnu/release/indexer . # Copy the configuration file -COPY configs/contract_upgrade_indexer_config.yaml /secrets/config +COPY configs/contract_upgrade_indexer_config.yaml /secrets/contract_upgrade_config +COPY configs/marketplace_indexer_config.yaml /secrets/marketplace_config # Expose the port your application is using EXPOSE 8080 -# Set the command to run the application -CMD ["./indexer", "-c", "/secrets/config"] +# Set the binary as entrypoint so we can pass config as argument +ENTRYPOINT ["./indexer", "-c"] diff --git a/indexer/configs/example.config.yaml b/indexer/configs/example.config.yaml deleted file mode 100644 index 85cb674..0000000 --- a/indexer/configs/example.config.yaml +++ /dev/null @@ -1,21 +0,0 @@ -# This is a template yaml for the aptos-indexer-processor. 
-health_check_port: 8085 -server_config: - processor_config: - type: "events_processor" - transaction_stream_config: - indexer_grpc_data_service_address: "https://grpc.testnet.aptoslabs.com:443" - # At which tx version to start indexing, usually this is the tx version when the contract was deployed - starting_version: 5936597868 - # At which tx version to stop indexing - # request_ending_version: 10000 - # Go to https://developers.aptoslabs.com/ to create a project and get an API token - auth_token: "auth_token_you_can_get_from_aptos_build" - request_name_header: "events-processor" - db_config: - # do not include the ?sslmode=require in the connection string when using cloud DB, it will cause an error - postgres_connection_string: "postgresql://username:password@neon_host/db_name" - # we set db_pool_size to a lower number on cloud because we use a free plan - db_pool_size: 25 - contract_config: - contract_address: "your_contract_address" diff --git a/indexer/configs/example.contract_upgrade_indexer_config.yaml b/indexer/configs/example.contract_upgrade_indexer_config.yaml index 0b98d9e..1ea6086 100644 --- a/indexer/configs/example.contract_upgrade_indexer_config.yaml +++ b/indexer/configs/example.contract_upgrade_indexer_config.yaml @@ -4,6 +4,9 @@ server_config: type: "contract_upgrade_indexer" transaction_stream_config: indexer_grpc_data_service_address: "https://grpc.mainnet.aptoslabs.com:443" + # rarible starting version: 1011760686 + # wapal starting version: 216804176 + # tradeport starting version: 94194505 starting_version: 1 # request_ending_version: 10000 auth_token: "_" diff --git a/indexer/configs/example.marketplace_indexer_config.yaml b/indexer/configs/example.marketplace_indexer_config.yaml index 9107a6b..a272f53 100644 --- a/indexer/configs/example.marketplace_indexer_config.yaml +++ b/indexer/configs/example.marketplace_indexer_config.yaml @@ -4,6 +4,9 @@ server_config: type: "marketplace_indexer" transaction_stream_config: indexer_grpc_data_service_address: "https://grpc.mainnet.aptoslabs.com:443" + # rarible starting version: 1011760686 + # wapal starting version: 216804176 + # tradeport starting version: 94194505 starting_version: 1 # request_ending_version: 10000 auth_token: "" @@ -16,7 +19,7 @@ server_config: # see limitation on vercel docs https://vercel.com/docs/storage/vercel-postgres/faq db_pool_size: 25 custom_config: - contract_upgrade_indexer: [ + marketplace_indexer: [ # wapal "0x584b50b999c78ade62f8359c91b5165ff390338d45f8e55969a04e65d76258c9", # tradeport diff --git a/indexer/src/config/indexer_processor_config.rs b/indexer/src/config/indexer_processor_config.rs index 4166813..e56ed21 100644 --- a/indexer/src/config/indexer_processor_config.rs +++ b/indexer/src/config/indexer_processor_config.rs @@ -4,7 +4,10 @@ use aptos_indexer_processor_sdk_server_framework::RunnableConfig; use serde::{Deserialize, Serialize}; use super::processor_config::ProcessorConfig; -use crate::indexers::contract_upgrade_indexer::processor::ContractUpgradeProcessor; +use crate::indexers::{ + contract_upgrade_indexer::processor::ContractUpgradeProcessor, + marketplace_indexer::processor::MarketplaceProcessor, +}; pub const QUERY_DEFAULT_RETRIES: u32 = 5; pub const QUERY_DEFAULT_RETRY_DELAY_MS: u64 = 500; @@ -23,11 +26,20 @@ impl RunnableConfig for IndexerProcessorConfig { async fn run(&self) -> Result<()> { match self.processor_config { ProcessorConfig::ContractUpgradeIndexer => { - let events_processor = ContractUpgradeProcessor::new(self.clone()).await?; - 
events_processor.run_processor().await + let processor = ContractUpgradeProcessor::new(self.clone()).await?; + processor.run_processor().await } - ProcessorConfig::MarketplaceIndexer => { - return Err(anyhow::anyhow!("MarketplaceIndexer not implemented")); + ProcessorConfig::WapalMarketplaceIndexer => { + let processor = MarketplaceProcessor::new(self.clone()).await?; + processor.run_processor().await + } + ProcessorConfig::RaribleMarketplaceIndexer => { + let processor = MarketplaceProcessor::new(self.clone()).await?; + processor.run_processor().await + } + ProcessorConfig::TradeportMarketplaceIndexer => { + let processor = MarketplaceProcessor::new(self.clone()).await?; + processor.run_processor().await } } } diff --git a/indexer/src/config/processor_config.rs b/indexer/src/config/processor_config.rs index bbfccf9..f09a7a1 100644 --- a/indexer/src/config/processor_config.rs +++ b/indexer/src/config/processor_config.rs @@ -33,7 +33,9 @@ use serde::{Deserialize, Serialize}; )] pub enum ProcessorConfig { ContractUpgradeIndexer, - MarketplaceIndexer, + RaribleMarketplaceIndexer, + WapalMarketplaceIndexer, + TradeportMarketplaceIndexer, } impl ProcessorConfig { @@ -59,7 +61,9 @@ impl ProcessorConfig { )] pub enum Processor { ContractUpgradeIndexer, - MarketplaceIndexer, + RaribleMarketplaceIndexer, + WapalMarketplaceIndexer, + TradeportMarketplaceIndexer, } #[cfg(test)] diff --git a/indexer/src/db_migrations/contract_upgrade_indexer/contract_upgrade_schema.rs b/indexer/src/db_migrations/contract_upgrade_indexer/contract_upgrade_schema.rs deleted file mode 100644 index 3c652aa..0000000 --- a/indexer/src/db_migrations/contract_upgrade_indexer/contract_upgrade_schema.rs +++ /dev/null @@ -1,54 +0,0 @@ -// @generated automatically by Diesel CLI. - -diesel::table! { - contract_upgrade_ledger_infos (chain_id) { - chain_id -> Int8, - } -} - -diesel::table! { - contract_upgrade_processor_status (processor) { - #[max_length = 50] - processor -> Varchar, - last_success_version -> Int8, - last_updated -> Timestamp, - last_transaction_timestamp -> Nullable<Timestamp>, - } -} - -diesel::table! { - module_upgrade_history (module_addr, module_name, package_name, upgrade_number) { - #[max_length = 300] - module_addr -> Varchar, - #[max_length = 300] - module_name -> Varchar, - #[max_length = 300] - package_name -> Varchar, - upgrade_number -> Int8, - module_bytecode -> Bytea, - module_source_code -> Text, - module_abi -> Json, - tx_version -> Int8, - } -} - -diesel::table!
{ - package_upgrade_history (package_addr, package_name, upgrade_number) { - #[max_length = 300] - package_addr -> Varchar, - #[max_length = 300] - package_name -> Varchar, - upgrade_number -> Int8, - upgrade_policy -> Int8, - package_manifest -> Text, - source_digest -> Text, - tx_version -> Int8, - } -} - -diesel::allow_tables_to_appear_in_same_query!( - contract_upgrade_ledger_infos, - contract_upgrade_processor_status, - module_upgrade_history, - package_upgrade_history, -); diff --git a/indexer/src/db_migrations/contract_upgrade_indexer/migrations/2024-07-18-194547_create-processor-status/down.sql b/indexer/src/db_migrations/contract_upgrade_indexer/migrations/2024-07-18-194547_create-processor-status/down.sql deleted file mode 100644 index 8a26990..0000000 --- a/indexer/src/db_migrations/contract_upgrade_indexer/migrations/2024-07-18-194547_create-processor-status/down.sql +++ /dev/null @@ -1,2 +0,0 @@ --- This file should undo anything in `up.sql`d -DROP TABLE IF EXISTS contract_upgrade_processor_status; \ No newline at end of file diff --git a/indexer/src/db_migrations/contract_upgrade_indexer/migrations/2024-08-01-224558_ledger-infos/down.sql b/indexer/src/db_migrations/contract_upgrade_indexer/migrations/2024-08-01-224558_ledger-infos/down.sql deleted file mode 100644 index afeb1cd..0000000 --- a/indexer/src/db_migrations/contract_upgrade_indexer/migrations/2024-08-01-224558_ledger-infos/down.sql +++ /dev/null @@ -1,2 +0,0 @@ --- This file should undo anything in `up.sql` -DROP TABLE IF EXISTS contract_upgrade_ledger_infos; \ No newline at end of file diff --git a/indexer/src/db_migrations/contract_upgrade_indexer/migrations/2024-08-01-224558_ledger-infos/up.sql b/indexer/src/db_migrations/contract_upgrade_indexer/migrations/2024-08-01-224558_ledger-infos/up.sql deleted file mode 100644 index c33cb2c..0000000 --- a/indexer/src/db_migrations/contract_upgrade_indexer/migrations/2024-08-01-224558_ledger-infos/up.sql +++ /dev/null @@ -1,3 +0,0 @@ --- Your SQL goes here -CREATE TABLE - contract_upgrade_ledger_infos (chain_id BIGINT UNIQUE PRIMARY KEY NOT NULL); \ No newline at end of file diff --git a/indexer/src/db_migrations/contract_upgrade_indexer/diesel.toml b/indexer/src/db_migrations/diesel.toml similarity index 82% rename from indexer/src/db_migrations/contract_upgrade_indexer/diesel.toml rename to indexer/src/db_migrations/diesel.toml index 9350233..9a59970 100644 --- a/indexer/src/db_migrations/contract_upgrade_indexer/diesel.toml +++ b/indexer/src/db_migrations/diesel.toml @@ -2,7 +2,7 @@ # see https://diesel.rs/guides/configuring-diesel-cli [print_schema] -file = "contract_upgrade_schema.rs" +file = "schema.rs" [migrations_directory] dir = "migrations" diff --git a/indexer/src/db_migrations/contract_upgrade_indexer/migrations/00000000000000_diesel_initial_setup/down.sql b/indexer/src/db_migrations/migrations/00000000000000_diesel_initial_setup/down.sql similarity index 100% rename from indexer/src/db_migrations/contract_upgrade_indexer/migrations/00000000000000_diesel_initial_setup/down.sql rename to indexer/src/db_migrations/migrations/00000000000000_diesel_initial_setup/down.sql diff --git a/indexer/src/db_migrations/contract_upgrade_indexer/migrations/00000000000000_diesel_initial_setup/up.sql b/indexer/src/db_migrations/migrations/00000000000000_diesel_initial_setup/up.sql similarity index 100% rename from indexer/src/db_migrations/contract_upgrade_indexer/migrations/00000000000000_diesel_initial_setup/up.sql rename to 
indexer/src/db_migrations/migrations/00000000000000_diesel_initial_setup/up.sql diff --git a/indexer/src/db_migrations/migrations/2024-07-18-194547_create-processor-status/down.sql b/indexer/src/db_migrations/migrations/2024-07-18-194547_create-processor-status/down.sql new file mode 100644 index 0000000..9eb504e --- /dev/null +++ b/indexer/src/db_migrations/migrations/2024-07-18-194547_create-processor-status/down.sql @@ -0,0 +1,2 @@ +-- This file should undo anything in `up.sql` +DROP TABLE IF EXISTS processor_status; \ No newline at end of file diff --git a/indexer/src/db_migrations/contract_upgrade_indexer/migrations/2024-07-18-194547_create-processor-status/up.sql b/indexer/src/db_migrations/migrations/2024-07-18-194547_create-processor-status/up.sql similarity index 86% rename from indexer/src/db_migrations/contract_upgrade_indexer/migrations/2024-07-18-194547_create-processor-status/up.sql rename to indexer/src/db_migrations/migrations/2024-07-18-194547_create-processor-status/up.sql index aeeb934..fc17698 100644 --- a/indexer/src/db_migrations/contract_upgrade_indexer/migrations/2024-07-18-194547_create-processor-status/up.sql +++ b/indexer/src/db_migrations/migrations/2024-07-18-194547_create-processor-status/up.sql @@ -1,6 +1,6 @@ -- Your SQL goes here CREATE TABLE - contract_upgrade_processor_status ( + processor_status ( processor VARCHAR(50) NOT NULL, last_success_version BIGINT NOT NULL, last_updated TIMESTAMP NOT NULL, diff --git a/indexer/src/db_migrations/contract_upgrade_indexer/migrations/2024-07-18-202400_test-migration/down.sql b/indexer/src/db_migrations/migrations/2024-07-18-202400_test-migration/down.sql similarity index 100% rename from indexer/src/db_migrations/contract_upgrade_indexer/migrations/2024-07-18-202400_test-migration/down.sql rename to indexer/src/db_migrations/migrations/2024-07-18-202400_test-migration/down.sql diff --git a/indexer/src/db_migrations/contract_upgrade_indexer/migrations/2024-07-18-202400_test-migration/up.sql b/indexer/src/db_migrations/migrations/2024-07-18-202400_test-migration/up.sql similarity index 54% rename from indexer/src/db_migrations/contract_upgrade_indexer/migrations/2024-07-18-202400_test-migration/up.sql rename to indexer/src/db_migrations/migrations/2024-07-18-202400_test-migration/up.sql index 3b87a71..a3f0124 100644 --- a/indexer/src/db_migrations/contract_upgrade_indexer/migrations/2024-07-18-202400_test-migration/up.sql +++ b/indexer/src/db_migrations/migrations/2024-07-18-202400_test-migration/up.sql @@ -1,4 +1,4 @@ -- Your SQL goes here -ALTER TABLE IF EXISTS contract_upgrade_processor_status +ALTER TABLE IF EXISTS processor_status ALTER COLUMN last_updated SET DEFAULT NOW (); \ No newline at end of file diff --git a/indexer/src/db_migrations/migrations/2024-08-01-224558_ledger-infos/down.sql b/indexer/src/db_migrations/migrations/2024-08-01-224558_ledger-infos/down.sql new file mode 100644 index 0000000..e54762a --- /dev/null +++ b/indexer/src/db_migrations/migrations/2024-08-01-224558_ledger-infos/down.sql @@ -0,0 +1,2 @@ +-- This file should undo anything in `up.sql` +DROP TABLE IF EXISTS ledger_infos; \ No newline at end of file diff --git a/indexer/src/db_migrations/migrations/2024-08-01-224558_ledger-infos/up.sql b/indexer/src/db_migrations/migrations/2024-08-01-224558_ledger-infos/up.sql new file mode 100644 index 0000000..ba06092 --- /dev/null +++ b/indexer/src/db_migrations/migrations/2024-08-01-224558_ledger-infos/up.sql @@ -0,0 +1,3 @@ +-- Your SQL goes here +CREATE TABLE + ledger_infos (chain_id
BIGINT UNIQUE PRIMARY KEY NOT NULL); \ No newline at end of file diff --git a/indexer/src/db_migrations/contract_upgrade_indexer/migrations/2024-10-17-235031_add-module-upgrade-history-table/down.sql b/indexer/src/db_migrations/migrations/2024-10-17-235031_add-module-upgrade-history-table/down.sql similarity index 100% rename from indexer/src/db_migrations/contract_upgrade_indexer/migrations/2024-10-17-235031_add-module-upgrade-history-table/down.sql rename to indexer/src/db_migrations/migrations/2024-10-17-235031_add-module-upgrade-history-table/down.sql diff --git a/indexer/src/db_migrations/contract_upgrade_indexer/migrations/2024-10-17-235031_add-module-upgrade-history-table/up.sql b/indexer/src/db_migrations/migrations/2024-10-17-235031_add-module-upgrade-history-table/up.sql similarity index 100% rename from indexer/src/db_migrations/contract_upgrade_indexer/migrations/2024-10-17-235031_add-module-upgrade-history-table/up.sql rename to indexer/src/db_migrations/migrations/2024-10-17-235031_add-module-upgrade-history-table/up.sql diff --git a/indexer/src/db_migrations/contract_upgrade_indexer/migrations/2024-10-17-235032_add-package-upgrade-history-table/down.sql b/indexer/src/db_migrations/migrations/2024-10-17-235032_add-package-upgrade-history-table/down.sql similarity index 100% rename from indexer/src/db_migrations/contract_upgrade_indexer/migrations/2024-10-17-235032_add-package-upgrade-history-table/down.sql rename to indexer/src/db_migrations/migrations/2024-10-17-235032_add-package-upgrade-history-table/down.sql diff --git a/indexer/src/db_migrations/contract_upgrade_indexer/migrations/2024-10-17-235032_add-package-upgrade-history-table/up.sql b/indexer/src/db_migrations/migrations/2024-10-17-235032_add-package-upgrade-history-table/up.sql similarity index 100% rename from indexer/src/db_migrations/contract_upgrade_indexer/migrations/2024-10-17-235032_add-package-upgrade-history-table/up.sql rename to indexer/src/db_migrations/migrations/2024-10-17-235032_add-package-upgrade-history-table/up.sql diff --git a/indexer/src/db_migrations/migrations/2024-12-03-235031_add-nft-asks-table/down.sql b/indexer/src/db_migrations/migrations/2024-12-03-235031_add-nft-asks-table/down.sql new file mode 100644 index 0000000..428d2db --- /dev/null +++ b/indexer/src/db_migrations/migrations/2024-12-03-235031_add-nft-asks-table/down.sql @@ -0,0 +1,2 @@ +-- This file should undo anything in `up.sql` +DROP TABLE IF EXISTS nft_asks; \ No newline at end of file diff --git a/indexer/src/db_migrations/migrations/2024-12-03-235031_add-nft-asks-table/up.sql b/indexer/src/db_migrations/migrations/2024-12-03-235031_add-nft-asks-table/up.sql new file mode 100644 index 0000000..f38297b --- /dev/null +++ b/indexer/src/db_migrations/migrations/2024-12-03-235031_add-nft-asks-table/up.sql @@ -0,0 +1,47 @@ +-- This table is backfill safe, i.e.
you can re-index without dropping the table +CREATE TABLE + nft_asks ( + ask_obj_addr VARCHAR(300) PRIMARY KEY, + -- For v1 NFTs, this is property_version, for v2 NFTs, this is nft_addr + nft_id VARCHAR(300) NOT NULL, + nft_name VARCHAR(300) NOT NULL, + -- For v2 NFTs, we use collection_addr to identify the collection + collection_addr VARCHAR(300) NOT NULL, + -- For v1 NFTs, we use creator_addr + name to identify the collection + collection_creator_addr VARCHAR(300) NOT NULL, + collection_name VARCHAR(300) NOT NULL, + -- 1 is token v1, 2 is token v2 + nft_standard INT NOT NULL, + marketplace_addr VARCHAR(300) NOT NULL, + -- empty str when it's not filled + buyer_addr VARCHAR(300) NOT NULL, + seller_addr VARCHAR(300) NOT NULL, + -- price in on-chain unit, for APT it's oct + price BIGINT NOT NULL, + -- in on-chain unit, for APT it's oct + royalties BIGINT NOT NULL, + -- in on-chain unit, for APT it's oct + commission BIGINT NOT NULL, + -- for coin APT, this is 0x1::aptos_coin::AptosCoin + -- for fa APT, this is 0xa + payment_token VARCHAR(300) NOT NULL, + -- 1 is coin, 2 is fa + payment_token_type INT NOT NULL, + order_placed_timestamp BIGINT NOT NULL, + order_placed_tx_version BIGINT NOT NULL, + order_placed_event_idx BIGINT NOT NULL, + order_filled_timestamp BIGINT NOT NULL, + order_filled_tx_version BIGINT NOT NULL, + order_filled_event_idx BIGINT NOT NULL, + order_cancelled_timestamp BIGINT NOT NULL, + order_cancelled_tx_version BIGINT NOT NULL, + order_cancelled_event_idx BIGINT NOT NULL, + -- 1 is active, 2 is filled, 3 is cancelled + order_status INT NOT NULL, + -- 1 is fixed price, 2 is auction + order_type INT NOT NULL, + CHECK (nft_standard IN (1, 2)), + CHECK (payment_token_type IN (1, 2)), + CHECK (order_status IN (1, 2, 3)), + CHECK (order_type IN (1, 2)) + ); \ No newline at end of file diff --git a/indexer/src/db_migrations/migrations/2024-12-10-014414_add-index-to-nft-asks-table/down.sql b/indexer/src/db_migrations/migrations/2024-12-10-014414_add-index-to-nft-asks-table/down.sql new file mode 100644 index 0000000..f55921d --- /dev/null +++ b/indexer/src/db_migrations/migrations/2024-12-10-014414_add-index-to-nft-asks-table/down.sql @@ -0,0 +1,16 @@ +-- This file should undo anything in `up.sql` +DROP INDEX idx_nft_asks_nft_id; + +DROP INDEX idx_nft_asks_nft_name; + +DROP INDEX idx_nft_asks_collection_addr; + +DROP INDEX idx_nft_asks_collection_creator_addr; + +DROP INDEX idx_nft_asks_collection_name; + +DROP INDEX idx_nft_asks_buyer_addr; + +DROP INDEX idx_nft_asks_seller_addr; + +DROP INDEX idx_nft_asks_marketplace_addr; \ No newline at end of file diff --git a/indexer/src/db_migrations/migrations/2024-12-10-014414_add-index-to-nft-asks-table/up.sql b/indexer/src/db_migrations/migrations/2024-12-10-014414_add-index-to-nft-asks-table/up.sql new file mode 100644 index 0000000..829b594 --- /dev/null +++ b/indexer/src/db_migrations/migrations/2024-12-10-014414_add-index-to-nft-asks-table/up.sql @@ -0,0 +1,16 @@ +-- Your SQL goes here +CREATE INDEX idx_nft_asks_nft_id ON nft_asks (nft_id); + +CREATE INDEX idx_nft_asks_nft_name ON nft_asks (nft_name); + +CREATE INDEX idx_nft_asks_collection_addr ON nft_asks (collection_addr); + +CREATE INDEX idx_nft_asks_collection_creator_addr ON nft_asks (collection_creator_addr); + +CREATE INDEX idx_nft_asks_collection_name ON nft_asks (collection_name); + +CREATE INDEX idx_nft_asks_buyer_addr ON nft_asks (buyer_addr); + +CREATE INDEX idx_nft_asks_seller_addr ON nft_asks (seller_addr); + +CREATE INDEX idx_nft_asks_marketplace_addr ON nft_asks
(marketplace_addr); \ No newline at end of file diff --git a/indexer/src/db_migrations/migrations/2024-12-10-015223_add-nft-bids-table/down.sql b/indexer/src/db_migrations/migrations/2024-12-10-015223_add-nft-bids-table/down.sql new file mode 100644 index 0000000..72c838a --- /dev/null +++ b/indexer/src/db_migrations/migrations/2024-12-10-015223_add-nft-bids-table/down.sql @@ -0,0 +1,2 @@ +-- This file should undo anything in `up.sql` +DROP TABLE nft_bids; \ No newline at end of file diff --git a/indexer/src/db_migrations/migrations/2024-12-10-015223_add-nft-bids-table/up.sql b/indexer/src/db_migrations/migrations/2024-12-10-015223_add-nft-bids-table/up.sql new file mode 100644 index 0000000..002a32c --- /dev/null +++ b/indexer/src/db_migrations/migrations/2024-12-10-015223_add-nft-bids-table/up.sql @@ -0,0 +1,47 @@ +-- This table is backfill safe, i.e. you can re-index without dropping the table +-- Your SQL goes here +CREATE TABLE + nft_bids ( + bid_obj_addr VARCHAR(300) PRIMARY KEY, + -- For v1 NFTs, this is property_version, for v2 NFTs, this is nft_addr + -- For collection bid, this only gets set after order is filled + nft_id VARCHAR(300) NOT NULL, + nft_name VARCHAR(300) NOT NULL, + -- For v2 NFTs, we use collection_addr to identify the collection + collection_addr VARCHAR(300), + -- For v1 NFTs, we use creator_addr + name to identify the collection + collection_creator_addr VARCHAR(300) NOT NULL, + collection_name VARCHAR(300) NOT NULL, + -- 1 is token v1, 2 is token v2 + nft_standard INT NOT NULL, + marketplace_addr VARCHAR(300) NOT NULL, + buyer_addr VARCHAR(300) NOT NULL, + -- empty str when it's not filled + seller_addr VARCHAR(300) NOT NULL, + -- price in on-chain unit, for APT it's oct + price BIGINT NOT NULL, + -- in on-chain unit, for APT it's oct + royalties BIGINT NOT NULL, + -- in on-chain unit, for APT it's oct + commission BIGINT NOT NULL, + -- for coin APT, this is 0x1::aptos_coin::AptosCoin + -- for fa APT, this is 0xa + payment_token VARCHAR(300) NOT NULL, + -- 1 is coin, 2 is fa + payment_token_type INT NOT NULL, + order_placed_timestamp BIGINT NOT NULL, + order_placed_tx_version BIGINT NOT NULL, + order_placed_event_idx BIGINT NOT NULL, + order_filled_timestamp BIGINT NOT NULL, + order_filled_tx_version BIGINT NOT NULL, + order_filled_event_idx BIGINT NOT NULL, + order_cancelled_timestamp BIGINT NOT NULL, + order_cancelled_tx_version BIGINT NOT NULL, + order_cancelled_event_idx BIGINT NOT NULL, + -- 1 is active, 2 is filled, 3 is cancelled + order_status INT NOT NULL, + order_expiration_timestamp BIGINT NOT NULL, + CHECK (nft_standard IN (1, 2)), + CHECK (payment_token_type IN (1, 2)), + CHECK (order_status IN (1, 2, 3)) + ); \ No newline at end of file diff --git a/indexer/src/db_migrations/migrations/2024-12-10-195135_add-index-to-nft-bids-table/down.sql b/indexer/src/db_migrations/migrations/2024-12-10-195135_add-index-to-nft-bids-table/down.sql new file mode 100644 index 0000000..495e6bc --- /dev/null +++ b/indexer/src/db_migrations/migrations/2024-12-10-195135_add-index-to-nft-bids-table/down.sql @@ -0,0 +1,16 @@ +-- This file should undo anything in `up.sql` +DROP INDEX idx_nft_bids_nft_id; + +DROP INDEX idx_nft_bids_nft_name; + +DROP INDEX idx_nft_bids_collection_addr; + +DROP INDEX idx_nft_bids_collection_creator_addr; + +DROP INDEX idx_nft_bids_collection_name; + +DROP INDEX idx_nft_bids_seller_addr; + +DROP INDEX idx_nft_bids_buyer_addr; + +DROP INDEX idx_nft_bids_marketplace_addr; \ No newline at end of file diff --git 
a/indexer/src/db_migrations/migrations/2024-12-10-195135_add-index-to-nft-bids-table/up.sql b/indexer/src/db_migrations/migrations/2024-12-10-195135_add-index-to-nft-bids-table/up.sql new file mode 100644 index 0000000..35df9bc --- /dev/null +++ b/indexer/src/db_migrations/migrations/2024-12-10-195135_add-index-to-nft-bids-table/up.sql @@ -0,0 +1,16 @@ +-- Your SQL goes here +CREATE INDEX idx_nft_bids_nft_id ON nft_bids (nft_id); + +CREATE INDEX idx_nft_bids_nft_name ON nft_bids (nft_name); + +CREATE INDEX idx_nft_bids_collection_addr ON nft_bids (collection_addr); + +CREATE INDEX idx_nft_bids_collection_creator_addr ON nft_bids (collection_creator_addr); + +CREATE INDEX idx_nft_bids_collection_name ON nft_bids (collection_name); + +CREATE INDEX idx_nft_bids_seller_addr ON nft_bids (seller_addr); + +CREATE INDEX idx_nft_bids_buyer_addr ON nft_bids (buyer_addr); + +CREATE INDEX idx_nft_bids_marketplace_addr ON nft_bids (marketplace_addr); \ No newline at end of file diff --git a/indexer/src/db_migrations/migrations/2024-12-18-214546_add-collection-bids-table/down.sql b/indexer/src/db_migrations/migrations/2024-12-18-214546_add-collection-bids-table/down.sql new file mode 100644 index 0000000..018ce4b --- /dev/null +++ b/indexer/src/db_migrations/migrations/2024-12-18-214546_add-collection-bids-table/down.sql @@ -0,0 +1,2 @@ +-- This file should undo anything in `up.sql` +DROP TABLE collection_bids; \ No newline at end of file diff --git a/indexer/src/db_migrations/migrations/2024-12-18-214546_add-collection-bids-table/up.sql b/indexer/src/db_migrations/migrations/2024-12-18-214546_add-collection-bids-table/up.sql new file mode 100644 index 0000000..adeabf4 --- /dev/null +++ b/indexer/src/db_migrations/migrations/2024-12-18-214546_add-collection-bids-table/up.sql @@ -0,0 +1,39 @@ +-- This table is backfill safe, i.e. 
you can re-index without dropping the table +-- Your SQL goes here +CREATE TABLE + collection_bids ( + bid_obj_addr VARCHAR(300) PRIMARY KEY, + -- For v2 NFTs, we use collection_addr to identify the collection + collection_addr VARCHAR(300) NOT NULL, + -- For v1 NFTs, we use creator_addr + name to identify the collection + collection_creator_addr VARCHAR(300) NOT NULL, + collection_name VARCHAR(300) NOT NULL, + -- 1 is token v1, 2 is token v2 + nft_standard INT NOT NULL, + marketplace_addr VARCHAR(300) NOT NULL, + buyer_addr VARCHAR(300) NOT NULL, + total_nft_amount BIGINT NOT NULL, + -- price per nft in on-chain unit, for APT it's oct + price BIGINT NOT NULL, + -- for coin APT, this is 0x1::aptos_coin::AptosCoin + -- for fa APT, this is 0xa + payment_token VARCHAR(300) NOT NULL, + -- 1 is coin, 2 is fa + payment_token_type INT NOT NULL, + order_placed_timestamp BIGINT NOT NULL, + order_placed_tx_version BIGINT NOT NULL, + order_placed_event_idx BIGINT NOT NULL, + latest_order_filled_timestamp BIGINT NOT NULL, + latest_order_filled_tx_version BIGINT NOT NULL, + latest_order_filled_event_idx BIGINT NOT NULL, + order_cancelled_timestamp BIGINT NOT NULL, + order_cancelled_tx_version BIGINT NOT NULL, + order_cancelled_event_idx BIGINT NOT NULL, + -- 1 is active, 2 is filled, 3 is cancelled + -- order is only filled when remaining_nft_amount is 0 + order_status INT NOT NULL, + order_expiration_timestamp BIGINT NOT NULL, + CHECK (nft_standard IN (1, 2)), + CHECK (payment_token_type IN (1, 2)), + CHECK (order_status IN (1, 2, 3)) + ); \ No newline at end of file diff --git a/indexer/src/db_migrations/migrations/2024-12-18-214551_add-index-to-collection-bids-table/down.sql b/indexer/src/db_migrations/migrations/2024-12-18-214551_add-index-to-collection-bids-table/down.sql new file mode 100644 index 0000000..e4ce76a --- /dev/null +++ b/indexer/src/db_migrations/migrations/2024-12-18-214551_add-index-to-collection-bids-table/down.sql @@ -0,0 +1,10 @@ +-- This file should undo anything in `up.sql` +DROP INDEX idx_collection_bids_collection_addr; + +DROP INDEX idx_collection_bids_collection_creator_addr; + +DROP INDEX idx_collection_bids_collection_name; + +DROP INDEX idx_collection_bids_buyer_addr; + +DROP INDEX idx_collection_bids_marketplace_addr; \ No newline at end of file diff --git a/indexer/src/db_migrations/migrations/2024-12-18-214551_add-index-to-collection-bids-table/up.sql b/indexer/src/db_migrations/migrations/2024-12-18-214551_add-index-to-collection-bids-table/up.sql new file mode 100644 index 0000000..a377701 --- /dev/null +++ b/indexer/src/db_migrations/migrations/2024-12-18-214551_add-index-to-collection-bids-table/up.sql @@ -0,0 +1,10 @@ +-- Your SQL goes here +CREATE INDEX idx_collection_bids_collection_addr ON collection_bids (collection_addr); + +CREATE INDEX idx_collection_bids_collection_creator_addr ON collection_bids (collection_creator_addr); + +CREATE INDEX idx_collection_bids_collection_name ON collection_bids (collection_name); + +CREATE INDEX idx_collection_bids_buyer_addr ON collection_bids (buyer_addr); + +CREATE INDEX idx_collection_bids_marketplace_addr ON collection_bids (marketplace_addr); \ No newline at end of file diff --git a/indexer/src/db_migrations/migrations/2024-12-19-195341_add-filled-collection-bids-table/down.sql b/indexer/src/db_migrations/migrations/2024-12-19-195341_add-filled-collection-bids-table/down.sql new file mode 100644 index 0000000..112573e --- /dev/null +++ 
b/indexer/src/db_migrations/migrations/2024-12-19-195341_add-filled-collection-bids-table/down.sql @@ -0,0 +1,2 @@ +-- This file should undo anything in `up.sql` +DROP TABLE filled_collection_bids; \ No newline at end of file diff --git a/indexer/src/db_migrations/migrations/2024-12-19-195341_add-filled-collection-bids-table/up.sql b/indexer/src/db_migrations/migrations/2024-12-19-195341_add-filled-collection-bids-table/up.sql new file mode 100644 index 0000000..3af0dbb --- /dev/null +++ b/indexer/src/db_migrations/migrations/2024-12-19-195341_add-filled-collection-bids-table/up.sql @@ -0,0 +1,23 @@ +-- This table should be used together with the collection_bids table to store filled collection bids +-- This table is backfill safe, i.e. you can re-index without dropping the table +-- Your SQL goes here +CREATE TABLE + filled_collection_bids ( + bid_obj_addr VARCHAR(300) NOT NULL, + -- For v1 NFTs, this is property_version, for v2 NFTs, this is nft_addr + -- For collection bid, this only gets set after order is filled + nft_id VARCHAR(300) NOT NULL, + nft_name VARCHAR(300) NOT NULL, + seller_addr VARCHAR(300) NOT NULL, + -- price in on-chain unit, for APT it's oct + price BIGINT NOT NULL, + -- in on-chain unit, for APT it's oct + royalties BIGINT NOT NULL, + -- in on-chain unit, for APT it's oct + commission BIGINT NOT NULL, + order_filled_timestamp BIGINT NOT NULL, + order_filled_tx_version BIGINT NOT NULL, + order_filled_event_idx BIGINT NOT NULL, + PRIMARY KEY (bid_obj_addr, nft_id, nft_name), + FOREIGN KEY (bid_obj_addr) REFERENCES collection_bids (bid_obj_addr) + ); \ No newline at end of file diff --git a/indexer/src/db_migrations/schema.rs b/indexer/src/db_migrations/schema.rs new file mode 100644 index 0000000..c70b127 --- /dev/null +++ b/indexer/src/db_migrations/schema.rs @@ -0,0 +1,195 @@ +// @generated automatically by Diesel CLI. + +diesel::table! { + collection_bids (bid_obj_addr) { + #[max_length = 300] + bid_obj_addr -> Varchar, + #[max_length = 300] + collection_addr -> Varchar, + #[max_length = 300] + collection_creator_addr -> Varchar, + #[max_length = 300] + collection_name -> Varchar, + nft_standard -> Int4, + #[max_length = 300] + marketplace_addr -> Varchar, + #[max_length = 300] + buyer_addr -> Varchar, + total_nft_amount -> Int8, + price -> Int8, + #[max_length = 300] + payment_token -> Varchar, + payment_token_type -> Int4, + order_placed_timestamp -> Int8, + order_placed_tx_version -> Int8, + order_placed_event_idx -> Int8, + latest_order_filled_timestamp -> Int8, + latest_order_filled_tx_version -> Int8, + latest_order_filled_event_idx -> Int8, + order_cancelled_timestamp -> Int8, + order_cancelled_tx_version -> Int8, + order_cancelled_event_idx -> Int8, + order_status -> Int4, + order_expiration_timestamp -> Int8, + } +} + +diesel::table! { + filled_collection_bids (bid_obj_addr, nft_id, nft_name) { + #[max_length = 300] + bid_obj_addr -> Varchar, + #[max_length = 300] + nft_id -> Varchar, + #[max_length = 300] + nft_name -> Varchar, + #[max_length = 300] + seller_addr -> Varchar, + price -> Int8, + royalties -> Int8, + commission -> Int8, + order_filled_timestamp -> Int8, + order_filled_tx_version -> Int8, + order_filled_event_idx -> Int8, + } +} + +diesel::table! { + ledger_infos (chain_id) { + chain_id -> Int8, + } +} + +diesel::table! 
{ + module_upgrade_history (module_addr, module_name, package_name, upgrade_number) { + #[max_length = 300] + module_addr -> Varchar, + #[max_length = 300] + module_name -> Varchar, + #[max_length = 300] + package_name -> Varchar, + upgrade_number -> Int8, + module_bytecode -> Bytea, + module_source_code -> Text, + module_abi -> Json, + tx_version -> Int8, + } +} + +diesel::table! { + nft_asks (ask_obj_addr) { + #[max_length = 300] + ask_obj_addr -> Varchar, + #[max_length = 300] + nft_id -> Varchar, + #[max_length = 300] + nft_name -> Varchar, + #[max_length = 300] + collection_addr -> Varchar, + #[max_length = 300] + collection_creator_addr -> Varchar, + #[max_length = 300] + collection_name -> Varchar, + nft_standard -> Int4, + #[max_length = 300] + marketplace_addr -> Varchar, + #[max_length = 300] + buyer_addr -> Varchar, + #[max_length = 300] + seller_addr -> Varchar, + price -> Int8, + royalties -> Int8, + commission -> Int8, + #[max_length = 300] + payment_token -> Varchar, + payment_token_type -> Int4, + order_placed_timestamp -> Int8, + order_placed_tx_version -> Int8, + order_placed_event_idx -> Int8, + order_filled_timestamp -> Int8, + order_filled_tx_version -> Int8, + order_filled_event_idx -> Int8, + order_cancelled_timestamp -> Int8, + order_cancelled_tx_version -> Int8, + order_cancelled_event_idx -> Int8, + order_status -> Int4, + order_type -> Int4, + } +} + +diesel::table! { + nft_bids (bid_obj_addr) { + #[max_length = 300] + bid_obj_addr -> Varchar, + #[max_length = 300] + nft_id -> Varchar, + #[max_length = 300] + nft_name -> Varchar, + #[max_length = 300] + collection_addr -> Nullable<Varchar>, + #[max_length = 300] + collection_creator_addr -> Varchar, + #[max_length = 300] + collection_name -> Varchar, + nft_standard -> Int4, + #[max_length = 300] + marketplace_addr -> Varchar, + #[max_length = 300] + buyer_addr -> Varchar, + #[max_length = 300] + seller_addr -> Varchar, + price -> Int8, + royalties -> Int8, + commission -> Int8, + #[max_length = 300] + payment_token -> Varchar, + payment_token_type -> Int4, + order_placed_timestamp -> Int8, + order_placed_tx_version -> Int8, + order_placed_event_idx -> Int8, + order_filled_timestamp -> Int8, + order_filled_tx_version -> Int8, + order_filled_event_idx -> Int8, + order_cancelled_timestamp -> Int8, + order_cancelled_tx_version -> Int8, + order_cancelled_event_idx -> Int8, + order_status -> Int4, + order_expiration_timestamp -> Int8, + } +} + +diesel::table! { + package_upgrade_history (package_addr, package_name, upgrade_number) { + #[max_length = 300] + package_addr -> Varchar, + #[max_length = 300] + package_name -> Varchar, + upgrade_number -> Int8, + upgrade_policy -> Int8, + package_manifest -> Text, + source_digest -> Text, + tx_version -> Int8, + } +} + +diesel::table!
{ + processor_status (processor) { + #[max_length = 50] + processor -> Varchar, + last_success_version -> Int8, + last_updated -> Timestamp, + last_transaction_timestamp -> Nullable<Timestamp>, + } +} + +diesel::joinable!(filled_collection_bids -> collection_bids (bid_obj_addr)); + +diesel::allow_tables_to_appear_in_same_query!( + collection_bids, + filled_collection_bids, + ledger_infos, + module_upgrade_history, + nft_asks, + nft_bids, + package_upgrade_history, + processor_status, +); diff --git a/indexer/src/db_models/collection_bids.rs b/indexer/src/db_models/collection_bids.rs new file mode 100644 index 0000000..0ca3461 --- /dev/null +++ b/indexer/src/db_models/collection_bids.rs @@ -0,0 +1,33 @@ +use diesel::{AsChangeset, Insertable}; +use field_count::FieldCount; +use serde::{Deserialize, Serialize}; + +use crate::schema::collection_bids; + +#[derive(AsChangeset, Clone, Debug, Deserialize, FieldCount, Insertable, Serialize)] +#[diesel(table_name = collection_bids)] +/// Database representation of a collection bid +pub struct CollectionBid { + pub bid_obj_addr: String, + pub collection_addr: String, + pub collection_creator_addr: String, + pub collection_name: String, + pub nft_standard: i32, + pub marketplace_addr: String, + pub buyer_addr: String, + pub total_nft_amount: i64, + pub price: i64, + pub payment_token: String, + pub payment_token_type: i32, + pub order_placed_timestamp: i64, + pub order_placed_tx_version: i64, + pub order_placed_event_idx: i64, + pub latest_order_filled_timestamp: i64, + pub latest_order_filled_tx_version: i64, + pub latest_order_filled_event_idx: i64, + pub order_cancelled_timestamp: i64, + pub order_cancelled_tx_version: i64, + pub order_cancelled_event_idx: i64, + pub order_status: i32, + pub order_expiration_timestamp: i64, +} diff --git a/indexer/src/db_models/contract_upgrade_indexer/mod.rs b/indexer/src/db_models/contract_upgrade_indexer/mod.rs deleted file mode 100644 index f6c8d3a..0000000 --- a/indexer/src/db_models/contract_upgrade_indexer/mod.rs +++ /dev/null @@ -1,4 +0,0 @@ -pub mod ledger_info; -pub mod module_upgrade; -pub mod package_upgrade; -pub mod processor_status; diff --git a/indexer/src/db_models/filled_collection_bids.rs b/indexer/src/db_models/filled_collection_bids.rs new file mode 100644 index 0000000..e45ac06 --- /dev/null +++ b/indexer/src/db_models/filled_collection_bids.rs @@ -0,0 +1,21 @@ +use diesel::{AsChangeset, Insertable}; +use field_count::FieldCount; +use serde::{Deserialize, Serialize}; + +use crate::schema::filled_collection_bids; + +#[derive(AsChangeset, Clone, Debug, Deserialize, FieldCount, Insertable, Serialize)] +#[diesel(table_name = filled_collection_bids)] +/// Database representation of a filled collection bid +pub struct FilledCollectionBid { + pub bid_obj_addr: String, + pub nft_id: String, + pub nft_name: String, + pub seller_addr: String, + pub price: i64, + pub royalties: i64, + pub commission: i64, + pub order_filled_timestamp: i64, + pub order_filled_tx_version: i64, + pub order_filled_event_idx: i64, +} diff --git a/indexer/src/db_models/contract_upgrade_indexer/ledger_info.rs b/indexer/src/db_models/ledger_info.rs similarity index 80% rename from indexer/src/db_models/contract_upgrade_indexer/ledger_info.rs rename to indexer/src/db_models/ledger_info.rs index f899992..e194d18 100644 --- a/indexer/src/db_models/contract_upgrade_indexer/ledger_info.rs +++ b/indexer/src/db_models/ledger_info.rs @@ -1,10 +1,7 @@ use diesel::{Identifiable, Insertable, OptionalExtension, QueryDsl, Queryable}; use
diesel_async::RunQueryDsl; -use crate::{ - contract_upgrade_schema::contract_upgrade_ledger_infos as ledger_infos, - utils::database_utils::DbPoolConnection, -}; +use crate::{schema::ledger_infos, utils::database_utils::DbPoolConnection}; #[derive(Debug, Identifiable, Insertable, Queryable)] #[diesel(table_name = ledger_infos)] diff --git a/indexer/src/db_models/mod.rs b/indexer/src/db_models/mod.rs index 2d14658..a6a7ac9 100644 --- a/indexer/src/db_models/mod.rs +++ b/indexer/src/db_models/mod.rs @@ -1 +1,8 @@ -pub mod contract_upgrade_indexer; +pub mod collection_bids; +pub mod filled_collection_bids; +pub mod ledger_info; +pub mod module_upgrade; +pub mod nft_asks; +pub mod nft_bids; +pub mod package_upgrade; +pub mod processor_status; diff --git a/indexer/src/db_models/contract_upgrade_indexer/module_upgrade.rs b/indexer/src/db_models/module_upgrade.rs similarity index 90% rename from indexer/src/db_models/contract_upgrade_indexer/module_upgrade.rs rename to indexer/src/db_models/module_upgrade.rs index 9ed5e91..ec45ed4 100644 --- a/indexer/src/db_models/contract_upgrade_indexer/module_upgrade.rs +++ b/indexer/src/db_models/module_upgrade.rs @@ -2,7 +2,7 @@ use diesel::{AsChangeset, Insertable}; use field_count::FieldCount; use serde::{Deserialize, Serialize}; -use crate::contract_upgrade_schema::module_upgrade_history; +use crate::schema::module_upgrade_history; #[derive(AsChangeset, Clone, Debug, Deserialize, FieldCount, Insertable, Serialize)] #[diesel(table_name = module_upgrade_history)] diff --git a/indexer/src/db_models/nft_asks.rs b/indexer/src/db_models/nft_asks.rs new file mode 100644 index 0000000..4ca15e2 --- /dev/null +++ b/indexer/src/db_models/nft_asks.rs @@ -0,0 +1,37 @@ +use diesel::{AsChangeset, Insertable}; +use field_count::FieldCount; +use serde::{Deserialize, Serialize}; + +use crate::schema::nft_asks; + +#[derive(AsChangeset, Clone, Debug, Deserialize, FieldCount, Insertable, Serialize)] +#[diesel(table_name = nft_asks)] +/// Database representation of a nft ask +pub struct NftAsk { + pub ask_obj_addr: String, + pub nft_id: String, + pub nft_name: String, + pub collection_addr: String, + pub collection_creator_addr: String, + pub collection_name: String, + pub nft_standard: i32, + pub marketplace_addr: String, + pub buyer_addr: String, + pub seller_addr: String, + pub price: i64, + pub royalties: i64, + pub commission: i64, + pub payment_token: String, + pub payment_token_type: i32, + pub order_placed_timestamp: i64, + pub order_placed_tx_version: i64, + pub order_placed_event_idx: i64, + pub order_filled_timestamp: i64, + pub order_filled_tx_version: i64, + pub order_filled_event_idx: i64, + pub order_cancelled_timestamp: i64, + pub order_cancelled_tx_version: i64, + pub order_cancelled_event_idx: i64, + pub order_status: i32, + pub order_type: i32, +} diff --git a/indexer/src/db_models/nft_bids.rs b/indexer/src/db_models/nft_bids.rs new file mode 100644 index 0000000..3055acf --- /dev/null +++ b/indexer/src/db_models/nft_bids.rs @@ -0,0 +1,37 @@ +use diesel::{AsChangeset, Insertable}; +use field_count::FieldCount; +use serde::{Deserialize, Serialize}; + +use crate::schema::nft_bids; + +#[derive(AsChangeset, Clone, Debug, Deserialize, FieldCount, Insertable, Serialize)] +#[diesel(table_name = nft_bids)] +/// Database representation of a nft bid +pub struct NftBid { + pub bid_obj_addr: String, + pub nft_id: String, + pub nft_name: String, + pub collection_addr: String, + pub collection_creator_addr: String, + pub collection_name: String, + pub nft_standard: i32, 
+ pub marketplace_addr: String, + pub buyer_addr: String, + pub seller_addr: String, + pub price: i64, + pub royalties: i64, + pub commission: i64, + pub payment_token: String, + pub payment_token_type: i32, + pub order_placed_timestamp: i64, + pub order_placed_tx_version: i64, + pub order_placed_event_idx: i64, + pub order_filled_timestamp: i64, + pub order_filled_tx_version: i64, + pub order_filled_event_idx: i64, + pub order_cancelled_timestamp: i64, + pub order_cancelled_tx_version: i64, + pub order_cancelled_event_idx: i64, + pub order_status: i32, + pub order_expiration_timestamp: i64, +} diff --git a/indexer/src/db_models/contract_upgrade_indexer/package_upgrade.rs b/indexer/src/db_models/package_upgrade.rs similarity index 97% rename from indexer/src/db_models/contract_upgrade_indexer/package_upgrade.rs rename to indexer/src/db_models/package_upgrade.rs index 4cb5f9e..3aeda19 100644 --- a/indexer/src/db_models/contract_upgrade_indexer/package_upgrade.rs +++ b/indexer/src/db_models/package_upgrade.rs @@ -2,7 +2,7 @@ use diesel::{AsChangeset, Insertable}; use field_count::FieldCount; use serde::{Deserialize, Serialize}; -use crate::contract_upgrade_schema::package_upgrade_history; +use crate::schema::package_upgrade_history; #[derive(AsChangeset, Clone, Debug, Deserialize, FieldCount, Insertable, Serialize)] #[diesel(table_name = package_upgrade_history)] diff --git a/indexer/src/db_models/contract_upgrade_indexer/processor_status.rs b/indexer/src/db_models/processor_status.rs similarity index 88% rename from indexer/src/db_models/contract_upgrade_indexer/processor_status.rs rename to indexer/src/db_models/processor_status.rs index f1d26ed..9af15e7 100644 --- a/indexer/src/db_models/contract_upgrade_indexer/processor_status.rs +++ b/indexer/src/db_models/processor_status.rs @@ -1,10 +1,7 @@ use diesel::{AsChangeset, ExpressionMethods, Insertable, OptionalExtension, QueryDsl, Queryable}; use diesel_async::RunQueryDsl; -use crate::{ - contract_upgrade_schema::contract_upgrade_processor_status as processor_status, - utils::database_utils::DbPoolConnection, -}; +use crate::{schema::processor_status, utils::database_utils::DbPoolConnection}; #[derive(AsChangeset, Debug, Insertable)] #[diesel(table_name = processor_status)] diff --git a/indexer/src/indexers/contract_upgrade_indexer/extractor.rs b/indexer/src/indexers/contract_upgrade_indexer/extractor.rs index 7e05edf..14a05a9 100644 --- a/indexer/src/indexers/contract_upgrade_indexer/extractor.rs +++ b/indexer/src/indexers/contract_upgrade_indexer/extractor.rs @@ -11,7 +11,7 @@ use aptos_indexer_processor_sdk::{ use async_trait::async_trait; use rayon::prelude::*; -use crate::db_models::contract_upgrade_indexer::{ +use crate::db_models::{ module_upgrade::ModuleUpgrade, package_upgrade::{PackageUpgrade, PackageUpgradeChangeOnChain}, }; @@ -56,7 +56,13 @@ impl Processable for Extractor { .map(|txn| { let txn_version = txn.version as i64; let txn_info = match txn.info.as_ref() { - Some(info) => info, + Some(info) => { + if info.success { + info + } else { + return (vec![], vec![]); + } + } None => { tracing::warn!( transaction_version = txn_version, diff --git a/indexer/src/indexers/contract_upgrade_indexer/processor.rs b/indexer/src/indexers/contract_upgrade_indexer/processor.rs index 1a52326..4a80779 100644 --- a/indexer/src/indexers/contract_upgrade_indexer/processor.rs +++ b/indexer/src/indexers/contract_upgrade_indexer/processor.rs @@ -40,7 +40,8 @@ impl ContractUpgradeProcessor { let starting_version = 
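+ // Resume from the later of the configured starting version and the last version recorded in the processor status table (see utils::starting_version::get_starting_version).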
get_starting_version(&self.config, self.db_pool.clone()).await?; tracing::info!( - "Starting events processor with starting version: {:?}", + "Starting {} with starting version: {:?}", + self.config.processor_config.name(), starting_version ); diff --git a/indexer/src/indexers/contract_upgrade_indexer/storers/upgrade_module_change_storer.rs b/indexer/src/indexers/contract_upgrade_indexer/storers/upgrade_module_change_storer.rs index d51c491..60d98eb 100644 --- a/indexer/src/indexers/contract_upgrade_indexer/storers/upgrade_module_change_storer.rs +++ b/indexer/src/indexers/contract_upgrade_indexer/storers/upgrade_module_change_storer.rs @@ -5,8 +5,8 @@ use diesel::{insert_into, QueryResult}; use diesel_async::{AsyncConnection, AsyncPgConnection, RunQueryDsl}; use crate::{ - contract_upgrade_schema::module_upgrade_history, - db_models::contract_upgrade_indexer::module_upgrade::ModuleUpgrade, + db_models::module_upgrade::ModuleUpgrade, + schema::module_upgrade_history, utils::{ database_connection::get_db_connection, database_utils::{get_config_table_chunk_size, ArcDbPool}, diff --git a/indexer/src/indexers/contract_upgrade_indexer/storers/upgrade_package_change_storer.rs b/indexer/src/indexers/contract_upgrade_indexer/storers/upgrade_package_change_storer.rs index 05807fe..fcff880 100644 --- a/indexer/src/indexers/contract_upgrade_indexer/storers/upgrade_package_change_storer.rs +++ b/indexer/src/indexers/contract_upgrade_indexer/storers/upgrade_package_change_storer.rs @@ -5,8 +5,8 @@ use diesel::{insert_into, QueryResult}; use diesel_async::{AsyncConnection, AsyncPgConnection, RunQueryDsl}; use crate::{ - contract_upgrade_schema::package_upgrade_history, - db_models::contract_upgrade_indexer::package_upgrade::PackageUpgrade, + db_models::package_upgrade::PackageUpgrade, + schema::package_upgrade_history, utils::{ database_connection::get_db_connection, database_utils::{get_config_table_chunk_size, ArcDbPool}, diff --git a/indexer/src/indexers/marketplace_indexer/event_parsers/aptos_labs_contract_event_parser.rs b/indexer/src/indexers/marketplace_indexer/event_parsers/aptos_labs_contract_event_parser.rs new file mode 100644 index 0000000..ac143f8 --- /dev/null +++ b/indexer/src/indexers/marketplace_indexer/event_parsers/aptos_labs_contract_event_parser.rs @@ -0,0 +1,131 @@ +use aptos_indexer_processor_sdk::aptos_protos::transaction::v1::Event as EventPB; + +use crate::{ + indexers::marketplace_indexer::extractor::ContractEvent, + onchain_events::aptos_labs_contract_events::{ + collection_bid_event::{ + CollectionBidCancelledEventOnChain, CollectionBidFilledEventOnChain, + CollectionBidPlacedEventOnChain, + }, + nft_ask_event::{AskCancelledEventOnChain, AskFilledEventOnChain, AskPlacedEventOnChain}, + nft_bid_event::{BidCancelledEventOnChain, BidFilledEventOnChain, BidPlacedEventOnChain}, + }, +}; + +pub fn parse_from_aptos_labs_contract_event( + event_idx: i64, + event: &EventPB, + txn_version: i64, + event_addr: String, + event_type: String, +) -> Option<ContractEvent> { + if event_type.starts_with(format!("{}::events::TokenOfferPlaced", event_addr).as_str()) { + println!("Aptos labs contract TokenOfferPlaced {}", event.data.as_str()); + let parsed_event: BidPlacedEventOnChain = serde_json::from_str(event.data.as_str()) + .unwrap_or_else(|_| panic!("Failed to parse Aptos labs contract TokenOfferPlaced, {}", event.data.as_str())); + Some(ContractEvent::BidPlacedEvent(parsed_event.to_db_nft_bid( + event_addr, + txn_version, + event_idx, + ))) + } else if
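+ // Remaining branches follow the same pattern: match the fully qualified event type ("<addr>::events::<Name>"), deserialize the JSON payload in event.data into the matching *EventOnChain struct, and convert it to a DB row.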
event_type.starts_with(format!("{}::events::TokenOfferFilled", event_addr).as_str()) { + println!("Aptos labs contract TokenOfferFilled {}", event.data.as_str()); + let parsed_event: BidFilledEventOnChain = serde_json::from_str(event.data.as_str()) + .unwrap_or_else(|_| panic!("Failed to parse Aptos labs contract TokenOfferFilled, {}", event.data.as_str())); + Some(ContractEvent::BidFilledEvent(parsed_event.to_db_nft_bid( + event_addr, + txn_version, + event_idx, + ))) + } else if event_type.starts_with(format!("{}::events::TokenOfferCancelled", event_addr).as_str()) + || event_type.starts_with(format!("{}::events::TokenOfferCanceled", event_addr).as_str()) + { + println!("Aptos labs contract TokenOfferCancelled {}", event.data.as_str()); + let parsed_event: BidCancelledEventOnChain = serde_json::from_str(event.data.as_str()) + .unwrap_or_else(|_| { + panic!("Failed to parse Aptos labs contract TokenOfferCancelled, {}", event.data.as_str()) + }); + Some(ContractEvent::BidCancelledEvent( + parsed_event.to_db_nft_bid(event_addr, txn_version, event_idx), + )) + } else if event_type.starts_with(format!("{}::events::ListingPlaced", event_addr).as_str()) { + println!("Aptos labs contract ListingPlaced {}", event.data.as_str()); + let parsed_event: AskPlacedEventOnChain = serde_json::from_str(event.data.as_str()) + .unwrap_or_else(|_| panic!("Failed to parse Aptos labs contract ListingPlaced, {}", event.data.as_str())); + Some(ContractEvent::AskPlacedEvent(parsed_event.to_db_nft_ask( + event_addr, + txn_version, + event_idx, + ))) + } else if event_type.starts_with(format!("{}::events::ListingFilled", event_addr).as_str()) { + println!("Aptos labs contract ListingFilled {}", event.data.as_str()); + let parsed_event: AskFilledEventOnChain = serde_json::from_str(event.data.as_str()) + .unwrap_or_else(|_| panic!("Failed to parse Aptos labs contract ListingFilled, {}", event.data.as_str())); + Some(ContractEvent::AskFilledEvent(parsed_event.to_db_nft_ask( + event_addr, + txn_version, + event_idx, + ))) + } else if event_type.starts_with(format!("{}::events::ListingCancelled", event_addr).as_str()) + || event_type.starts_with(format!("{}::events::ListingCanceled", event_addr).as_str()) + { + println!("Aptos labs contract ListingCancelled {}", event.data.as_str()); + let parsed_event: AskCancelledEventOnChain = serde_json::from_str(event.data.as_str()) + .unwrap_or_else(|_| { + panic!("Failed to parse Aptos labs contract ListingCancelled, {}", event.data.as_str()) + }); + Some(ContractEvent::AskCancelledEvent( + parsed_event.to_db_nft_ask(event_addr, txn_version, event_idx), + )) + } else if event_type + .starts_with(format!("{}::events::CollectionOfferPlaced", event_addr).as_str()) + { + println!("Aptos labs contract CollectionOfferPlaced {}", event.data.as_str()); + let parsed_event: CollectionBidPlacedEventOnChain = + serde_json::from_str(event.data.as_str()).unwrap_or_else(|_| { + panic!( + "Aptos labs contract Failed to parse CollectionOfferPlaced, {}", + event.data.as_str() + ) + }); + Some(ContractEvent::CollectionBidPlacedEvent( + parsed_event.to_db_collection_bid(event_addr, txn_version, event_idx), + )) + } else if event_type + .starts_with(format!("{}::events::CollectionOfferFilled", event_addr).as_str()) + { + println!("Aptos labs contract CollectionOfferFilled {}", event.data.as_str()); + let parsed_event: CollectionBidFilledEventOnChain = + serde_json::from_str(event.data.as_str()).unwrap_or_else(|_| { + panic!( + "Failed to parse Aptos labs contract CollectionOfferFilled, {}", + 
event.data.as_str() + ) + }); + Some(ContractEvent::CollectionBidFilledEvent( + parsed_event.to_db_collection_bid_and_filled_collection_bid( + event_addr, + txn_version, + event_idx, + ), + )) + } else if event_type + .starts_with(format!("{}::events::CollectionOfferCancelled", event_addr).as_str()) + || event_type + .starts_with(format!("{}::events::CollectionOfferCanceled", event_addr).as_str()) + { + println!("Aptos labs contract CollectionOfferCancelled {}", event.data.as_str()); + let parsed_event: CollectionBidCancelledEventOnChain = + serde_json::from_str(event.data.as_str()).unwrap_or_else(|_| { + panic!( + "Failed to parse Aptos labs contract CollectionOfferCancelled, {}", + event.data.as_str() + ) + }); + Some(ContractEvent::CollectionBidCancelledEvent( + parsed_event.to_db_collection_bid(event_addr, txn_version, event_idx), + )) + } else { + None + } +} diff --git a/indexer/src/indexers/marketplace_indexer/event_parsers/mod.rs b/indexer/src/indexers/marketplace_indexer/event_parsers/mod.rs new file mode 100644 index 0000000..057934b --- /dev/null +++ b/indexer/src/indexers/marketplace_indexer/event_parsers/mod.rs @@ -0,0 +1,3 @@ +pub mod aptos_labs_contract_event_parser; +pub mod tradeport_contract_v1_event_parser; +pub mod tradeport_contract_v2_event_parser; diff --git a/indexer/src/indexers/marketplace_indexer/event_parsers/tradeport_contract_v1_event_parser.rs b/indexer/src/indexers/marketplace_indexer/event_parsers/tradeport_contract_v1_event_parser.rs new file mode 100644 index 0000000..dfeda65 --- /dev/null +++ b/indexer/src/indexers/marketplace_indexer/event_parsers/tradeport_contract_v1_event_parser.rs @@ -0,0 +1,190 @@ +use aptos_indexer_processor_sdk::aptos_protos::transaction::v1::Event as EventPB; + +use crate::{ + indexers::marketplace_indexer::extractor::ContractEvent, + onchain_events::tradeport_contract_events::{ + collection_bid_event_v1::{ + TradeportV1CollectionBidCancelledEventOnChain, + TradeportV1CollectionBidFilledEventOnChain, TradeportV1CollectionBidPlacedEventOnChain, + }, + nft_ask_event_v1::{ + TradeportV1AskCancelledEventOnChain, TradeportV1AskFilledEventOnChain, + TradeportV1AskPlacedEventOnChain, TradeportV1AskUpdatedEventOnChain, + }, + nft_bid_event_v1::{ + TradeportV1BidCancelledEventOnChain, TradeportV1BidFilledEventOnChain, + TradeportV1BidPlacedEventOnChain, + }, + }, +}; + +pub fn parse_from_tradeport_v1_contract_event( + event_idx: i64, + event: &EventPB, + txn_version: i64, + event_addr: String, + event_type: String, +) -> Option<ContractEvent> { + if event_type.starts_with(format!("{}::biddings::InsertTokenBidEvent", event_addr).as_str()) { + println!("Tradeport v1 InsertTokenBidEvent {}", event.data.as_str()); + let parsed_event: TradeportV1BidPlacedEventOnChain = + serde_json::from_str(event.data.as_str()).unwrap_or_else(|_| { + panic!( + "Failed to parse Tradeport v1 InsertTokenBidEvent, {}", + event.data.as_str() + ) + }); + Some(ContractEvent::BidPlacedEvent(parsed_event.to_db_nft_bid( + event_addr, + txn_version, + event_idx, + ))) + } else if event_type + .starts_with(format!("{}::biddings::AcceptTokenBidEvent", event_addr).as_str()) + { + println!("Tradeport v1 AcceptTokenBidEvent {}", event.data.as_str()); + let parsed_event: TradeportV1BidFilledEventOnChain = + serde_json::from_str(event.data.as_str()).unwrap_or_else(|_| { + panic!( + "Failed to parse Tradeport v1 AcceptTokenBidEvent, {}", + event.data.as_str() + ) + }); + Some(ContractEvent::BidFilledEvent(parsed_event.to_db_nft_bid( + event_addr, + txn_version, + event_idx, + ))) + } else if
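+ // Tradeport v1 emits bid events from the `biddings` module and listing events from the `listings` module; Insert*/Accept*/Delete* map to placed/filled/cancelled.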
event_type + .starts_with(format!("{}::biddings::DeleteTokenBidEvent", event_addr).as_str()) + { + println!("Tradeport v1 DeleteTokenBidEvent {}", event.data.as_str()); + let parsed_event: TradeportV1BidCancelledEventOnChain = + serde_json::from_str(event.data.as_str()).unwrap_or_else(|_| { + panic!( + "Failed to parse Tradeport v1 DeleteTokenBidEvent, {}", + event.data.as_str() + ) + }); + Some(ContractEvent::BidCancelledEvent( + parsed_event.to_db_nft_bid(event_addr, txn_version, event_idx), + )) + } else if event_type + .starts_with(format!("{}::listings::InsertListingEvent", event_addr).as_str()) + { + println!("Tradeport v1 InsertListingEvent {}", event.data.as_str()); + let parsed_event: TradeportV1AskPlacedEventOnChain = + serde_json::from_str(event.data.as_str()).unwrap_or_else(|_| { + panic!( + "Failed to parse Tradeport v1 InsertListingEvent, {}", + event.data.as_str() + ) + }); + Some(ContractEvent::AskPlacedEvent(parsed_event.to_db_nft_ask( + event_addr, + txn_version, + event_idx, + ))) + } else if event_type + .starts_with(format!("{}::listings::UpdateListingEvent", event_addr).as_str()) + { + println!("Tradeport v1 UpdateListingEvent {}", event.data.as_str()); + let parsed_event: TradeportV1AskUpdatedEventOnChain = + serde_json::from_str(event.data.as_str()).unwrap_or_else(|_| { + panic!( + "Failed to parse Tradeport v1 UpdateListingEvent, {}", + event.data.as_str() + ) + }); + Some(ContractEvent::AskPlacedEvent(parsed_event.to_db_nft_ask( + event_addr, + txn_version, + event_idx, + ))) + } else if event_type.starts_with(format!("{}::listings::BuyEvent", event_addr).as_str()) { + println!("Tradeport v1 BuyEvent {}", event.data.as_str()); + let parsed_event: TradeportV1AskFilledEventOnChain = + serde_json::from_str(event.data.as_str()).unwrap_or_else(|_| { + panic!( + "Failed to parse Tradeport v1 BuyEvent, {}", + event.data.as_str() + ) + }); + Some(ContractEvent::AskFilledEvent(parsed_event.to_db_nft_ask( + event_addr, + txn_version, + event_idx, + ))) + } else if event_type + .starts_with(format!("{}::listings::DeleteListingEvent", event_addr).as_str()) + { + println!("Tradeport v1 DeleteListingEvent {}", event.data.as_str()); + let parsed_event: TradeportV1AskCancelledEventOnChain = + serde_json::from_str(event.data.as_str()).unwrap_or_else(|_| { + panic!( + "Failed to parse Tradeport v1 DeleteListingEvent, {}", + event.data.as_str() + ) + }); + Some(ContractEvent::AskCancelledEvent( + parsed_event.to_db_nft_ask(event_addr, txn_version, event_idx), + )) + } else if event_type + .starts_with(format!("{}::biddings::InsertCollectionBidEvent", event_addr).as_str()) + { + println!( + "Tradeport v1 InsertCollectionBidEvent {}", + event.data.as_str() + ); + let parsed_event: TradeportV1CollectionBidPlacedEventOnChain = + serde_json::from_str(event.data.as_str()).unwrap_or_else(|_| { + panic!( + "Failed to parse Tradeport v1 InsertCollectionBidEvent, {}", + event.data.as_str() + ) + }); + Some(ContractEvent::CollectionBidPlacedEvent( + parsed_event.to_db_collection_bid(event_addr, txn_version, event_idx), + )) + } else if event_type + .starts_with(format!("{}::biddings::AcceptCollectionBidEvent", event_addr).as_str()) + { + println!( + "Tradeport v1 AcceptCollectionBidEvent {}", + event.data.as_str() + ); + let parsed_event: TradeportV1CollectionBidFilledEventOnChain = + serde_json::from_str(event.data.as_str()).unwrap_or_else(|_| { + panic!( + "Failed to parse Tradeport v1 AcceptCollectionBidEvent, {}", + event.data.as_str() + ) + }); + 
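+ // A filled collection bid yields two DB writes: the parent collection_bids row plus one filled_collection_bids row for the NFT sold into the bid.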
Some(ContractEvent::CollectionBidFilledEvent( + parsed_event.to_db_collection_bid_and_filled_collection_bid( + event_addr, + txn_version, + event_idx, + ), + )) + } else if event_type + .starts_with(format!("{}::biddings::DeleteCollectionBidEvent", event_addr).as_str()) + { + println!( + "Tradeport v1 DeleteCollectionBidEvent {}", + event.data.as_str() + ); + let parsed_event: TradeportV1CollectionBidCancelledEventOnChain = + serde_json::from_str(event.data.as_str()).unwrap_or_else(|_| { + panic!( + "Failed to parse Tradeport v1 DeleteCollectionBidEvent, {}", + event.data.as_str() + ) + }); + Some(ContractEvent::CollectionBidCancelledEvent( + parsed_event.to_db_collection_bid(event_addr, txn_version, event_idx), + )) + } else { + None + } +} diff --git a/indexer/src/indexers/marketplace_indexer/event_parsers/tradeport_contract_v2_event_parser.rs b/indexer/src/indexers/marketplace_indexer/event_parsers/tradeport_contract_v2_event_parser.rs new file mode 100644 index 0000000..98320ab --- /dev/null +++ b/indexer/src/indexers/marketplace_indexer/event_parsers/tradeport_contract_v2_event_parser.rs @@ -0,0 +1,174 @@ +use aptos_indexer_processor_sdk::aptos_protos::transaction::v1::Event as EventPB; + +use crate::{ + indexers::marketplace_indexer::extractor::ContractEvent, + onchain_events::tradeport_contract_events::{ + collection_bid_event_v2::{ + TradeportV2CollectionBidCancelledEventOnChain, + TradeportV2CollectionBidFilledEventOnChain, TradeportV2CollectionBidPlacedEventOnChain, + }, + nft_ask_event_v2::{ + TradeportV2AskCancelledEventOnChain, TradeportV2AskFilledEventOnChain, + TradeportV2AskPlacedEventOnChain, + }, + nft_bid_event_v2::{ + TradeportV2BidCancelledEventOnChain, TradeportV2BidFilledEventOnChain, + TradeportV2BidPlacedEventOnChain, + }, + }, +}; + +pub fn parse_from_tradeport_v2_contract_event( + event_idx: i64, + event: &EventPB, + txn_version: i64, + event_addr: String, + event_type: String, +) -> Option<ContractEvent> { + if event_type.starts_with(format!("{}::biddings_v2::InsertTokenBidEvent", event_addr).as_str()) { + println!("Tradeport v2 InsertTokenBidEvent {}", event.data.as_str()); + let parsed_event: TradeportV2BidPlacedEventOnChain = + serde_json::from_str(event.data.as_str()).unwrap_or_else(|_| { + panic!( + "Failed to parse Tradeport v2 InsertTokenBidEvent, {}", + event.data.as_str() + ) + }); + Some(ContractEvent::BidPlacedEvent(parsed_event.to_db_nft_bid( + event_addr, + txn_version, + event_idx, + ))) + } else if event_type + .starts_with(format!("{}::biddings_v2::AcceptTokenBidEvent", event_addr).as_str()) + { + println!("Tradeport v2 AcceptTokenBidEvent {}", event.data.as_str()); + let parsed_event: TradeportV2BidFilledEventOnChain = + serde_json::from_str(event.data.as_str()).unwrap_or_else(|_| { + panic!( + "Failed to parse Tradeport v2 AcceptTokenBidEvent, {}", + event.data.as_str() + ) + }); + Some(ContractEvent::BidFilledEvent(parsed_event.to_db_nft_bid( + event_addr, + txn_version, + event_idx, + ))) + } else if event_type + .starts_with(format!("{}::biddings_v2::DeleteTokenBidEvent", event_addr).as_str()) + { + println!("Tradeport v2 DeleteTokenBidEvent {}", event.data.as_str()); + let parsed_event: TradeportV2BidCancelledEventOnChain = + serde_json::from_str(event.data.as_str()).unwrap_or_else(|_| { + panic!( + "Failed to parse Tradeport v2 DeleteTokenBidEvent, {}", + event.data.as_str() + ) + }); + Some(ContractEvent::BidCancelledEvent( + parsed_event.to_db_nft_bid(event_addr, txn_version, event_idx), + )) + } else if event_type
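+ // Tradeport v2 mirrors v1 under the biddings_v2/listings_v2 modules; note there is no UpdateListingEvent branch in this v2 parser.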
.starts_with(format!("{}::listings_v2::InsertListingEvent", event_addr).as_str()) + { + println!("Tradeport v2 InsertListingEvent {}", event.data.as_str()); + let parsed_event: TradeportV2AskPlacedEventOnChain = + serde_json::from_str(event.data.as_str()).unwrap_or_else(|_| { + panic!( + "Failed to parse Tradeport v2 InsertListingEvent, {}", + event.data.as_str() + ) + }); + Some(ContractEvent::AskPlacedEvent(parsed_event.to_db_nft_ask( + event_addr, + txn_version, + event_idx, + ))) + } else if event_type.starts_with(format!("{}::listings_v2::BuyEvent", event_addr).as_str()) { + println!("Tradeport v2 BuyEvent {}", event.data.as_str()); + let parsed_event: TradeportV2AskFilledEventOnChain = + serde_json::from_str(event.data.as_str()).unwrap_or_else(|_| { + panic!( + "Failed to parse Tradeport v2 BuyEvent, {}", + event.data.as_str() + ) + }); + Some(ContractEvent::AskFilledEvent(parsed_event.to_db_nft_ask( + event_addr, + txn_version, + event_idx, + ))) + } else if event_type + .starts_with(format!("{}::listings_v2::DeleteListingEvent", event_addr).as_str()) + { + println!("Tradeport v2 DeleteListingEvent {}", event.data.as_str()); + let parsed_event: TradeportV2AskCancelledEventOnChain = + serde_json::from_str(event.data.as_str()).unwrap_or_else(|_| { + panic!( + "Failed to parse Tradeport v2 DeleteListingEvent, {}", + event.data.as_str() + ) + }); + Some(ContractEvent::AskCancelledEvent( + parsed_event.to_db_nft_ask(event_addr, txn_version, event_idx), + )) + } else if event_type + .starts_with(format!("{}::biddings_v2::InsertCollectionBidEvent", event_addr).as_str()) + { + println!( + "Tradeport v2 InsertCollectionBidEvent {}", + event.data.as_str() + ); + let parsed_event: TradeportV2CollectionBidPlacedEventOnChain = + serde_json::from_str(event.data.as_str()).unwrap_or_else(|_| { + panic!( + "Failed to parse Tradeport v2 InsertCollectionBidEvent, {}", + event.data.as_str() + ) + }); + Some(ContractEvent::CollectionBidPlacedEvent( + parsed_event.to_db_collection_bid(event_addr, txn_version, event_idx), + )) + } else if event_type + .starts_with(format!("{}::biddings_v2::AcceptCollectionBidEvent", event_addr).as_str()) + { + println!( + "Tradeport v2 AcceptCollectionBidEvent {}", + event.data.as_str() + ); + let parsed_event: TradeportV2CollectionBidFilledEventOnChain = + serde_json::from_str(event.data.as_str()).unwrap_or_else(|_| { + panic!( + "Failed to parse Tradeport v2 AcceptCollectionBidEvent, {}", + event.data.as_str() + ) + }); + Some(ContractEvent::CollectionBidFilledEvent( + parsed_event.to_db_collection_bid_and_filled_collection_bid( + event_addr, + txn_version, + event_idx, + ), + )) + } else if event_type + .starts_with(format!("{}::biddings_v2::DeleteCollectionBidEvent", event_addr).as_str()) + { + println!( + "Tradeport v2 DeleteCollectionBidEvent {}", + event.data.as_str() + ); + let parsed_event: TradeportV2CollectionBidCancelledEventOnChain = + serde_json::from_str(event.data.as_str()).unwrap_or_else(|_| { + panic!( + "Failed to parse Tradeport v2 DeleteCollectionBidEvent, {}", + event.data.as_str() + ) + }); + Some(ContractEvent::CollectionBidCancelledEvent( + parsed_event.to_db_collection_bid(event_addr, txn_version, event_idx), + )) + } else { + None + } +} diff --git a/indexer/src/indexers/marketplace_indexer/extractor.rs b/indexer/src/indexers/marketplace_indexer/extractor.rs new file mode 100644 index 0000000..c67544d --- /dev/null +++ b/indexer/src/indexers/marketplace_indexer/extractor.rs @@ -0,0 +1,193 @@ +use ahash::AHashSet; +use anyhow::Result; +use 
aptos_indexer_processor_sdk::{ + aptos_protos::transaction::v1::{ + transaction::TxnData, Event as EventPB, Transaction, WriteSetChange, + }, + traits::{async_step::AsyncRunType, AsyncStep, NamedStep, Processable}, + types::transaction_context::TransactionContext, + utils::{convert::standardize_address, errors::ProcessorError}, +}; +use async_trait::async_trait; +use rayon::prelude::*; + +use crate::db_models::{ + collection_bids::CollectionBid, filled_collection_bids::FilledCollectionBid, nft_asks::NftAsk, + nft_bids::NftBid, +}; + +use super::event_parsers::{ + aptos_labs_contract_event_parser::parse_from_aptos_labs_contract_event, + tradeport_contract_v1_event_parser::parse_from_tradeport_v1_contract_event, + tradeport_contract_v2_event_parser::parse_from_tradeport_v2_contract_event, +}; + +/// Extractor is a step that extracts events and their metadata from transactions. +pub struct Extractor +where + Self: Sized + Send + 'static, +{ + contract_addresses: AHashSet<String>, +} + +impl Extractor { + pub fn new(contract_addresses: Vec<String>) -> Self { + Self { + contract_addresses: contract_addresses.into_iter().collect(), + } + } +} + +impl AsyncStep for Extractor {} + +impl NamedStep for Extractor { + fn name(&self) -> String { + "Extractor".to_string() + } +} + +#[async_trait] +impl Processable for Extractor { + type Input = Vec<Transaction>; + type Output = TransactionContextData; + type RunType = AsyncRunType; + + async fn process( + &mut self, + item: TransactionContext<Vec<Transaction>>, + ) -> Result<Option<TransactionContext<TransactionContextData>>, ProcessorError> { + let results: Vec<(Vec<ContractEvent>, Vec<WriteSetChange>)> = item + .data + .par_iter() + .map(|txn| { + let txn_version = txn.version as i64; + match txn.info.as_ref() { + Some(info) => { + if !info.success { + return (vec![], vec![]); + } + } + None => { + tracing::warn!( + transaction_version = txn_version, + "Transaction info doesn't exist" + ); + return (vec![], vec![]); + } + }; + let txn_data = match txn.txn_data.as_ref() { + Some(data) => data, + None => { + tracing::warn!( + transaction_version = txn_version, + "Transaction data doesn't exist" + ); + return (vec![], vec![]); + } + }; + let raw_events = match txn_data { + TxnData::BlockMetadata(tx_inner) => &tx_inner.events, + TxnData::Genesis(tx_inner) => &tx_inner.events, + TxnData::User(tx_inner) => &tx_inner.events, + _ => &vec![], + }; + + let txn_events = + ContractEvent::from_events(&self.contract_addresses, raw_events, txn_version); + + (txn_events, vec![]) + }) + .collect::<Vec<(Vec<ContractEvent>, Vec<WriteSetChange>)>>(); + + let (events, changes): (Vec<ContractEvent>, Vec<WriteSetChange>) = + results.into_iter().fold( + (Vec::new(), Vec::new()), + |(mut events_acc, mut changes_acc), (events, changes)| { + events_acc.extend(events); + changes_acc.extend(changes); + (events_acc, changes_acc) + }, + ); + + Ok(Some(TransactionContext { + data: TransactionContextData { events, changes }, + metadata: item.metadata, + })) + } +} + +#[derive(Debug, Clone)] +pub struct TransactionContextData { + pub events: Vec<ContractEvent>, + pub changes: Vec<WriteSetChange>, +} + +#[derive(Debug, Clone)] +pub enum ContractEvent { + BidPlacedEvent(NftBid), + BidFilledEvent(NftBid), + BidCancelledEvent(NftBid), + AskPlacedEvent(NftAsk), + AskFilledEvent(NftAsk), + AskCancelledEvent(NftAsk), + CollectionBidPlacedEvent(CollectionBid), + CollectionBidFilledEvent((CollectionBid, FilledCollectionBid)), + CollectionBidCancelledEvent(CollectionBid), +} + +impl ContractEvent { + fn from_event( + contract_addresses: &AHashSet<String>, + event_idx: i64, + event: &EventPB, + txn_version: i64, + ) -> Option<ContractEvent> { + // use standardize_address to pad the address in event type before processing + let parts =
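+ // type_str has the form "address::module::EventName"; the on-chain address may be unpadded, so the type string is rebuilt from the standardized address before matching.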
event.type_str.split("::").collect::<Vec<&str>>(); + let event_addr = standardize_address(parts[0]); + if contract_addresses.contains(event_addr.as_str()) { + let event_type = event_addr.clone() + "::" + parts[1] + "::" + parts[2]; + parse_from_aptos_labs_contract_event( + event_idx, + event, + txn_version, + event_addr.clone(), + event_type.clone(), + ) + .or_else(|| { + parse_from_tradeport_v1_contract_event( + event_idx, + event, + txn_version, + event_addr.clone(), + event_type.clone(), + ) + }) + .or_else(|| { + parse_from_tradeport_v2_contract_event( + event_idx, + event, + txn_version, + event_addr, + event_type, + ) + }) + } else { + None + } + } + + pub fn from_events( + contract_addresses: &AHashSet<String>, + events: &[EventPB], + txn_version: i64, + ) -> Vec<ContractEvent> { + events + .iter() + .enumerate() + .filter_map(|(idx, event)| { + Self::from_event(contract_addresses, idx as i64, event, txn_version) + }) + .collect() + } +} diff --git a/indexer/src/indexers/marketplace_indexer/mod.rs b/indexer/src/indexers/marketplace_indexer/mod.rs new file mode 100644 index 0000000..19a25b4 --- /dev/null +++ b/indexer/src/indexers/marketplace_indexer/mod.rs @@ -0,0 +1,5 @@ +pub mod event_parsers; +pub mod extractor; +pub mod processor; +pub mod storer; +pub mod storers; diff --git a/indexer/src/indexers/marketplace_indexer/processor.rs b/indexer/src/indexers/marketplace_indexer/processor.rs new file mode 100644 index 0000000..e7583e8 --- /dev/null +++ b/indexer/src/indexers/marketplace_indexer/processor.rs @@ -0,0 +1,104 @@ +use anyhow::Result; +use aptos_indexer_processor_sdk::{ + aptos_indexer_transaction_stream::{TransactionStream, TransactionStreamConfig}, + builder::ProcessorBuilder, + common_steps::TransactionStreamStep, + traits::IntoRunnableStep, +}; + +use super::{extractor::Extractor, storer::Storer}; +use crate::{ + config::indexer_processor_config::{CustomConfig, IndexerProcessorConfig}, + utils::{ + chain_id::check_or_update_chain_id, database_connection::new_db_pool, + database_utils::ArcDbPool, latest_processed_version_tracker::LatestVersionProcessedTracker, + starting_version::get_starting_version, + }, +}; + +pub struct MarketplaceProcessor { + pub config: IndexerProcessorConfig, + pub db_pool: ArcDbPool, +} + +impl MarketplaceProcessor { + pub async fn new(config: IndexerProcessorConfig) -> Result<Self> { + let conn_pool = new_db_pool( + &config.db_config.postgres_connection_string, + config.db_config.db_pool_size, + ) + .await; + + Ok(Self { + config, + db_pool: conn_pool, + }) + } + + pub async fn run_processor(self) -> Result<()> { + // Merge the starting version from config and the latest processed version from the DB + let starting_version = get_starting_version(&self.config, self.db_pool.clone()).await?; + + tracing::info!( + "Starting {} with starting version: {:?}", + self.config.processor_config.name(), + starting_version + ); + + // Check and update the ledger chain id to ensure we're indexing the correct chain + let grpc_chain_id = TransactionStream::new(self.config.transaction_stream_config.clone()) + .await?
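+ // Ask the gRPC endpoint for its chain id so one network's transactions are never indexed into another network's DB.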
+ .get_chain_id() + .await?; + check_or_update_chain_id(grpc_chain_id as i64, self.db_pool.clone()).await?; + + // Define processor steps + let transaction_stream = TransactionStreamStep::new(TransactionStreamConfig { + starting_version: Some(starting_version), + ..self.config.transaction_stream_config + }) + .await?; + let events_extractor = Extractor::new(match self.config.custom_config { + CustomConfig::MarketplaceIndexer(contract_addresses) => contract_addresses, + _ => { + return Err(anyhow::anyhow!("Invalid custom config")); + } + }); + let events_storer = Storer::new(self.db_pool.clone()); + let version_tracker = LatestVersionProcessedTracker::new( + self.config.db_config, + starting_version, + self.config.processor_config.name().to_string(), + ) + .await?; + + // Connect processor steps together + let (_, buffer_receiver) = ProcessorBuilder::new_with_inputless_first_step( + transaction_stream.into_runnable_step(), + ) + .connect_to(events_extractor.into_runnable_step(), 10) + .connect_to(events_storer.into_runnable_step(), 10) + .connect_to(version_tracker.into_runnable_step(), 10) + .end_and_return_output_receiver(10); + + // (Optional) Parse the results + loop { + match buffer_receiver.recv().await { + Ok(txn_context) => { + if txn_context.data.events.is_empty() && txn_context.data.changes.is_empty() { + continue; + } + tracing::info!( + "Finished processing events from versions [{:?}, {:?}]", + txn_context.metadata.start_version, + txn_context.metadata.end_version, + ); + } + Err(_) => { + tracing::error!("Channel is closed"); + return Ok(()); + } + } + } + } +} diff --git a/indexer/src/indexers/marketplace_indexer/storer.rs b/indexer/src/indexers/marketplace_indexer/storer.rs new file mode 100644 index 0000000..7ccf8e2 --- /dev/null +++ b/indexer/src/indexers/marketplace_indexer/storer.rs @@ -0,0 +1,193 @@ +use ahash::AHashMap; +use anyhow::Result; +use aptos_indexer_processor_sdk::{ + traits::{async_step::AsyncRunType, AsyncStep, NamedStep, Processable}, + types::transaction_context::TransactionContext, + utils::errors::ProcessorError, +}; +use async_trait::async_trait; + +use super::{ + extractor::{ContractEvent, TransactionContextData}, + storers::{ + ask_cancelled_event_storer::process_ask_cancelled_events, + ask_filled_event_storer::process_ask_filled_events, + ask_placed_event_storer::process_ask_placed_events, + bid_cancelled_event_storer::process_bid_cancelled_events, + bid_filled_event_storer::process_bid_filled_events, + bid_placed_event_storer::process_bid_placed_events, + collection_bid_cancelled_event_storer::process_collection_bid_cancelled_events, + collection_bid_filled_event_storer::process_collection_bid_filled_events, + collection_bid_placed_event_storer::process_collection_bid_placed_events, + }, +}; +use crate::utils::database_utils::ArcDbPool; + +/// Storer is a step that inserts events in the database. 
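+/// Incoming events are bucketed by variant and written through the per-table storers in storers/, each as a chunked, guarded upsert.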
+pub struct Storer +where + Self: Sized + Send + 'static, +{ + pool: ArcDbPool, +} + +impl AsyncStep for Storer {} + +impl NamedStep for Storer { + fn name(&self) -> String { + "Storer".to_string() + } +} + +impl Storer { + pub fn new(pool: ArcDbPool) -> Self { + Self { pool } + } +} + +#[async_trait] +impl Processable for Storer { + type Input = TransactionContextData; + type Output = TransactionContextData; + type RunType = AsyncRunType; + + async fn process( + &mut self, + transaction_context_data: TransactionContext<TransactionContextData>, + ) -> Result<Option<TransactionContext<TransactionContextData>>, ProcessorError> { + let per_table_chunk_sizes: AHashMap<String, usize> = AHashMap::new(); + let data = transaction_context_data.data.clone(); + let ( + bid_placed_events, + bid_filled_events, + bid_cancelled_events, + ask_placed_events, + ask_filled_events, + ask_cancelled_events, + collection_bid_placed_events, + collection_bid_filled_events, + collection_bid_cancelled_events, + ) = data.events.into_iter().fold( + ( + vec![], + vec![], + vec![], + vec![], + vec![], + vec![], + vec![], + vec![], + vec![], + ), + |( + mut bid_placed_events, + mut bid_filled_events, + mut bid_cancelled_events, + mut ask_placed_events, + mut ask_filled_events, + mut ask_cancelled_events, + mut collection_bid_placed_events, + mut collection_bid_filled_events, + mut collection_bid_cancelled_events, + ), + event| { + match event { + ContractEvent::BidPlacedEvent(nft_bid) => bid_placed_events.push(nft_bid), + ContractEvent::BidFilledEvent(nft_bid) => bid_filled_events.push(nft_bid), + ContractEvent::BidCancelledEvent(nft_bid) => bid_cancelled_events.push(nft_bid), + ContractEvent::AskPlacedEvent(nft_ask) => ask_placed_events.push(nft_ask), + ContractEvent::AskFilledEvent(nft_ask) => ask_filled_events.push(nft_ask), + ContractEvent::AskCancelledEvent(nft_ask) => ask_cancelled_events.push(nft_ask), + ContractEvent::CollectionBidPlacedEvent(collection_bid) => { + collection_bid_placed_events.push(collection_bid) + } + ContractEvent::CollectionBidFilledEvent(( + collection_bid, + filled_collection_bid, + )) => { + collection_bid_filled_events.push((collection_bid, filled_collection_bid)) + } + ContractEvent::CollectionBidCancelledEvent(collection_bid) => { + collection_bid_cancelled_events.push(collection_bid) + } + } + ( + bid_placed_events, + bid_filled_events, + bid_cancelled_events, + ask_placed_events, + ask_filled_events, + ask_cancelled_events, + collection_bid_placed_events, + collection_bid_filled_events, + collection_bid_cancelled_events, + ) + }, + ); + + process_bid_placed_events( + self.pool.clone(), + per_table_chunk_sizes.clone(), + bid_placed_events, + ) + .await?; + + process_bid_filled_events( + self.pool.clone(), + per_table_chunk_sizes.clone(), + bid_filled_events, + ) + .await?; + + process_bid_cancelled_events( + self.pool.clone(), + per_table_chunk_sizes.clone(), + bid_cancelled_events, + ) + .await?; + + process_ask_placed_events( + self.pool.clone(), + per_table_chunk_sizes.clone(), + ask_placed_events, + ) + .await?; + + process_ask_filled_events( + self.pool.clone(), + per_table_chunk_sizes.clone(), + ask_filled_events, + ) + .await?; + + process_ask_cancelled_events( + self.pool.clone(), + per_table_chunk_sizes.clone(), + ask_cancelled_events, + ) + .await?; + + process_collection_bid_placed_events( + self.pool.clone(), + per_table_chunk_sizes.clone(), + collection_bid_placed_events, + ) + .await?; + + process_collection_bid_filled_events( + self.pool.clone(), + per_table_chunk_sizes.clone(), + collection_bid_filled_events, + ) + .await?; +
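+ // The (tx version, event index) guards inside each storer keep these upserts idempotent when the stream replays or chunks overlap.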
+ process_collection_bid_cancelled_events( + self.pool.clone(), + per_table_chunk_sizes.clone(), + collection_bid_cancelled_events, + ) + .await?; + + Ok(Some(transaction_context_data)) + } +} diff --git a/indexer/src/indexers/marketplace_indexer/storers/ask_cancelled_event_storer.rs b/indexer/src/indexers/marketplace_indexer/storers/ask_cancelled_event_storer.rs new file mode 100644 index 0000000..152ddb5 --- /dev/null +++ b/indexer/src/indexers/marketplace_indexer/storers/ask_cancelled_event_storer.rs @@ -0,0 +1,86 @@ +use ahash::AHashMap; +use anyhow::Result; +use aptos_indexer_processor_sdk::utils::errors::ProcessorError; +use diesel::{ + insert_into, query_dsl::methods::FilterDsl, upsert::excluded, BoolExpressionMethods, + ExpressionMethods, QueryResult, +}; +use diesel_async::{AsyncConnection, AsyncPgConnection, RunQueryDsl}; + +use crate::{ + db_models::nft_asks::NftAsk, + schema::nft_asks, + utils::{ + database_connection::get_db_connection, + database_execution::handle_db_execution, + database_utils::{get_config_table_chunk_size, ArcDbPool}, + }, +}; + +async fn execute_sql( + conn: &mut AsyncPgConnection, + items_to_insert: Vec<NftAsk>, +) -> QueryResult<()> { + conn.transaction(|conn| { + Box::pin(async move { + let sql = insert_into(nft_asks::table) + .values(items_to_insert.clone()) + .on_conflict(nft_asks::ask_obj_addr) + .do_update() + .set(( + nft_asks::price.eq(excluded(nft_asks::price)), + nft_asks::order_cancelled_timestamp + .eq(excluded(nft_asks::order_cancelled_timestamp)), + nft_asks::order_cancelled_tx_version + .eq(excluded(nft_asks::order_cancelled_tx_version)), + nft_asks::order_cancelled_event_idx + .eq(excluded(nft_asks::order_cancelled_event_idx)), + nft_asks::order_status.eq(excluded(nft_asks::order_status)), + )) + .filter( + // Update only if tx version is greater than the existing one + // or if the tx version is the same but the event index is greater + nft_asks::order_cancelled_tx_version + .lt(excluded(nft_asks::order_cancelled_tx_version)) + .or(nft_asks::order_cancelled_tx_version + .eq(excluded(nft_asks::order_cancelled_tx_version)) + .and( + nft_asks::order_cancelled_event_idx + .lt(excluded(nft_asks::order_cancelled_event_idx)), + )), + ); + sql.execute(conn).await?; + Ok(()) + }) + }) + .await +} + +pub async fn process_ask_cancelled_events( + pool: ArcDbPool, + per_table_chunk_sizes: AHashMap<String, usize>, + events: Vec<NftAsk>, +) -> Result<(), ProcessorError> { + let chunk_size = get_config_table_chunk_size::<NftAsk>("nft_asks", &per_table_chunk_sizes); + let tasks = events + .chunks(chunk_size) + .map(|chunk| { + let pool = pool.clone(); + let items = chunk.to_vec(); + tokio::spawn(async move { + let conn = &mut get_db_connection(&pool).await.expect( + "Failed to get connection from pool while processing ask cancelled events", + ); + execute_sql(conn, items).await + }) + }) + .collect::<Vec<_>>(); + + match handle_db_execution(tasks).await { + Ok(_) => Ok(()), + Err(e) => { + println!("error writing ask cancelled events to db: {:?}", events); + Err(e) + } + } +} diff --git a/indexer/src/indexers/marketplace_indexer/storers/ask_filled_event_storer.rs b/indexer/src/indexers/marketplace_indexer/storers/ask_filled_event_storer.rs new file mode 100644 index 0000000..b1689b5 --- /dev/null +++ b/indexer/src/indexers/marketplace_indexer/storers/ask_filled_event_storer.rs @@ -0,0 +1,87 @@ +use ahash::AHashMap; +use anyhow::Result; +use aptos_indexer_processor_sdk::utils::errors::ProcessorError; +use diesel::{ + insert_into, query_dsl::methods::FilterDsl, upsert::excluded, BoolExpressionMethods, +
ExpressionMethods, QueryResult, +}; +use diesel_async::{AsyncConnection, AsyncPgConnection, RunQueryDsl}; + +use crate::{ + db_models::nft_asks::NftAsk, + schema::nft_asks, + utils::{ + database_connection::get_db_connection, + database_execution::handle_db_execution, + database_utils::{get_config_table_chunk_size, ArcDbPool}, + }, +}; + +async fn execute_sql( + conn: &mut AsyncPgConnection, + items_to_insert: Vec<NftAsk>, +) -> QueryResult<()> { + conn.transaction(|conn| { + Box::pin(async move { + let sql = insert_into(nft_asks::table) + .values(items_to_insert.clone()) + .on_conflict(nft_asks::ask_obj_addr) + .do_update() + .set(( + nft_asks::buyer_addr.eq(excluded(nft_asks::buyer_addr)), + nft_asks::royalties.eq(excluded(nft_asks::royalties)), + nft_asks::commission.eq(excluded(nft_asks::commission)), + nft_asks::price.eq(excluded(nft_asks::price)), + nft_asks::order_filled_timestamp.eq(excluded(nft_asks::order_filled_timestamp)), + nft_asks::order_filled_tx_version + .eq(excluded(nft_asks::order_filled_tx_version)), + nft_asks::order_filled_event_idx.eq(excluded(nft_asks::order_filled_event_idx)), + nft_asks::order_status.eq(excluded(nft_asks::order_status)), + )) + .filter( + // Update only if tx version is greater than the existing one + // or if the tx version is the same but the event index is greater + nft_asks::order_filled_tx_version + .lt(excluded(nft_asks::order_filled_tx_version)) + .or(nft_asks::order_filled_tx_version + .eq(excluded(nft_asks::order_filled_tx_version)) + .and( + nft_asks::order_filled_event_idx + .lt(excluded(nft_asks::order_filled_event_idx)), + )), + ); + sql.execute(conn).await?; + Ok(()) + }) + }) + .await +} + +pub async fn process_ask_filled_events( + pool: ArcDbPool, + per_table_chunk_sizes: AHashMap<String, usize>, + events: Vec<NftAsk>, +) -> Result<(), ProcessorError> { + let chunk_size = get_config_table_chunk_size::<NftAsk>("nft_asks", &per_table_chunk_sizes); + let tasks = events + .chunks(chunk_size) + .map(|chunk| { + let pool = pool.clone(); + let items = chunk.to_vec(); + tokio::spawn(async move { + let conn = &mut get_db_connection(&pool).await.expect( + "Failed to get connection from pool while processing ask filled events", + ); + execute_sql(conn, items).await + }) + }) + .collect::<Vec<_>>(); + + match handle_db_execution(tasks).await { + Ok(_) => Ok(()), + Err(e) => { + println!("error writing ask filled events to db: {:?}", events); + Err(e) + } + } +} diff --git a/indexer/src/indexers/marketplace_indexer/storers/ask_placed_event_storer.rs b/indexer/src/indexers/marketplace_indexer/storers/ask_placed_event_storer.rs new file mode 100644 index 0000000..5fab210 --- /dev/null +++ b/indexer/src/indexers/marketplace_indexer/storers/ask_placed_event_storer.rs @@ -0,0 +1,103 @@ +use ahash::AHashMap; +use anyhow::Result; +use aptos_indexer_processor_sdk::utils::errors::ProcessorError; +use diesel::{ + insert_into, query_dsl::methods::FilterDsl, upsert::excluded, BoolExpressionMethods, + ExpressionMethods, QueryResult, +}; +use diesel_async::{AsyncConnection, AsyncPgConnection, RunQueryDsl}; + +use crate::{ + db_models::nft_asks::NftAsk, + schema::nft_asks, + utils::{ + database_connection::get_db_connection, + database_execution::handle_db_execution, + database_utils::{get_config_table_chunk_size, ArcDbPool}, + }, +}; + +async fn execute_sql( + conn: &mut AsyncPgConnection, + items_to_insert: Vec<NftAsk>, +) -> QueryResult<()> { + conn.transaction(|conn| { + Box::pin(async move { + let sql = insert_into(nft_asks::table) + .values(items_to_insert.clone()) + .on_conflict(nft_asks::ask_obj_addr)
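+ // The ask row may already exist when events replay or arrive out of order, so fall through to an update guarded by the version filter below. + // Roughly: INSERT INTO nft_asks ... ON CONFLICT (ask_obj_addr) DO UPDATE SET ... WHERE <newer-version guard>.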
+ .do_update() + .set(( + nft_asks::seller_addr.eq(excluded(nft_asks::seller_addr)), + nft_asks::price.eq(excluded(nft_asks::price)), + nft_asks::order_placed_timestamp.eq(excluded(nft_asks::order_placed_timestamp)), + nft_asks::order_placed_tx_version + .eq(excluded(nft_asks::order_placed_tx_version)), + nft_asks::order_placed_event_idx.eq(excluded(nft_asks::order_placed_event_idx)), + nft_asks::order_status.eq(excluded(nft_asks::order_status)), + )) + .filter( + // Update only if tx version is greater than the existing one + // or if the tx version is the same but the event index is greater + nft_asks::order_placed_tx_version + .lt(excluded(nft_asks::order_placed_tx_version)) + .or(nft_asks::order_placed_tx_version + .eq(excluded(nft_asks::order_placed_tx_version)) + .and( + nft_asks::order_placed_event_idx + .lt(excluded(nft_asks::order_placed_event_idx)), + )), + ); + sql.execute(conn).await?; + Ok(()) + }) + }) + .await +} + +pub async fn process_ask_placed_events( + pool: ArcDbPool, + per_table_chunk_sizes: AHashMap<String, usize>, + events: Vec<NftAsk>, +) -> Result<(), ProcessorError> { + let mut unique_events_map: AHashMap<String, NftAsk> = AHashMap::new(); + for event in events { + if let Some(existing_event) = unique_events_map.get_mut(&event.ask_obj_addr) { + if event.order_placed_tx_version > existing_event.order_placed_tx_version + || event.order_placed_tx_version == existing_event.order_placed_tx_version + && event.order_placed_event_idx > existing_event.order_placed_event_idx + { + *existing_event = event; + } + } else { + unique_events_map.insert(event.ask_obj_addr.clone(), event); + } + } + let unique_events = unique_events_map + .into_iter() + .map(|(_, v)| v) + .collect::<Vec<NftAsk>>(); + + let chunk_size = get_config_table_chunk_size::<NftAsk>("nft_asks", &per_table_chunk_sizes); + let tasks = unique_events + .chunks(chunk_size) + .map(|chunk| { + let pool = pool.clone(); + let items = chunk.to_vec(); + tokio::spawn(async move { + let conn = &mut get_db_connection(&pool).await.expect( + "Failed to get connection from pool while processing ask placed events", + ); + execute_sql(conn, items).await + }) + }) + .collect::<Vec<_>>(); + + match handle_db_execution(tasks).await { + Ok(_) => Ok(()), + Err(e) => { + println!("error writing ask placed events to db: {:?}", unique_events); + Err(e) + } + } +} diff --git a/indexer/src/indexers/marketplace_indexer/storers/bid_cancelled_event_storer.rs b/indexer/src/indexers/marketplace_indexer/storers/bid_cancelled_event_storer.rs new file mode 100644 index 0000000..75cc26a --- /dev/null +++ b/indexer/src/indexers/marketplace_indexer/storers/bid_cancelled_event_storer.rs @@ -0,0 +1,86 @@ +use ahash::AHashMap; +use anyhow::Result; +use aptos_indexer_processor_sdk::utils::errors::ProcessorError; +use diesel::{ + insert_into, query_dsl::methods::FilterDsl, upsert::excluded, BoolExpressionMethods, + ExpressionMethods, QueryResult, +}; +use diesel_async::{AsyncConnection, AsyncPgConnection, RunQueryDsl}; + +use crate::{ + db_models::nft_bids::NftBid, + schema::nft_bids, + utils::{ + database_connection::get_db_connection, + database_execution::handle_db_execution, + database_utils::{get_config_table_chunk_size, ArcDbPool}, + }, +}; + +async fn execute_sql( + conn: &mut AsyncPgConnection, + items_to_insert: Vec<NftBid>, +) -> QueryResult<()> { + conn.transaction(|conn| { + Box::pin(async move { + let sql = insert_into(nft_bids::table) + .values(items_to_insert.clone()) + .on_conflict(nft_bids::bid_obj_addr) + .do_update() + .set(( + nft_bids::price.eq(excluded(nft_bids::price)),
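+ // excluded(col) maps to Postgres EXCLUDED.col, i.e. the value from the row we attempted to insert.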
+ nft_bids::order_cancelled_timestamp + .eq(excluded(nft_bids::order_cancelled_timestamp)), + nft_bids::order_cancelled_tx_version + .eq(excluded(nft_bids::order_cancelled_tx_version)), + nft_bids::order_cancelled_event_idx + .eq(excluded(nft_bids::order_cancelled_event_idx)), + nft_bids::order_status.eq(excluded(nft_bids::order_status)), + )) + .filter( + // Update only if tx version is greater than the existing one + // or if the tx version is the same but the event index is greater + nft_bids::order_cancelled_tx_version + .lt(excluded(nft_bids::order_cancelled_tx_version)) + .or(nft_bids::order_cancelled_tx_version + .eq(excluded(nft_bids::order_cancelled_tx_version)) + .and( + nft_bids::order_cancelled_event_idx + .lt(excluded(nft_bids::order_cancelled_event_idx)), + )), + ); + sql.execute(conn).await?; + Ok(()) + }) + }) + .await +} + +pub async fn process_bid_cancelled_events( + pool: ArcDbPool, + per_table_chunk_sizes: AHashMap<String, usize>, + events: Vec<NftBid>, +) -> Result<(), ProcessorError> { + let chunk_size = get_config_table_chunk_size::<NftBid>("nft_bids", &per_table_chunk_sizes); + let tasks = events + .chunks(chunk_size) + .map(|chunk| { + let pool = pool.clone(); + let items = chunk.to_vec(); + tokio::spawn(async move { + let conn = &mut get_db_connection(&pool).await.expect( + "Failed to get connection from pool while processing bid cancelled events", + ); + execute_sql(conn, items).await + }) + }) + .collect::<Vec<_>>(); + + match handle_db_execution(tasks).await { + Ok(_) => Ok(()), + Err(e) => { + println!("error writing bid cancelled events to db: {:?}", events); + Err(e) + } + } +} diff --git a/indexer/src/indexers/marketplace_indexer/storers/bid_filled_event_storer.rs b/indexer/src/indexers/marketplace_indexer/storers/bid_filled_event_storer.rs new file mode 100644 index 0000000..64ad86c --- /dev/null +++ b/indexer/src/indexers/marketplace_indexer/storers/bid_filled_event_storer.rs @@ -0,0 +1,87 @@ +use ahash::AHashMap; +use anyhow::Result; +use aptos_indexer_processor_sdk::utils::errors::ProcessorError; +use diesel::{ + insert_into, query_dsl::methods::FilterDsl, upsert::excluded, BoolExpressionMethods, + ExpressionMethods, QueryResult, +}; +use diesel_async::{AsyncConnection, AsyncPgConnection, RunQueryDsl}; + +use crate::{ + db_models::nft_bids::NftBid, + schema::nft_bids, + utils::{ + database_connection::get_db_connection, + database_execution::handle_db_execution, + database_utils::{get_config_table_chunk_size, ArcDbPool}, + }, +}; + +async fn execute_sql( + conn: &mut AsyncPgConnection, + items_to_insert: Vec<NftBid>, +) -> QueryResult<()> { + conn.transaction(|conn| { + Box::pin(async move { + let sql = insert_into(nft_bids::table) + .values(items_to_insert.clone()) + .on_conflict(nft_bids::bid_obj_addr) + .do_update() + .set(( + nft_bids::seller_addr.eq(excluded(nft_bids::seller_addr)), + nft_bids::royalties.eq(excluded(nft_bids::royalties)), + nft_bids::commission.eq(excluded(nft_bids::commission)), + nft_bids::price.eq(excluded(nft_bids::price)), + nft_bids::order_filled_timestamp.eq(excluded(nft_bids::order_filled_timestamp)), + nft_bids::order_filled_tx_version + .eq(excluded(nft_bids::order_filled_tx_version)), + nft_bids::order_filled_event_idx.eq(excluded(nft_bids::order_filled_event_idx)), + nft_bids::order_status.eq(excluded(nft_bids::order_status)), + )) + .filter( + // Update only if tx version is greater than the existing one + // or if the tx version is the same but the event index is greater + nft_bids::order_filled_tx_version + .lt(excluded(nft_bids::order_filled_tx_version))
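+ // i.e. the row-value comparison (existing tx version, existing event idx) < (incoming tx version, incoming event idx), written as two clauses.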
+ .or(nft_bids::order_filled_tx_version + .eq(excluded(nft_bids::order_filled_tx_version)) + .and( + nft_bids::order_filled_event_idx + .lt(excluded(nft_bids::order_filled_event_idx)), + )), + ); + sql.execute(conn).await?; + Ok(()) + }) + }) + .await +} + +pub async fn process_bid_filled_events( + pool: ArcDbPool, + per_table_chunk_sizes: AHashMap<String, usize>, + events: Vec<NftBid>, +) -> Result<(), ProcessorError> { + let chunk_size = get_config_table_chunk_size::<NftBid>("nft_bids", &per_table_chunk_sizes); + let tasks = events + .chunks(chunk_size) + .map(|chunk| { + let pool = pool.clone(); + let items = chunk.to_vec(); + tokio::spawn(async move { + let conn = &mut get_db_connection(&pool).await.expect( + "Failed to get connection from pool while processing bid filled events", + ); + execute_sql(conn, items).await + }) + }) + .collect::<Vec<_>>(); + + match handle_db_execution(tasks).await { + Ok(_) => Ok(()), + Err(e) => { + println!("error writing bid filled events to db: {:?}", events); + Err(e) + } + } +} diff --git a/indexer/src/indexers/marketplace_indexer/storers/bid_placed_event_storer.rs b/indexer/src/indexers/marketplace_indexer/storers/bid_placed_event_storer.rs new file mode 100644 index 0000000..0ec4f34 --- /dev/null +++ b/indexer/src/indexers/marketplace_indexer/storers/bid_placed_event_storer.rs @@ -0,0 +1,103 @@ +use ahash::AHashMap; +use anyhow::Result; +use aptos_indexer_processor_sdk::utils::errors::ProcessorError; +use diesel::{ + insert_into, query_dsl::methods::FilterDsl, upsert::excluded, BoolExpressionMethods, + ExpressionMethods, QueryResult, +}; +use diesel_async::{AsyncConnection, AsyncPgConnection, RunQueryDsl}; + +use crate::{ + db_models::nft_bids::NftBid, + schema::nft_bids, + utils::{ + database_connection::get_db_connection, + database_execution::handle_db_execution, + database_utils::{get_config_table_chunk_size, ArcDbPool}, + }, +}; + +async fn execute_sql( + conn: &mut AsyncPgConnection, + items_to_insert: Vec<NftBid>, +) -> QueryResult<()> { + conn.transaction(|conn| { + Box::pin(async move { + let sql = insert_into(nft_bids::table) + .values(items_to_insert.clone()) + .on_conflict(nft_bids::bid_obj_addr) + .do_update() + .set(( + nft_bids::buyer_addr.eq(excluded(nft_bids::buyer_addr)), + nft_bids::price.eq(excluded(nft_bids::price)), + nft_bids::order_placed_timestamp.eq(excluded(nft_bids::order_placed_timestamp)), + nft_bids::order_placed_tx_version + .eq(excluded(nft_bids::order_placed_tx_version)), + nft_bids::order_placed_event_idx.eq(excluded(nft_bids::order_placed_event_idx)), + nft_bids::order_status.eq(excluded(nft_bids::order_status)), + )) + .filter( + // Update only if tx version is greater than the existing one + // or if the tx version is the same but the event index is greater + nft_bids::order_placed_tx_version + .lt(excluded(nft_bids::order_placed_tx_version)) + .or(nft_bids::order_placed_tx_version + .eq(excluded(nft_bids::order_placed_tx_version)) + .and( + nft_bids::order_placed_event_idx + .lt(excluded(nft_bids::order_placed_event_idx)), + )), + ); + sql.execute(conn).await?; + Ok(()) + }) + }) + .await +} + +pub async fn process_bid_placed_events( + pool: ArcDbPool, + per_table_chunk_sizes: AHashMap<String, usize>, + events: Vec<NftBid>, +) -> Result<(), ProcessorError> { + let mut unique_events_map: AHashMap<String, NftBid> = AHashMap::new(); + for event in events { + if let Some(existing_event) = unique_events_map.get_mut(&event.bid_obj_addr) { + if event.order_placed_tx_version > existing_event.order_placed_tx_version + || event.order_placed_tx_version == existing_event.order_placed_tx_version + &&
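+ // same tx version: tie-break on event index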
event.order_placed_event_idx > existing_event.order_placed_event_idx + { + *existing_event = event; + } + } else { + unique_events_map.insert(event.bid_obj_addr.clone(), event); + } + } + let unique_events = unique_events_map + .into_iter() + .map(|(_, v)| v) + .collect::<Vec<NftBid>>(); + + let chunk_size = get_config_table_chunk_size::<NftBid>("nft_bids", &per_table_chunk_sizes); + let tasks = unique_events + .chunks(chunk_size) + .map(|chunk| { + let pool = pool.clone(); + let items = chunk.to_vec(); + tokio::spawn(async move { + let conn = &mut get_db_connection(&pool).await.expect( + "Failed to get connection from pool while processing bid placed events", + ); + execute_sql(conn, items).await + }) + }) + .collect::<Vec<_>>(); + + match handle_db_execution(tasks).await { + Ok(_) => Ok(()), + Err(e) => { + println!("error writing bid placed events to db: {:?}", unique_events); + Err(e) + } + } +} diff --git a/indexer/src/indexers/marketplace_indexer/storers/collection_bid_cancelled_event_storer.rs b/indexer/src/indexers/marketplace_indexer/storers/collection_bid_cancelled_event_storer.rs new file mode 100644 index 0000000..4857820 --- /dev/null +++ b/indexer/src/indexers/marketplace_indexer/storers/collection_bid_cancelled_event_storer.rs @@ -0,0 +1,90 @@ +use ahash::AHashMap; +use anyhow::Result; +use aptos_indexer_processor_sdk::utils::errors::ProcessorError; +use diesel::{ + insert_into, query_dsl::methods::FilterDsl, upsert::excluded, BoolExpressionMethods, + ExpressionMethods, QueryResult, +}; +use diesel_async::{AsyncConnection, AsyncPgConnection, RunQueryDsl}; + +use crate::{ + db_models::collection_bids::CollectionBid, + schema::collection_bids, + utils::{ + database_connection::get_db_connection, + database_execution::handle_db_execution, + database_utils::{get_config_table_chunk_size, ArcDbPool}, + }, +}; + +async fn execute_sql( + conn: &mut AsyncPgConnection, + items_to_insert: Vec<CollectionBid>, +) -> QueryResult<()> { + conn.transaction(|conn| { + Box::pin(async move { + let sql = insert_into(collection_bids::table) + .values(items_to_insert.clone()) + .on_conflict(collection_bids::bid_obj_addr) + .do_update() + .set(( + collection_bids::price.eq(excluded(collection_bids::price)), + collection_bids::order_cancelled_timestamp + .eq(excluded(collection_bids::order_cancelled_timestamp)), + collection_bids::order_cancelled_tx_version + .eq(excluded(collection_bids::order_cancelled_tx_version)), + collection_bids::order_cancelled_event_idx + .eq(excluded(collection_bids::order_cancelled_event_idx)), + collection_bids::order_status.eq(excluded(collection_bids::order_status)), + )) + .filter( + // Update only if tx version is greater than the existing one + // or if the tx version is the same but the event index is greater + collection_bids::order_cancelled_tx_version + .lt(excluded(collection_bids::order_cancelled_tx_version)) + .or(collection_bids::order_cancelled_tx_version + .eq(excluded(collection_bids::order_cancelled_tx_version)) + .and( + collection_bids::order_cancelled_event_idx + .lt(excluded(collection_bids::order_cancelled_event_idx)), + )), + ); + sql.execute(conn).await?; + Ok(()) + }) + }) + .await +} + +pub async fn process_collection_bid_cancelled_events( + pool: ArcDbPool, + per_table_chunk_sizes: AHashMap<String, usize>, + events: Vec<CollectionBid>, +) -> Result<(), ProcessorError> { + let chunk_size = + get_config_table_chunk_size::<CollectionBid>("collection_bids", &per_table_chunk_sizes); + let tasks = events + .chunks(chunk_size) + .map(|chunk| { + let pool = pool.clone(); + let items = chunk.to_vec(); + tokio::spawn(async move { + let
diff --git a/indexer/src/indexers/marketplace_indexer/storers/collection_bid_cancelled_event_storer.rs b/indexer/src/indexers/marketplace_indexer/storers/collection_bid_cancelled_event_storer.rs
new file mode 100644
index 0000000..4857820
--- /dev/null
+++ b/indexer/src/indexers/marketplace_indexer/storers/collection_bid_cancelled_event_storer.rs
@@ -0,0 +1,90 @@
+use ahash::AHashMap;
+use anyhow::Result;
+use aptos_indexer_processor_sdk::utils::errors::ProcessorError;
+use diesel::{
+    insert_into, query_dsl::methods::FilterDsl, upsert::excluded, BoolExpressionMethods,
+    ExpressionMethods, QueryResult,
+};
+use diesel_async::{AsyncConnection, AsyncPgConnection, RunQueryDsl};
+
+use crate::{
+    db_models::collection_bids::CollectionBid,
+    schema::collection_bids,
+    utils::{
+        database_connection::get_db_connection,
+        database_execution::handle_db_execution,
+        database_utils::{get_config_table_chunk_size, ArcDbPool},
+    },
+};
+
+async fn execute_sql(
+    conn: &mut AsyncPgConnection,
+    items_to_insert: Vec<CollectionBid>,
+) -> QueryResult<()> {
+    conn.transaction(|conn| {
+        Box::pin(async move {
+            let sql = insert_into(collection_bids::table)
+                .values(items_to_insert.clone())
+                .on_conflict(collection_bids::bid_obj_addr)
+                .do_update()
+                .set((
+                    collection_bids::price.eq(excluded(collection_bids::price)),
+                    collection_bids::order_cancelled_timestamp
+                        .eq(excluded(collection_bids::order_cancelled_timestamp)),
+                    collection_bids::order_cancelled_tx_version
+                        .eq(excluded(collection_bids::order_cancelled_tx_version)),
+                    collection_bids::order_cancelled_event_idx
+                        .eq(excluded(collection_bids::order_cancelled_event_idx)),
+                    collection_bids::order_status.eq(excluded(collection_bids::order_status)),
+                ))
+                .filter(
+                    // Update only if tx version is greater than the existing one
+                    // or if the tx version is the same but the event index is greater
+                    collection_bids::order_cancelled_tx_version
+                        .lt(excluded(collection_bids::order_cancelled_tx_version))
+                        .or(collection_bids::order_cancelled_tx_version
+                            .eq(excluded(collection_bids::order_cancelled_tx_version))
+                            .and(
+                                collection_bids::order_cancelled_event_idx
+                                    .lt(excluded(collection_bids::order_cancelled_event_idx)),
+                            )),
+                );
+            sql.execute(conn).await?;
+            Ok(())
+        })
+    })
+    .await
+}
+
+pub async fn process_collection_bid_cancelled_events(
+    pool: ArcDbPool,
+    per_table_chunk_sizes: AHashMap<String, usize>,
+    events: Vec<CollectionBid>,
+) -> Result<(), ProcessorError> {
+    let chunk_size =
+        get_config_table_chunk_size::<CollectionBid>("collection_bids", &per_table_chunk_sizes);
+    let tasks = events
+        .chunks(chunk_size)
+        .map(|chunk| {
+            let pool = pool.clone();
+            let items = chunk.to_vec();
+            tokio::spawn(async move {
+                let conn = &mut get_db_connection(&pool).await.expect(
+                    "Failed to get connection from pool while processing collection bid cancelled events",
+                );
+                execute_sql(conn, items).await
+            })
+        })
+        .collect::<Vec<_>>();
+
+    match handle_db_execution(tasks).await {
+        Ok(_) => Ok(()),
+        Err(e) => {
+            println!(
+                "error writing collection bid cancelled events to db: {:?}",
+                events
+            );
+            Err(e)
+        }
+    }
+}
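Every storer in this patch fans out the same way: split the batch into table-configured chunks, give each chunk its own task and pooled connection, then collapse the join handles into one result (the role `handle_db_execution` plays here). A trimmed sketch of that shape, assuming the tokio runtime (with the `macros` and `rt-multi-thread` features) that the indexer already depends on; `write_chunk` is a hypothetical stand-in for `execute_sql` plus connection checkout:

    // Fan-out/fan-in over fixed-size chunks, one task per chunk.
    async fn write_chunk(chunk: Vec<i64>) -> Result<(), String> {
        // A real storer would run the Diesel upsert here.
        if chunk.is_empty() {
            return Err("empty chunk".into());
        }
        Ok(())
    }

    #[tokio::main]
    async fn main() -> Result<(), String> {
        let items: Vec<i64> = (0..10).collect();
        let chunk_size = 4; // in the patch: get_config_table_chunk_size::<T>(...)
        let tasks: Vec<_> = items
            .chunks(chunk_size)
            .map(|chunk| tokio::spawn(write_chunk(chunk.to_vec())))
            .collect();
        for task in tasks {
            // First ? surfaces a panicked/cancelled task, second ? the write error.
            task.await.map_err(|e| e.to_string())??;
        }
        println!("all chunks written");
        Ok(())
    }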
diff --git a/indexer/src/indexers/marketplace_indexer/storers/collection_bid_filled_event_storer.rs b/indexer/src/indexers/marketplace_indexer/storers/collection_bid_filled_event_storer.rs
new file mode 100644
index 0000000..3c6ef9b
--- /dev/null
+++ b/indexer/src/indexers/marketplace_indexer/storers/collection_bid_filled_event_storer.rs
@@ -0,0 +1,176 @@
+use ahash::AHashMap;
+use anyhow::Result;
+use aptos_indexer_processor_sdk::utils::errors::ProcessorError;
+use diesel::{
+    insert_into, query_dsl::methods::FilterDsl, upsert::excluded, BoolExpressionMethods,
+    ExpressionMethods, QueryResult,
+};
+use diesel_async::{AsyncConnection, AsyncPgConnection, RunQueryDsl};
+
+use crate::{
+    db_models::{collection_bids::CollectionBid, filled_collection_bids::FilledCollectionBid},
+    schema::{collection_bids, filled_collection_bids},
+    utils::{
+        database_connection::get_db_connection,
+        database_execution::handle_db_execution,
+        database_utils::{get_config_table_chunk_size, ArcDbPool},
+    },
+};
+
+async fn execute_sql(
+    conn: &mut AsyncPgConnection,
+    data: Vec<(CollectionBid, Vec<FilledCollectionBid>)>,
+) -> QueryResult<()> {
+    let (collection_bids, filled_collection_bids): (Vec<CollectionBid>, Vec<FilledCollectionBid>) =
+        data.into_iter().fold(
+            (vec![], vec![]),
+            |(mut collection_bids, mut filled_collection_bids),
+             (collection_bid, filled_collection_bid)| {
+                collection_bids.push(collection_bid);
+                filled_collection_bids.extend(filled_collection_bid);
+                (collection_bids, filled_collection_bids)
+            },
+        );
+
+    conn.transaction(|conn| {
+        Box::pin(async move {
+            let sql = insert_into(collection_bids::table)
+                .values(collection_bids.clone())
+                .on_conflict(collection_bids::bid_obj_addr)
+                .do_update()
+                .set((
+                    collection_bids::latest_order_filled_tx_version
+                        .eq(excluded(collection_bids::latest_order_filled_tx_version)),
+                    collection_bids::latest_order_filled_event_idx
+                        .eq(excluded(collection_bids::latest_order_filled_event_idx)),
+                    collection_bids::latest_order_filled_timestamp
+                        .eq(excluded(collection_bids::latest_order_filled_timestamp)),
+                ))
+                .filter(
+                    // Update only if tx version is greater than the existing one
+                    // or if the tx version is the same but the event index is greater
+                    collection_bids::latest_order_filled_tx_version
+                        .lt(excluded(collection_bids::latest_order_filled_tx_version))
+                        .or(collection_bids::latest_order_filled_tx_version
+                            .eq(excluded(collection_bids::latest_order_filled_tx_version))
+                            .and(
+                                collection_bids::latest_order_filled_event_idx
+                                    .lt(excluded(collection_bids::latest_order_filled_event_idx)),
+                            )),
+                );
+            sql.execute(conn).await?;
+
+            let sql = insert_into(filled_collection_bids::table)
+                .values(filled_collection_bids.clone())
+                .on_conflict((
+                    filled_collection_bids::bid_obj_addr,
+                    filled_collection_bids::nft_id,
+                    filled_collection_bids::nft_name,
+                ))
+                .do_update()
+                .set((
+                    filled_collection_bids::seller_addr
+                        .eq(excluded(filled_collection_bids::seller_addr)),
+                    filled_collection_bids::royalties
+                        .eq(excluded(filled_collection_bids::royalties)),
+                    filled_collection_bids::commission
+                        .eq(excluded(filled_collection_bids::commission)),
+                    filled_collection_bids::price.eq(excluded(filled_collection_bids::price)),
+                    filled_collection_bids::order_filled_timestamp
+                        .eq(excluded(filled_collection_bids::order_filled_timestamp)),
+                    filled_collection_bids::order_filled_tx_version
+                        .eq(excluded(filled_collection_bids::order_filled_tx_version)),
+                    filled_collection_bids::order_filled_event_idx
+                        .eq(excluded(filled_collection_bids::order_filled_event_idx)),
+                ))
+                .filter(
+                    filled_collection_bids::order_filled_tx_version
+                        .lt(excluded(filled_collection_bids::order_filled_tx_version))
+                        .or(filled_collection_bids::order_filled_tx_version
+                            .eq(excluded(filled_collection_bids::order_filled_tx_version))
+                            .and(
+                                filled_collection_bids::order_filled_event_idx
+                                    .lt(excluded(filled_collection_bids::order_filled_event_idx)),
+                            )),
+                );
+            sql.execute(conn).await?;
+            Ok(())
+        })
+    })
+    .await
+}
+
+pub async fn process_collection_bid_filled_events(
+    pool: ArcDbPool,
+    per_table_chunk_sizes: AHashMap<String, usize>,
+    events: Vec<(CollectionBid, FilledCollectionBid)>,
+) -> Result<(), ProcessorError> {
+    let mut collection_bids_map: AHashMap<String, (CollectionBid, Vec<FilledCollectionBid>)> =
+        AHashMap::new();
+
+    for (curr_collection_bid, curr_filled_collection_bid) in events.clone() {
+        // If the bid is not in the map yet, insert it; otherwise keep the bid row with
+        // the greater (tx version, event index) while accumulating every fill.
+        let existing_collection_bid = collection_bids_map
+            .get(&curr_collection_bid.bid_obj_addr)
+            .cloned();
+        match existing_collection_bid {
+            Some(mut existing_collection_bid) => {
+                existing_collection_bid.1.push(curr_filled_collection_bid);
+                if curr_collection_bid.order_placed_tx_version
+                    > existing_collection_bid.0.order_placed_tx_version
+                    || (curr_collection_bid.order_placed_tx_version
+                        == existing_collection_bid.0.order_placed_tx_version
+                        && curr_collection_bid.order_placed_event_idx
+                            > existing_collection_bid.0.order_placed_event_idx)
+                {
+                    existing_collection_bid.0 = curr_collection_bid.clone();
+                }
+                // Write the merged entry back unconditionally so the fill recorded
+                // above survives even when the incoming bid row is the older one.
+                collection_bids_map.insert(
+                    curr_collection_bid.bid_obj_addr.clone(),
+                    existing_collection_bid,
+                );
+            }
+            None => {
+                collection_bids_map.insert(
+                    curr_collection_bid.bid_obj_addr.clone(),
+                    (
+                        curr_collection_bid.clone(),
+                        vec![curr_filled_collection_bid],
+                    ),
+                );
+            }
+        }
+    }
+
+    let collection_bids = collection_bids_map.values().cloned().collect::<Vec<_>>();
+
+    let chunk_size =
+        get_config_table_chunk_size::<CollectionBid>("collection_bids", &per_table_chunk_sizes);
+    let tasks = collection_bids
+        .chunks(chunk_size)
+        .map(|chunk| {
+            let pool = pool.clone();
+            let items = chunk.to_vec();
+            tokio::spawn(async move {
+                let conn = &mut get_db_connection(&pool).await.expect(
+                    "Failed to get connection from pool while processing collection bid filled events",
+                );
+                execute_sql(conn, items).await
+            })
+        })
+        .collect::<Vec<_>>();
+
+    match handle_db_execution(tasks).await {
+        Ok(_) => Ok(()),
+        Err(e) => {
+            println!(
+                "error writing collection bid filled events to db: {:?}",
+                collection_bids
+            );
+            Err(e)
+        }
+    }
+}
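Fills need a different merge than the placed/cancelled dedup: one bid object can be filled several times in a single batch, so the loop above keeps every `FilledCollectionBid` while letting only the newest `CollectionBid` header win. A standalone sketch of that merge with trimmed stand-in types (`BidHeader` and `Fill` are illustrative, not from this patch):

    // Accumulate all fills per bid key; keep only the newest bid header.
    use std::collections::HashMap;

    #[derive(Clone, Debug)]
    struct BidHeader { addr: String, tx_version: i64, event_idx: i64 }
    #[derive(Clone, Debug)]
    struct Fill { nft_id: String }

    fn merge(events: Vec<(BidHeader, Fill)>) -> Vec<(BidHeader, Vec<Fill>)> {
        let mut map: HashMap<String, (BidHeader, Vec<Fill>)> = HashMap::new();
        for (bid, fill) in events {
            let entry = map
                .entry(bid.addr.clone())
                .or_insert_with(|| (bid.clone(), Vec::new()));
            entry.1.push(fill); // every fill is kept, even alongside an older header
            if (bid.tx_version, bid.event_idx) > (entry.0.tx_version, entry.0.event_idx) {
                entry.0 = bid; // newest header wins
            }
        }
        map.into_values().collect()
    }

    fn main() {
        let b = |v, i| BidHeader { addr: "bid-1".into(), tx_version: v, event_idx: i };
        let merged = merge(vec![
            (b(5, 1), Fill { nft_id: "nft-a".into() }),
            (b(5, 0), Fill { nft_id: "nft-b".into() }),
        ]);
        assert_eq!(merged.len(), 1);
        assert_eq!(merged[0].1.len(), 2); // both fills survive
        assert_eq!(merged[0].0.event_idx, 1); // newest header kept
        println!("merged {} bid(s)", merged.len());
    }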
diff --git a/indexer/src/indexers/marketplace_indexer/storers/collection_bid_placed_event_storer.rs b/indexer/src/indexers/marketplace_indexer/storers/collection_bid_placed_event_storer.rs
new file mode 100644
index 0000000..04cb6dd
--- /dev/null
+++ b/indexer/src/indexers/marketplace_indexer/storers/collection_bid_placed_event_storer.rs
@@ -0,0 +1,111 @@
+use ahash::AHashMap;
+use anyhow::Result;
+use aptos_indexer_processor_sdk::utils::errors::ProcessorError;
+use diesel::{
+    insert_into, query_dsl::methods::FilterDsl, upsert::excluded, BoolExpressionMethods,
+    ExpressionMethods, QueryResult,
+};
+use diesel_async::{AsyncConnection, AsyncPgConnection, RunQueryDsl};
+
+use crate::{
+    db_models::collection_bids::CollectionBid,
+    schema::collection_bids,
+    utils::{
+        database_connection::get_db_connection,
+        database_execution::handle_db_execution,
+        database_utils::{get_config_table_chunk_size, ArcDbPool},
+    },
+};
+
+async fn execute_sql(
+    conn: &mut AsyncPgConnection,
+    items_to_insert: Vec<CollectionBid>,
+) -> QueryResult<()> {
+    conn.transaction(|conn| {
+        Box::pin(async move {
+            let sql = insert_into(collection_bids::table)
+                .values(items_to_insert.clone())
+                .on_conflict(collection_bids::bid_obj_addr)
+                .do_update()
+                .set((
+                    collection_bids::price.eq(excluded(collection_bids::price)),
+                    collection_bids::buyer_addr.eq(excluded(collection_bids::buyer_addr)),
+                    collection_bids::total_nft_amount
+                        .eq(excluded(collection_bids::total_nft_amount)),
+                    collection_bids::order_placed_timestamp
+                        .eq(excluded(collection_bids::order_placed_timestamp)),
+                    collection_bids::order_placed_tx_version
+                        .eq(excluded(collection_bids::order_placed_tx_version)),
+                    collection_bids::order_placed_event_idx
+                        .eq(excluded(collection_bids::order_placed_event_idx)),
+                    collection_bids::order_status.eq(excluded(collection_bids::order_status)),
+                ))
+                .filter(
+                    // Update only if tx version is greater than the existing one
+                    // or if the tx version is the same but the event index is greater
+                    collection_bids::order_placed_tx_version
+                        .lt(excluded(collection_bids::order_placed_tx_version))
+                        .or(collection_bids::order_placed_tx_version
+                            .eq(excluded(collection_bids::order_placed_tx_version))
+                            .and(
+                                collection_bids::order_placed_event_idx
+                                    .lt(excluded(collection_bids::order_placed_event_idx)),
+                            )),
+                );
+            sql.execute(conn).await?;
+            Ok(())
+        })
+    })
+    .await
+}
+
+pub async fn process_collection_bid_placed_events(
+    pool: ArcDbPool,
+    per_table_chunk_sizes: AHashMap<String, usize>,
+    events: Vec<CollectionBid>,
+) -> Result<(), ProcessorError> {
+    let mut unique_events_map: AHashMap<String, CollectionBid> = AHashMap::new();
+    for event in events {
+        if let Some(existing_event) = unique_events_map.get_mut(&event.bid_obj_addr) {
+            if event.order_placed_tx_version > existing_event.order_placed_tx_version
+                || (event.order_placed_tx_version == existing_event.order_placed_tx_version
+                    && event.order_placed_event_idx > existing_event.order_placed_event_idx)
+            {
+                *existing_event = event;
+            }
+        } else {
+            unique_events_map.insert(event.bid_obj_addr.clone(), event);
+        }
+    }
+    let unique_events = unique_events_map
+        .into_iter()
+        .map(|(_, v)| v)
+        .collect::<Vec<_>>();
+
+    // Chunk by the collection_bids table config, since that is the table written here.
+    let chunk_size =
+        get_config_table_chunk_size::<CollectionBid>("collection_bids", &per_table_chunk_sizes);
+    let tasks = unique_events
+        .chunks(chunk_size)
+        .map(|chunk| {
+            let pool = pool.clone();
+            let items = chunk.to_vec();
+            tokio::spawn(async move {
+                let conn = &mut get_db_connection(&pool).await.expect(
+                    "Failed to get connection from pool while processing collection bid placed events",
+                );
+                execute_sql(conn, items).await
+            })
+        })
+        .collect::<Vec<_>>();
+
+    match handle_db_execution(tasks).await {
+        Ok(_) => Ok(()),
+        Err(e) => {
+            println!(
+                "error writing collection bid placed events to db: {:?}",
+                unique_events
+            );
+            Err(e)
+        }
+    }
+}
diff --git a/indexer/src/indexers/marketplace_indexer/storers/mod.rs b/indexer/src/indexers/marketplace_indexer/storers/mod.rs
new file mode 100644
index 0000000..4a278a1
--- /dev/null
+++ b/indexer/src/indexers/marketplace_indexer/storers/mod.rs
@@ -0,0 +1,9 @@
+pub mod ask_cancelled_event_storer;
+pub mod ask_filled_event_storer;
+pub mod ask_placed_event_storer;
+pub mod bid_cancelled_event_storer;
+pub mod 
bid_filled_event_storer; +pub mod bid_placed_event_storer; +pub mod collection_bid_cancelled_event_storer; +pub mod collection_bid_filled_event_storer; +pub mod collection_bid_placed_event_storer; diff --git a/indexer/src/indexers/mod.rs b/indexer/src/indexers/mod.rs index eaf6142..033bbec 100644 --- a/indexer/src/indexers/mod.rs +++ b/indexer/src/indexers/mod.rs @@ -1,2 +1,2 @@ pub mod contract_upgrade_indexer; -pub mod tradeport_indexer; +pub mod marketplace_indexer; diff --git a/indexer/src/indexers/tradeport_indexer/mod.rs b/indexer/src/indexers/tradeport_indexer/mod.rs deleted file mode 100644 index 8b13789..0000000 --- a/indexer/src/indexers/tradeport_indexer/mod.rs +++ /dev/null @@ -1 +0,0 @@ - diff --git a/indexer/src/lib.rs b/indexer/src/lib.rs index ac6c2f4..0d8e701 100644 --- a/indexer/src/lib.rs +++ b/indexer/src/lib.rs @@ -2,7 +2,8 @@ pub mod config; pub mod db_models; pub mod health_check_server; pub mod indexers; +pub mod onchain_events; pub mod utils; -#[path = "db_migrations/contract_upgrade_indexer/contract_upgrade_schema.rs"] -pub mod contract_upgrade_schema; +#[path = "db_migrations/schema.rs"] +pub mod schema; diff --git a/indexer/src/onchain_events/aptos_labs_contract_events/collection_bid_event.rs b/indexer/src/onchain_events/aptos_labs_contract_events/collection_bid_event.rs new file mode 100644 index 0000000..e702cb6 --- /dev/null +++ b/indexer/src/onchain_events/aptos_labs_contract_events/collection_bid_event.rs @@ -0,0 +1,164 @@ +use aptos_indexer_processor_sdk::utils::convert::standardize_address; +use serde::{Deserialize, Serialize}; + +use crate::{ + db_models::{collection_bids::CollectionBid, filled_collection_bids::FilledCollectionBid}, + utils::{ + aptos_utils::{OrderStatus, PaymentTokenType, APT_COIN}, + time_utils::get_unix_timestamp_in_secs, + }, +}; + +use super::shared::{CollectionMetadataOnChain, TokenMetadataOnChain}; + +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct CollectionBidPlacedEventOnChain { + pub collection_offer: String, + pub purchaser: String, + pub price: String, + pub token_amount: String, + pub collection_metadata: CollectionMetadataOnChain, +} + +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct CollectionBidFilledEventOnChain { + pub collection_offer: String, + pub purchaser: String, + pub seller: String, + pub price: String, + pub royalties: String, + pub commission: String, + pub token_metadata: TokenMetadataOnChain, +} + +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct CollectionBidCancelledEventOnChain { + pub collection_offer: String, + pub purchaser: String, + pub price: String, + pub remaining_token_amount: String, + pub collection_metadata: CollectionMetadataOnChain, +} + +impl CollectionBidPlacedEventOnChain { + pub fn to_db_collection_bid( + &self, + marketplace_addr: String, + tx_version: i64, + event_idx: i64, + ) -> CollectionBid { + CollectionBid { + bid_obj_addr: standardize_address(self.collection_offer.as_str()), + collection_addr: self.collection_metadata.get_collection_addr().clone(), + collection_creator_addr: standardize_address( + self.collection_metadata.creator_address.as_str(), + ), + collection_name: self.collection_metadata.collection_name.clone(), + nft_standard: self.collection_metadata.get_nft_standard(), + marketplace_addr, + total_nft_amount: self.token_amount.parse().unwrap(), + buyer_addr: standardize_address(self.purchaser.as_str()), + price: self.price.parse().unwrap(), + payment_token: APT_COIN.to_string(), + payment_token_type: PaymentTokenType::Coin as i32, + 
order_placed_timestamp: get_unix_timestamp_in_secs(), + order_placed_tx_version: tx_version, + order_placed_event_idx: event_idx, + latest_order_filled_timestamp: 0, + latest_order_filled_tx_version: 0, + latest_order_filled_event_idx: 0, + order_cancelled_timestamp: 0, + order_cancelled_tx_version: 0, + order_cancelled_event_idx: 0, + order_status: OrderStatus::Open as i32, + order_expiration_timestamp: 0, + } + } +} + +impl CollectionBidFilledEventOnChain { + pub fn to_db_collection_bid_and_filled_collection_bid( + &self, + marketplace_addr: String, + tx_version: i64, + event_idx: i64, + ) -> (CollectionBid, FilledCollectionBid) { + let time_now = get_unix_timestamp_in_secs(); + ( + CollectionBid { + bid_obj_addr: standardize_address(self.collection_offer.as_str()), + collection_addr: self.token_metadata.get_collection_addr().clone(), + collection_creator_addr: standardize_address( + self.token_metadata.creator_address.as_str(), + ), + collection_name: self.token_metadata.collection_name.clone(), + nft_standard: self.token_metadata.get_nft_standard(), + marketplace_addr, + total_nft_amount: 0, + buyer_addr: standardize_address(self.purchaser.as_str()), + price: self.price.parse().unwrap(), + payment_token: APT_COIN.to_string(), + payment_token_type: PaymentTokenType::Coin as i32, + order_placed_timestamp: 0, + order_placed_tx_version: 0, + order_placed_event_idx: 0, + latest_order_filled_timestamp: time_now, + latest_order_filled_tx_version: tx_version, + latest_order_filled_event_idx: event_idx, + order_cancelled_timestamp: 0, + order_cancelled_tx_version: 0, + order_cancelled_event_idx: 0, + order_status: OrderStatus::Open as i32, + order_expiration_timestamp: 0, + }, + FilledCollectionBid { + bid_obj_addr: standardize_address(self.collection_offer.as_str()), + nft_id: self.token_metadata.get_id(), + nft_name: self.token_metadata.token_name.clone(), + seller_addr: standardize_address(self.seller.as_str()), + price: self.price.parse().unwrap(), + royalties: self.royalties.parse().unwrap(), + commission: self.commission.parse().unwrap(), + order_filled_timestamp: time_now, + order_filled_tx_version: tx_version, + order_filled_event_idx: event_idx, + }, + ) + } +} + +impl CollectionBidCancelledEventOnChain { + pub fn to_db_collection_bid( + &self, + marketplace_addr: String, + tx_version: i64, + event_idx: i64, + ) -> CollectionBid { + CollectionBid { + bid_obj_addr: standardize_address(self.collection_offer.as_str()), + collection_addr: self.collection_metadata.get_collection_addr().clone(), + collection_creator_addr: standardize_address( + self.collection_metadata.creator_address.as_str(), + ), + collection_name: self.collection_metadata.collection_name.clone(), + nft_standard: self.collection_metadata.get_nft_standard(), + marketplace_addr, + total_nft_amount: 0, + buyer_addr: standardize_address(self.purchaser.as_str()), + price: self.price.parse().unwrap(), + payment_token: APT_COIN.to_string(), + payment_token_type: PaymentTokenType::Coin as i32, + order_placed_timestamp: 0, + order_placed_tx_version: 0, + order_placed_event_idx: 0, + latest_order_filled_timestamp: 0, + latest_order_filled_tx_version: 0, + latest_order_filled_event_idx: 0, + order_cancelled_timestamp: get_unix_timestamp_in_secs(), + order_cancelled_tx_version: tx_version, + order_cancelled_event_idx: event_idx, + order_status: OrderStatus::Cancelled as i32, + order_expiration_timestamp: 0, + } + } +} diff --git a/indexer/src/onchain_events/aptos_labs_contract_events/mod.rs 
b/indexer/src/onchain_events/aptos_labs_contract_events/mod.rs new file mode 100644 index 0000000..08729b4 --- /dev/null +++ b/indexer/src/onchain_events/aptos_labs_contract_events/mod.rs @@ -0,0 +1,4 @@ +pub mod collection_bid_event; +pub mod nft_ask_event; +pub mod nft_bid_event; +pub mod shared; diff --git a/indexer/src/onchain_events/aptos_labs_contract_events/nft_ask_event.rs b/indexer/src/onchain_events/aptos_labs_contract_events/nft_ask_event.rs new file mode 100644 index 0000000..2289e6c --- /dev/null +++ b/indexer/src/onchain_events/aptos_labs_contract_events/nft_ask_event.rs @@ -0,0 +1,169 @@ +use aptos_indexer_processor_sdk::utils::convert::standardize_address; +use serde::{Deserialize, Serialize}; + +use crate::{ + db_models::nft_asks::NftAsk, + utils::{ + aptos_utils::{AskOrderType, OrderStatus, PaymentTokenType, APT_COIN}, + time_utils::get_unix_timestamp_in_secs, + }, +}; + +use super::shared::TokenMetadataOnChain; + +fn convert_on_chain_order_type_to_db_order_type(order_type: &str) -> AskOrderType { + match order_type { + "fixed price" => AskOrderType::FixedPrice, + "auction" => AskOrderType::Auction, + _ => panic!("Invalid ask order type"), + } +} + +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct AskPlacedEventOnChain { + #[serde(rename = "type")] + pub order_type: String, + pub listing: String, + pub seller: String, + pub price: String, + pub token_metadata: TokenMetadataOnChain, +} + +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct AskFilledEventOnChain { + #[serde(rename = "type")] + pub order_type: String, + pub listing: String, + pub seller: String, + pub purchaser: String, + pub price: String, + pub commission: String, + pub royalties: String, + pub token_metadata: TokenMetadataOnChain, +} + +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct AskCancelledEventOnChain { + #[serde(rename = "type")] + pub order_type: String, + pub listing: String, + pub seller: String, + pub price: String, + pub token_metadata: TokenMetadataOnChain, +} + +impl AskPlacedEventOnChain { + pub fn to_db_nft_ask( + &self, + marketplace_addr: String, + tx_version: i64, + event_idx: i64, + ) -> NftAsk { + NftAsk { + ask_obj_addr: standardize_address(self.listing.as_str()), + nft_id: self.token_metadata.get_id(), + nft_name: self.token_metadata.token_name.clone(), + collection_addr: self.token_metadata.get_collection_addr().clone(), + collection_creator_addr: standardize_address( + self.token_metadata.creator_address.as_str(), + ), + collection_name: self.token_metadata.collection_name.clone(), + nft_standard: self.token_metadata.get_nft_standard(), + marketplace_addr, + buyer_addr: "".to_string(), + seller_addr: standardize_address(self.seller.as_str()), + price: self.price.parse().unwrap(), + commission: 0, + royalties: 0, + payment_token: APT_COIN.to_string(), + payment_token_type: PaymentTokenType::Coin as i32, + order_placed_timestamp: get_unix_timestamp_in_secs(), + order_placed_tx_version: tx_version, + order_placed_event_idx: event_idx, + order_filled_timestamp: 0, + order_filled_tx_version: 0, + order_filled_event_idx: 0, + order_cancelled_timestamp: 0, + order_cancelled_tx_version: 0, + order_cancelled_event_idx: 0, + order_status: OrderStatus::Open as i32, + order_type: convert_on_chain_order_type_to_db_order_type(&self.order_type) as i32, + } + } +} + +impl AskFilledEventOnChain { + pub fn to_db_nft_ask( + &self, + marketplace_addr: String, + tx_version: i64, + event_idx: i64, + ) -> NftAsk { + NftAsk { + ask_obj_addr: 
standardize_address(self.listing.as_str()), + nft_id: "".to_string(), + nft_name: "".to_string(), + collection_addr: "".to_string(), + collection_creator_addr: "".to_string(), + collection_name: "".to_string(), + nft_standard: 1, + marketplace_addr, + buyer_addr: standardize_address(self.purchaser.as_str()), + seller_addr: standardize_address(self.seller.as_str()), + price: self.price.parse().unwrap(), + commission: self.commission.parse().unwrap(), + royalties: self.royalties.parse().unwrap(), + payment_token: APT_COIN.to_string(), + payment_token_type: PaymentTokenType::Coin as i32, + order_placed_timestamp: 0, + order_placed_tx_version: 0, + order_placed_event_idx: 0, + order_filled_timestamp: get_unix_timestamp_in_secs(), + order_filled_tx_version: tx_version, + order_filled_event_idx: event_idx, + order_cancelled_timestamp: 0, + order_cancelled_tx_version: 0, + order_cancelled_event_idx: 0, + order_status: OrderStatus::Filled as i32, + order_type: convert_on_chain_order_type_to_db_order_type(&self.order_type) as i32, + } + } +} + +impl AskCancelledEventOnChain { + pub fn to_db_nft_ask( + &self, + marketplace_addr: String, + tx_version: i64, + event_idx: i64, + ) -> NftAsk { + NftAsk { + ask_obj_addr: standardize_address(self.listing.as_str()), + nft_id: "".to_string(), + nft_name: "".to_string(), + collection_addr: "".to_string(), + collection_creator_addr: "".to_string(), + collection_name: "".to_string(), + nft_standard: 1, + marketplace_addr, + buyer_addr: "".to_string(), + seller_addr: standardize_address(self.seller.as_str()), + price: self.price.parse().unwrap(), + commission: 0, + royalties: 0, + payment_token: APT_COIN.to_string(), + payment_token_type: PaymentTokenType::Coin as i32, + order_placed_timestamp: 0, + order_placed_tx_version: 0, + order_placed_event_idx: 0, + order_filled_timestamp: 0, + order_filled_tx_version: 0, + order_filled_event_idx: 0, + order_cancelled_timestamp: get_unix_timestamp_in_secs(), + order_cancelled_tx_version: tx_version, + order_cancelled_event_idx: event_idx, + order_status: OrderStatus::Cancelled as i32, + order_type: convert_on_chain_order_type_to_db_order_type(&self.order_type) as i32, + } + } +} diff --git a/indexer/src/onchain_events/aptos_labs_contract_events/nft_bid_event.rs b/indexer/src/onchain_events/aptos_labs_contract_events/nft_bid_event.rs new file mode 100644 index 0000000..e3cc820 --- /dev/null +++ b/indexer/src/onchain_events/aptos_labs_contract_events/nft_bid_event.rs @@ -0,0 +1,155 @@ +use aptos_indexer_processor_sdk::utils::convert::standardize_address; +use serde::{Deserialize, Serialize}; + +use crate::{ + db_models::nft_bids::NftBid, + utils::{ + aptos_utils::{OrderStatus, PaymentTokenType, APT_COIN}, + time_utils::get_unix_timestamp_in_secs, + }, +}; + +use super::shared::TokenMetadataOnChain; + +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct BidPlacedEventOnChain { + pub token_offer: String, + pub purchaser: String, + pub price: String, + pub token_metadata: TokenMetadataOnChain, +} + +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct BidFilledEventOnChain { + pub token_offer: String, + pub purchaser: String, + pub seller: String, + pub price: String, + pub royalties: String, + pub commission: String, + pub token_metadata: TokenMetadataOnChain, +} + +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct BidCancelledEventOnChain { + pub token_offer: String, + pub purchaser: String, + pub price: String, + pub token_metadata: TokenMetadataOnChain, +} + +impl BidPlacedEventOnChain { + pub fn 
to_db_nft_bid( + &self, + marketplace_addr: String, + tx_version: i64, + event_idx: i64, + ) -> NftBid { + NftBid { + bid_obj_addr: standardize_address(self.token_offer.as_str()), + nft_id: self.token_metadata.get_id(), + nft_name: self.token_metadata.token_name.clone(), + collection_addr: self.token_metadata.get_collection_addr().clone(), + collection_creator_addr: standardize_address( + self.token_metadata.creator_address.as_str(), + ), + collection_name: self.token_metadata.collection_name.clone(), + nft_standard: self.token_metadata.get_nft_standard(), + marketplace_addr, + buyer_addr: standardize_address(self.purchaser.as_str()), + seller_addr: "".to_string(), + price: self.price.parse().unwrap(), + commission: 0, + royalties: 0, + payment_token: APT_COIN.to_string(), + payment_token_type: PaymentTokenType::Coin as i32, + order_placed_timestamp: get_unix_timestamp_in_secs(), + order_placed_tx_version: tx_version, + order_placed_event_idx: event_idx, + order_filled_timestamp: 0, + order_filled_tx_version: 0, + order_filled_event_idx: 0, + order_cancelled_timestamp: 0, + order_cancelled_tx_version: 0, + order_cancelled_event_idx: 0, + order_status: OrderStatus::Open as i32, + order_expiration_timestamp: 0, + } + } +} + +impl BidFilledEventOnChain { + pub fn to_db_nft_bid( + &self, + marketplace_addr: String, + tx_version: i64, + event_idx: i64, + ) -> NftBid { + NftBid { + bid_obj_addr: standardize_address(self.token_offer.as_str()), + nft_id: "".to_string(), + nft_name: "".to_string(), + collection_addr: "".to_string(), + collection_creator_addr: "".to_string(), + collection_name: "".to_string(), + nft_standard: 1, + marketplace_addr, + buyer_addr: standardize_address(self.purchaser.as_str()), + seller_addr: standardize_address(self.seller.as_str()), + price: self.price.parse().unwrap(), + commission: self.commission.parse().unwrap(), + royalties: self.royalties.parse().unwrap(), + payment_token: APT_COIN.to_string(), + payment_token_type: PaymentTokenType::Coin as i32, + order_placed_timestamp: 0, + order_placed_tx_version: 0, + order_placed_event_idx: 0, + order_filled_timestamp: get_unix_timestamp_in_secs(), + order_filled_tx_version: tx_version, + order_filled_event_idx: event_idx, + order_cancelled_timestamp: 0, + order_cancelled_tx_version: 0, + order_cancelled_event_idx: 0, + order_status: OrderStatus::Filled as i32, + order_expiration_timestamp: 0, + } + } +} + +impl BidCancelledEventOnChain { + pub fn to_db_nft_bid( + &self, + marketplace_addr: String, + tx_version: i64, + event_idx: i64, + ) -> NftBid { + NftBid { + bid_obj_addr: standardize_address(self.token_offer.as_str()), + nft_id: "".to_string(), + nft_name: "".to_string(), + collection_addr: "".to_string(), + collection_creator_addr: "".to_string(), + collection_name: "".to_string(), + nft_standard: 1, + marketplace_addr, + buyer_addr: standardize_address(self.purchaser.as_str()), + seller_addr: "".to_string(), + price: self.price.parse().unwrap(), + commission: 0, + royalties: 0, + payment_token: APT_COIN.to_string(), + payment_token_type: PaymentTokenType::Coin as i32, + order_placed_timestamp: 0, + order_placed_tx_version: 0, + order_placed_event_idx: 0, + order_filled_timestamp: 0, + order_filled_tx_version: 0, + order_filled_event_idx: 0, + order_cancelled_timestamp: get_unix_timestamp_in_secs(), + order_cancelled_tx_version: tx_version, + order_cancelled_event_idx: event_idx, + order_status: OrderStatus::Cancelled as i32, + order_expiration_timestamp: 0, + } + } +} diff --git 
a/indexer/src/onchain_events/aptos_labs_contract_events/shared.rs b/indexer/src/onchain_events/aptos_labs_contract_events/shared.rs
new file mode 100644
index 0000000..07d2c1e
--- /dev/null
+++ b/indexer/src/onchain_events/aptos_labs_contract_events/shared.rs
@@ -0,0 +1,102 @@
+use aptos_indexer_processor_sdk::utils::convert::standardize_address;
+use serde::{Deserialize, Serialize};
+
+use crate::utils::aptos_utils::NFTStandard;
+
+#[derive(Clone, Debug, Deserialize, Serialize)]
+pub struct CollectionMetadataOnChain {
+    pub creator_address: String,
+    pub collection_name: String,
+    pub collection: MoveOptionObject,
+}
+
+impl CollectionMetadataOnChain {
+    pub fn is_v1(&self) -> bool {
+        self.collection.vec.is_empty()
+    }
+
+    pub fn get_nft_standard(&self) -> i32 {
+        if self.is_v1() {
+            NFTStandard::V1 as i32
+        } else {
+            NFTStandard::V2 as i32
+        }
+    }
+
+    pub fn get_collection_addr(&self) -> String {
+        if self.is_v1() {
+            "".to_string()
+        } else {
+            standardize_address(self.collection.vec[0].clone().inner.as_str())
+        }
+    }
+}
+
+#[derive(Clone, Debug, Deserialize, Serialize)]
+pub struct MoveObject {
+    pub inner: String,
+}
+
+#[derive(Clone, Debug, Deserialize, Serialize)]
+pub struct MoveOptionObject {
+    pub vec: Vec<MoveObject>,
+}
+
+#[derive(Clone, Debug, Deserialize, Serialize)]
+pub struct MoveOptionU64 {
+    pub vec: Vec<String>,
+}
+
+#[derive(Clone, Debug, Deserialize, Serialize)]
+pub struct TokenMetadataOnChain {
+    pub creator_address: String,
+    pub collection_name: String,
+    pub collection: MoveOptionObject,
+    pub token_name: String,
+    pub token: MoveOptionObject,
+    pub property_version: MoveOptionU64,
+}
+
+impl TokenMetadataOnChain {
+    pub fn is_v1(&self) -> bool {
+        self.collection.vec.is_empty()
+    }
+
+    pub fn get_nft_standard(&self) -> i32 {
+        if self.is_v1() {
+            NFTStandard::V1 as i32
+        } else {
+            NFTStandard::V2 as i32
+        }
+    }
+
+    pub fn get_id(&self) -> String {
+        if self.is_v1() {
+            self.property_version.vec[0].clone()
+        } else {
+            standardize_address(self.token.vec[0].clone().inner.as_str())
+        }
+    }
+
+    pub fn get_collection_addr(&self) -> String {
+        if self.is_v1() {
+            "".to_string()
+        } else {
+            standardize_address(self.collection.vec[0].clone().inner.as_str())
+        }
+    }
+
+    // For a v1 NFT the identifier is {token_name}_{property_version};
+    // for a v2 NFT it is the token object address.
+    pub fn get_nft_identifier_for_filled_order(&self) -> String {
+        if self.is_v1() {
+            format!(
+                "{}_{}",
+                self.token_name.clone(),
+                self.property_version.vec[0].clone()
+            )
+        } else {
+            standardize_address(self.token.vec[0].clone().inner.as_str())
+        }
+    }
+}
diff --git a/indexer/src/onchain_events/mod.rs b/indexer/src/onchain_events/mod.rs
new file mode 100644
index 0000000..5605568
--- /dev/null
+++ b/indexer/src/onchain_events/mod.rs
@@ -0,0 +1,2 @@
+pub mod aptos_labs_contract_events;
+pub mod tradeport_contract_events;
diff --git a/indexer/src/onchain_events/tradeport_contract_events/collection_bid_event_v1.rs b/indexer/src/onchain_events/tradeport_contract_events/collection_bid_event_v1.rs
new file mode 100644
index 0000000..6f8f417
--- /dev/null
+++ b/indexer/src/onchain_events/tradeport_contract_events/collection_bid_event_v1.rs
@@ -0,0 +1,160 @@
+use aptos_indexer_processor_sdk::utils::convert::standardize_address;
+use serde::{Deserialize, Serialize};
+
+use crate::{
+    db_models::{collection_bids::CollectionBid, filled_collection_bids::FilledCollectionBid},
+    utils::aptos_utils::{NFTStandard, OrderStatus, PaymentTokenType, APT_COIN},
+};
+
+use super::shared::{generate_collection_bid_order_id_for_nft_v1, 
NftV1CollectionId, NftV1TokenId}; + +// Tradeport v1 InsertCollectionBidEvent +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct TradeportV1CollectionBidPlacedEventOnChain { + pub timestamp: String, + pub nonce: String, + pub bid_buyer: String, + pub collection_id: NftV1CollectionId, + pub amount: String, + pub price: String, +} + +// Tradeport v1 AcceptCollectionBidEvent +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct TradeportV1CollectionBidFilledEventOnChain { + pub timestamp: String, + pub nonce: String, + pub bid_buyer: String, + pub bid_seller: String, + pub token_id: NftV1TokenId, + pub price: String, +} + +// Tradeport v1 DeleteCollectionBidEvent +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct TradeportV1CollectionBidCancelledEventOnChain { + pub timestamp: String, + pub nonce: String, + pub bid_buyer: String, + pub collection_id: NftV1CollectionId, + pub amount: String, + pub price: String, +} + +impl TradeportV1CollectionBidPlacedEventOnChain { + pub fn to_db_collection_bid( + &self, + marketplace_addr: String, + tx_version: i64, + event_idx: i64, + ) -> CollectionBid { + CollectionBid { + bid_obj_addr: generate_collection_bid_order_id_for_nft_v1(self.nonce.clone()), + collection_addr: "".to_string(), + collection_creator_addr: standardize_address( + self.collection_id.collection_creator.as_str(), + ), + collection_name: self.collection_id.collection_name.clone(), + nft_standard: NFTStandard::V1 as i32, + marketplace_addr, + total_nft_amount: self.amount.parse().unwrap(), + buyer_addr: standardize_address(self.bid_buyer.as_str()), + price: self.price.parse().unwrap(), + payment_token: APT_COIN.to_string(), + payment_token_type: PaymentTokenType::Coin as i32, + order_placed_timestamp: self.timestamp.parse().unwrap(), + order_placed_tx_version: tx_version, + order_placed_event_idx: event_idx, + latest_order_filled_timestamp: 0, + latest_order_filled_tx_version: 0, + latest_order_filled_event_idx: 0, + order_cancelled_timestamp: 0, + order_cancelled_tx_version: 0, + order_cancelled_event_idx: 0, + order_status: OrderStatus::Open as i32, + order_expiration_timestamp: 0, + } + } +} + +impl TradeportV1CollectionBidFilledEventOnChain { + pub fn to_db_collection_bid_and_filled_collection_bid( + &self, + marketplace_addr: String, + tx_version: i64, + event_idx: i64, + ) -> (CollectionBid, FilledCollectionBid) { + ( + CollectionBid { + bid_obj_addr: generate_collection_bid_order_id_for_nft_v1(self.nonce.clone()), + collection_addr: "".to_string(), + collection_creator_addr: "".to_string(), + collection_name: "".to_string(), + nft_standard: 1, + marketplace_addr, + total_nft_amount: 0, + buyer_addr: standardize_address(self.bid_buyer.as_str()), + price: self.price.parse().unwrap(), + payment_token: APT_COIN.to_string(), + payment_token_type: PaymentTokenType::Coin as i32, + order_placed_timestamp: 0, + order_placed_tx_version: 0, + order_placed_event_idx: 0, + latest_order_filled_timestamp: self.timestamp.parse().unwrap(), + latest_order_filled_tx_version: tx_version, + latest_order_filled_event_idx: event_idx, + order_cancelled_timestamp: 0, + order_cancelled_tx_version: 0, + order_cancelled_event_idx: 0, + order_status: OrderStatus::Open as i32, + order_expiration_timestamp: 0, + }, + FilledCollectionBid { + bid_obj_addr: generate_collection_bid_order_id_for_nft_v1(self.nonce.clone()), + nft_id: self.token_id.property_version.clone(), + nft_name: self.token_id.token_data_id.name.clone(), + seller_addr: standardize_address(self.bid_seller.as_str()), + 
price: self.price.parse().unwrap(),
+                royalties: 0,
+                commission: 0,
+                order_filled_timestamp: self.timestamp.parse().unwrap(),
+                order_filled_tx_version: tx_version,
+                order_filled_event_idx: event_idx,
+            },
+        )
+    }
+}
+
+impl TradeportV1CollectionBidCancelledEventOnChain {
+    pub fn to_db_collection_bid(
+        &self,
+        marketplace_addr: String,
+        tx_version: i64,
+        event_idx: i64,
+    ) -> CollectionBid {
+        CollectionBid {
+            bid_obj_addr: generate_collection_bid_order_id_for_nft_v1(self.nonce.clone()),
+            collection_addr: "".to_string(),
+            collection_creator_addr: "".to_string(),
+            collection_name: "".to_string(),
+            nft_standard: NFTStandard::V1 as i32,
+            marketplace_addr,
+            total_nft_amount: 0,
+            buyer_addr: standardize_address(self.bid_buyer.as_str()),
+            price: self.price.parse().unwrap(),
+            payment_token: APT_COIN.to_string(),
+            payment_token_type: PaymentTokenType::Coin as i32,
+            order_placed_timestamp: 0,
+            order_placed_tx_version: 0,
+            order_placed_event_idx: 0,
+            latest_order_filled_timestamp: 0,
+            latest_order_filled_tx_version: 0,
+            latest_order_filled_event_idx: 0,
+            order_cancelled_timestamp: self.timestamp.parse().unwrap(),
+            order_cancelled_tx_version: tx_version,
+            order_cancelled_event_idx: event_idx,
+            order_status: OrderStatus::Cancelled as i32,
+            order_expiration_timestamp: 0,
+        }
+    }
+}
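Unlike the v2 events in the next file, Tradeport v1 events carry no order object address, so these converters derive a synthetic primary key by hashing the event nonce (the helpers live in tradeport_contract_events/shared.rs later in this patch). A small sketch of that scheme, assuming the same `blake3` and `hex` crates the patch itself uses:

    // Derive a stable row key from a domain-separated nonce, so placed,
    // filled, and cancelled events for the same v1 order hit the same row.
    fn v1_bid_order_id(nonce: &str) -> String {
        let preimage = format!("tradeport_v1_bid_order_{}", nonce);
        hex::encode(blake3::hash(preimage.as_bytes()).as_bytes())
    }

    fn main() {
        let placed = v1_bid_order_id("42");
        let filled = v1_bid_order_id("42");
        assert_eq!(placed, filled); // same nonce -> same row key across events
        assert_eq!(placed.len(), 64); // 32-byte blake3 digest, hex-encoded
        println!("order id: {}", placed);
    }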
diff --git a/indexer/src/onchain_events/tradeport_contract_events/collection_bid_event_v2.rs b/indexer/src/onchain_events/tradeport_contract_events/collection_bid_event_v2.rs
new file mode 100644
index 0000000..aa97441
--- /dev/null
+++ b/indexer/src/onchain_events/tradeport_contract_events/collection_bid_event_v2.rs
@@ -0,0 +1,155 @@
+use aptos_indexer_processor_sdk::utils::convert::standardize_address;
+use serde::{Deserialize, Serialize};
+
+use crate::{
+    db_models::{collection_bids::CollectionBid, filled_collection_bids::FilledCollectionBid},
+    onchain_events::aptos_labs_contract_events::shared::MoveObject,
+    utils::aptos_utils::{NFTStandard, OrderStatus, PaymentTokenType, APT_COIN},
+};
+
+// Tradeport v2 InsertCollectionBidEvent
+#[derive(Clone, Debug, Deserialize, Serialize)]
+pub struct TradeportV2CollectionBidPlacedEventOnChain {
+    pub timestamp: String,
+    pub bid: MoveObject,
+    pub bid_buyer: String,
+    pub collection: MoveObject,
+    pub price: String,
+}
+
+// Tradeport v2 AcceptCollectionBidEvent
+#[derive(Clone, Debug, Deserialize, Serialize)]
+pub struct TradeportV2CollectionBidFilledEventOnChain {
+    pub timestamp: String,
+    pub bid: MoveObject,
+    pub bid_buyer: String,
+    pub bid_seller: String,
+    pub token: MoveObject,
+    pub price: String,
+}
+
+// Tradeport v2 DeleteCollectionBidEvent
+#[derive(Clone, Debug, Deserialize, Serialize)]
+pub struct TradeportV2CollectionBidCancelledEventOnChain {
+    pub timestamp: String,
+    pub bid: MoveObject,
+    pub bid_buyer: String,
+    pub collection: MoveObject,
+    pub price: String,
+}
+
+impl TradeportV2CollectionBidPlacedEventOnChain {
+    pub fn to_db_collection_bid(
+        &self,
+        marketplace_addr: String,
+        tx_version: i64,
+        event_idx: i64,
+    ) -> CollectionBid {
+        CollectionBid {
+            bid_obj_addr: standardize_address(self.bid.inner.as_str()),
+            collection_addr: standardize_address(self.collection.inner.as_str()),
+            collection_creator_addr: "".to_string(),
+            collection_name: "".to_string(),
+            nft_standard: NFTStandard::V2 as i32,
+            marketplace_addr,
+            total_nft_amount: 1,
+            buyer_addr: standardize_address(self.bid_buyer.as_str()),
+            price: self.price.parse().unwrap(),
+            payment_token: APT_COIN.to_string(),
+            payment_token_type: PaymentTokenType::Coin as i32,
+            order_placed_timestamp: self.timestamp.parse().unwrap(),
+            order_placed_tx_version: tx_version,
+            order_placed_event_idx: event_idx,
+            latest_order_filled_timestamp: 0,
+            latest_order_filled_tx_version: 0,
+            latest_order_filled_event_idx: 0,
+            order_cancelled_timestamp: 0,
+            order_cancelled_tx_version: 0,
+            order_cancelled_event_idx: 0,
+            order_status: OrderStatus::Open as i32,
+            order_expiration_timestamp: 0,
+        }
+    }
+}
+
+impl TradeportV2CollectionBidFilledEventOnChain {
+    pub fn to_db_collection_bid_and_filled_collection_bid(
+        &self,
+        marketplace_addr: String,
+        tx_version: i64,
+        event_idx: i64,
+    ) -> (CollectionBid, FilledCollectionBid) {
+        (
+            CollectionBid {
+                bid_obj_addr: standardize_address(self.bid.inner.as_str()),
+                collection_addr: "".to_string(),
+                collection_creator_addr: "".to_string(),
+                collection_name: "".to_string(),
+                nft_standard: NFTStandard::V2 as i32,
+                marketplace_addr,
+                total_nft_amount: 0,
+                buyer_addr: standardize_address(self.bid_buyer.as_str()),
+                price: self.price.parse().unwrap(),
+                payment_token: APT_COIN.to_string(),
+                payment_token_type: PaymentTokenType::Coin as i32,
+                order_placed_timestamp: 0,
+                order_placed_tx_version: 0,
+                order_placed_event_idx: 0,
+                latest_order_filled_timestamp: self.timestamp.parse().unwrap(),
+                latest_order_filled_tx_version: tx_version,
+                latest_order_filled_event_idx: event_idx,
+                order_cancelled_timestamp: 0,
+                order_cancelled_tx_version: 0,
+                order_cancelled_event_idx: 0,
+                order_status: OrderStatus::Open as i32,
+                order_expiration_timestamp: 0,
+            },
+            FilledCollectionBid {
+                bid_obj_addr: standardize_address(self.bid.inner.as_str()),
+                nft_id: self.token.inner.clone(),
+                nft_name: "".to_string(),
+                seller_addr: standardize_address(self.bid_seller.as_str()),
+                price: self.price.parse().unwrap(),
+                royalties: 0,
+                commission: 0,
+                order_filled_timestamp: self.timestamp.parse().unwrap(),
+                order_filled_tx_version: tx_version,
+                order_filled_event_idx: event_idx,
+            },
+        )
+    }
+}
+
+impl TradeportV2CollectionBidCancelledEventOnChain {
+    pub fn to_db_collection_bid(
+        &self,
+        marketplace_addr: String,
+        tx_version: i64,
+        event_idx: i64,
+    ) -> CollectionBid {
+        CollectionBid {
+            // Key the cancel by the bid object address, matching the placed and
+            // filled events, so the upsert targets the same row.
+            bid_obj_addr: standardize_address(self.bid.inner.as_str()),
+            collection_addr: "".to_string(),
+            collection_creator_addr: "".to_string(),
+            collection_name: "".to_string(),
+            nft_standard: NFTStandard::V2 as i32,
+            marketplace_addr,
+            total_nft_amount: 0,
+            buyer_addr: standardize_address(self.bid_buyer.as_str()),
+            price: self.price.parse().unwrap(),
+            payment_token: APT_COIN.to_string(),
+            payment_token_type: PaymentTokenType::Coin as i32,
+            order_placed_timestamp: 0,
+            order_placed_tx_version: 0,
+            order_placed_event_idx: 0,
+            latest_order_filled_timestamp: 0,
+            latest_order_filled_tx_version: 0,
+            latest_order_filled_event_idx: 0,
+            order_cancelled_timestamp: self.timestamp.parse().unwrap(),
+            order_cancelled_tx_version: tx_version,
+            order_cancelled_event_idx: event_idx,
+            order_status: OrderStatus::Cancelled as i32,
+            order_expiration_timestamp: 0,
+        }
+    }
+}
diff --git a/indexer/src/onchain_events/tradeport_contract_events/mod.rs b/indexer/src/onchain_events/tradeport_contract_events/mod.rs
new file mode 100644
index 0000000..f909a7c
--- /dev/null
+++ b/indexer/src/onchain_events/tradeport_contract_events/mod.rs
@@ -0,0 +1,7 @@
+pub mod collection_bid_event_v1;
+pub mod collection_bid_event_v2;
+pub mod nft_ask_event_v1;
+pub mod nft_ask_event_v2;
+pub mod nft_bid_event_v1;
+pub mod nft_bid_event_v2;
+pub mod shared;
diff --git 
a/indexer/src/onchain_events/tradeport_contract_events/nft_ask_event_v1.rs b/indexer/src/onchain_events/tradeport_contract_events/nft_ask_event_v1.rs new file mode 100644 index 0000000..67f110d --- /dev/null +++ b/indexer/src/onchain_events/tradeport_contract_events/nft_ask_event_v1.rs @@ -0,0 +1,213 @@ +use aptos_indexer_processor_sdk::utils::convert::standardize_address; +use serde::{Deserialize, Serialize}; + +use crate::{ + db_models::nft_asks::NftAsk, + utils::aptos_utils::{AskOrderType, NFTStandard, OrderStatus, PaymentTokenType, APT_COIN}, +}; + +use super::shared::{generate_ask_order_id_for_nft_v1, NftV1TokenId}; + +// Tradeport v1 InsertListingEvent +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct TradeportV1AskPlacedEventOnChain { + pub timestamp: String, + pub token_id: NftV1TokenId, + pub price: String, + pub owner: String, +} + +// Tradeport v1 UpdateListingEvent +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct TradeportV1AskUpdatedEventOnChain { + pub timestamp: String, + pub token_id: NftV1TokenId, + pub price: String, + pub old_price: String, + pub owner: String, +} + +// Tradeport v1 BuyEvent +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct TradeportV1AskFilledEventOnChain { + pub timestamp: String, + pub token_id: NftV1TokenId, + pub price: String, + pub owner: String, + pub buyer: String, +} + +// Tradeport v1 DeleteListingEvent +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct TradeportV1AskCancelledEventOnChain { + pub timestamp: String, + pub token_id: NftV1TokenId, + pub price: String, + pub owner: String, +} + +impl TradeportV1AskPlacedEventOnChain { + pub fn to_db_nft_ask( + &self, + marketplace_addr: String, + tx_version: i64, + event_idx: i64, + ) -> NftAsk { + NftAsk { + ask_obj_addr: generate_ask_order_id_for_nft_v1( + self.owner.clone(), + self.token_id.clone(), + ), + nft_id: self.token_id.property_version.clone(), + nft_name: self.token_id.token_data_id.name.clone(), + collection_addr: "".to_string(), + collection_creator_addr: standardize_address( + self.token_id.token_data_id.creator.as_str(), + ), + collection_name: self.token_id.token_data_id.collection.clone(), + nft_standard: NFTStandard::V1 as i32, + marketplace_addr, + buyer_addr: "".to_string(), + seller_addr: standardize_address(self.owner.as_str()), + price: self.price.parse().unwrap(), + commission: 0, + royalties: 0, + payment_token: APT_COIN.to_string(), + payment_token_type: PaymentTokenType::Coin as i32, + order_placed_timestamp: self.timestamp.parse().unwrap(), + order_placed_tx_version: tx_version, + order_placed_event_idx: event_idx, + order_filled_timestamp: 0, + order_filled_tx_version: 0, + order_filled_event_idx: 0, + order_cancelled_timestamp: 0, + order_cancelled_tx_version: 0, + order_cancelled_event_idx: 0, + order_status: OrderStatus::Open as i32, + order_type: AskOrderType::FixedPrice as i32, + } + } +} + +impl TradeportV1AskUpdatedEventOnChain { + pub fn to_db_nft_ask( + &self, + marketplace_addr: String, + tx_version: i64, + event_idx: i64, + ) -> NftAsk { + NftAsk { + ask_obj_addr: generate_ask_order_id_for_nft_v1( + self.owner.clone(), + self.token_id.clone(), + ), + nft_id: "".to_string(), + nft_name: "".to_string(), + collection_addr: "".to_string(), + collection_creator_addr: "".to_string(), + collection_name: "".to_string(), + nft_standard: 1, + marketplace_addr, + buyer_addr: "".to_string(), + seller_addr: standardize_address(self.owner.as_str()), + price: self.price.parse().unwrap(), + commission: 0, + royalties: 0, + 
payment_token: APT_COIN.to_string(), + payment_token_type: PaymentTokenType::Coin as i32, + order_placed_timestamp: 0, + order_placed_tx_version: 0, + order_placed_event_idx: 0, + order_filled_timestamp: 0, + order_filled_tx_version: 0, + order_filled_event_idx: 0, + order_cancelled_timestamp: self.timestamp.parse().unwrap(), + order_cancelled_tx_version: tx_version, + order_cancelled_event_idx: event_idx, + order_status: OrderStatus::Open as i32, + order_type: AskOrderType::FixedPrice as i32, + } + } +} + +impl TradeportV1AskFilledEventOnChain { + pub fn to_db_nft_ask( + &self, + marketplace_addr: String, + tx_version: i64, + event_idx: i64, + ) -> NftAsk { + NftAsk { + ask_obj_addr: generate_ask_order_id_for_nft_v1( + self.owner.clone(), + self.token_id.clone(), + ), + nft_id: "".to_string(), + nft_name: "".to_string(), + collection_addr: "".to_string(), + collection_creator_addr: "".to_string(), + collection_name: "".to_string(), + nft_standard: 1, + marketplace_addr, + buyer_addr: standardize_address(self.buyer.as_str()), + seller_addr: standardize_address(self.owner.as_str()), + price: self.price.parse().unwrap(), + commission: 0, + royalties: 0, + payment_token: APT_COIN.to_string(), + payment_token_type: PaymentTokenType::Coin as i32, + order_placed_timestamp: 0, + order_placed_tx_version: 0, + order_placed_event_idx: 0, + order_filled_timestamp: self.timestamp.parse().unwrap(), + order_filled_tx_version: tx_version, + order_filled_event_idx: event_idx, + order_cancelled_timestamp: 0, + order_cancelled_tx_version: 0, + order_cancelled_event_idx: 0, + order_status: OrderStatus::Filled as i32, + order_type: AskOrderType::FixedPrice as i32, + } + } +} + +impl TradeportV1AskCancelledEventOnChain { + pub fn to_db_nft_ask( + &self, + marketplace_addr: String, + tx_version: i64, + event_idx: i64, + ) -> NftAsk { + NftAsk { + ask_obj_addr: generate_ask_order_id_for_nft_v1( + self.owner.clone(), + self.token_id.clone(), + ), + nft_id: "".to_string(), + nft_name: "".to_string(), + collection_addr: "".to_string(), + collection_creator_addr: "".to_string(), + collection_name: "".to_string(), + nft_standard: 1, + marketplace_addr, + buyer_addr: "".to_string(), + seller_addr: standardize_address(self.owner.as_str()), + price: self.price.parse().unwrap(), + commission: 0, + royalties: 0, + payment_token: APT_COIN.to_string(), + payment_token_type: PaymentTokenType::Coin as i32, + order_placed_timestamp: 0, + order_placed_tx_version: 0, + order_placed_event_idx: 0, + order_filled_timestamp: 0, + order_filled_tx_version: 0, + order_filled_event_idx: 0, + order_cancelled_timestamp: self.timestamp.parse().unwrap(), + order_cancelled_tx_version: tx_version, + order_cancelled_event_idx: event_idx, + order_status: OrderStatus::Cancelled as i32, + order_type: AskOrderType::FixedPrice as i32, + } + } +} diff --git a/indexer/src/onchain_events/tradeport_contract_events/nft_ask_event_v2.rs b/indexer/src/onchain_events/tradeport_contract_events/nft_ask_event_v2.rs new file mode 100644 index 0000000..4b24a8c --- /dev/null +++ b/indexer/src/onchain_events/tradeport_contract_events/nft_ask_event_v2.rs @@ -0,0 +1,153 @@ +use aptos_indexer_processor_sdk::utils::convert::standardize_address; +use serde::{Deserialize, Serialize}; + +use crate::{ + db_models::nft_asks::NftAsk, + onchain_events::aptos_labs_contract_events::shared::MoveObject, + utils::aptos_utils::{AskOrderType, NFTStandard, OrderStatus, PaymentTokenType, APT_COIN}, +}; + +// Tradeport v2 InsertListingEvent +#[derive(Clone, Debug, Deserialize, 
Serialize)] +pub struct TradeportV2AskPlacedEventOnChain { + pub listing: MoveObject, + pub timestamp: String, + pub token: MoveObject, + pub price: String, + pub seller: String, +} + +// Tradeport v2 BuyEvent +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct TradeportV2AskFilledEventOnChain { + pub listing: MoveObject, + pub timestamp: String, + pub token: MoveObject, + pub price: String, + pub seller: String, + pub buyer: String, +} + +// Tradeport v2 DeleteListingEvent +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct TradeportV2AskCancelledEventOnChain { + pub listing: MoveObject, + pub timestamp: String, + pub token: MoveObject, + pub price: String, + pub seller: String, +} + +impl TradeportV2AskPlacedEventOnChain { + pub fn to_db_nft_ask( + &self, + marketplace_addr: String, + tx_version: i64, + event_idx: i64, + ) -> NftAsk { + NftAsk { + ask_obj_addr: standardize_address(self.listing.inner.as_str()), + nft_id: standardize_address(self.token.inner.as_str()), + nft_name: "".to_string(), + collection_addr: "".to_string(), + collection_creator_addr: "".to_string(), + collection_name: "".to_string(), + nft_standard: NFTStandard::V2 as i32, + marketplace_addr, + buyer_addr: "".to_string(), + seller_addr: standardize_address(self.seller.as_str()), + price: self.price.parse().unwrap(), + commission: 0, + royalties: 0, + payment_token: APT_COIN.to_string(), + payment_token_type: PaymentTokenType::Coin as i32, + order_placed_timestamp: self.timestamp.parse().unwrap(), + order_placed_tx_version: tx_version, + order_placed_event_idx: event_idx, + order_filled_timestamp: 0, + order_filled_tx_version: 0, + order_filled_event_idx: 0, + order_cancelled_timestamp: 0, + order_cancelled_tx_version: 0, + order_cancelled_event_idx: 0, + order_status: OrderStatus::Open as i32, + order_type: AskOrderType::FixedPrice as i32, + } + } +} + +impl TradeportV2AskFilledEventOnChain { + pub fn to_db_nft_ask( + &self, + marketplace_addr: String, + tx_version: i64, + event_idx: i64, + ) -> NftAsk { + NftAsk { + ask_obj_addr: standardize_address(self.listing.inner.as_str()), + nft_id: "".to_string(), + nft_name: "".to_string(), + collection_addr: "".to_string(), + collection_creator_addr: "".to_string(), + collection_name: "".to_string(), + nft_standard: NFTStandard::V2 as i32, + marketplace_addr, + buyer_addr: standardize_address(self.buyer.as_str()), + seller_addr: standardize_address(self.seller.as_str()), + price: self.price.parse().unwrap(), + commission: 0, + royalties: 0, + payment_token: APT_COIN.to_string(), + payment_token_type: PaymentTokenType::Coin as i32, + order_placed_timestamp: 0, + order_placed_tx_version: 0, + order_placed_event_idx: 0, + order_filled_timestamp: self.timestamp.parse().unwrap(), + order_filled_tx_version: tx_version, + order_filled_event_idx: event_idx, + order_cancelled_timestamp: 0, + order_cancelled_tx_version: 0, + order_cancelled_event_idx: 0, + order_status: OrderStatus::Filled as i32, + order_type: AskOrderType::FixedPrice as i32, + } + } +} + +impl TradeportV2AskCancelledEventOnChain { + pub fn to_db_nft_ask( + &self, + marketplace_addr: String, + tx_version: i64, + event_idx: i64, + ) -> NftAsk { + NftAsk { + ask_obj_addr: standardize_address(self.listing.inner.as_str()), + nft_id: "".to_string(), + nft_name: "".to_string(), + collection_addr: "".to_string(), + collection_creator_addr: "".to_string(), + collection_name: "".to_string(), + nft_standard: NFTStandard::V2 as i32, + marketplace_addr, + buyer_addr: "".to_string(), + seller_addr: 
standardize_address(self.seller.as_str()), + price: self.price.parse().unwrap(), + commission: 0, + royalties: 0, + payment_token: APT_COIN.to_string(), + payment_token_type: PaymentTokenType::Coin as i32, + order_placed_timestamp: 0, + order_placed_tx_version: 0, + order_placed_event_idx: 0, + order_filled_timestamp: 0, + order_filled_tx_version: 0, + order_filled_event_idx: 0, + order_cancelled_timestamp: self.timestamp.parse().unwrap(), + order_cancelled_tx_version: tx_version, + order_cancelled_event_idx: event_idx, + order_status: OrderStatus::Cancelled as i32, + order_type: AskOrderType::FixedPrice as i32, + } + } +} diff --git a/indexer/src/onchain_events/tradeport_contract_events/nft_bid_event_v1.rs b/indexer/src/onchain_events/tradeport_contract_events/nft_bid_event_v1.rs new file mode 100644 index 0000000..d95a660 --- /dev/null +++ b/indexer/src/onchain_events/tradeport_contract_events/nft_bid_event_v1.rs @@ -0,0 +1,156 @@ +use aptos_indexer_processor_sdk::utils::convert::standardize_address; +use serde::{Deserialize, Serialize}; + +use crate::{ + db_models::nft_bids::NftBid, + utils::aptos_utils::{NFTStandard, OrderStatus, PaymentTokenType, APT_COIN}, +}; + +use super::shared::{generate_bid_order_id_for_nft_v1, NftV1TokenId}; + +// Tradeport v1 InsertTokenBidEvent +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct TradeportV1BidPlacedEventOnChain { + pub timestamp: String, + pub nonce: String, + pub bid_buyer: String, + pub token_id: NftV1TokenId, + pub price: String, +} + +// Tradeport v1 AcceptTokenBidEvent +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct TradeportV1BidFilledEventOnChain { + pub timestamp: String, + pub nonce: String, + pub bid_buyer: String, + pub bid_seller: String, + pub token_id: NftV1TokenId, + pub price: String, +} + +// Tradeport v1 DeleteTokenBidEvent +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct TradeportV1BidCancelledEventOnChain { + pub timestamp: String, + pub nonce: String, + pub bid_buyer: String, + pub token_id: NftV1TokenId, + pub price: String, +} + +impl TradeportV1BidPlacedEventOnChain { + pub fn to_db_nft_bid( + &self, + marketplace_addr: String, + tx_version: i64, + event_idx: i64, + ) -> NftBid { + NftBid { + bid_obj_addr: generate_bid_order_id_for_nft_v1(self.nonce.clone()), + nft_id: self.token_id.property_version.clone(), + nft_name: self.token_id.token_data_id.name.clone(), + collection_addr: "".to_string(), + collection_creator_addr: standardize_address( + self.token_id.token_data_id.creator.as_str(), + ), + collection_name: self.token_id.token_data_id.collection.clone(), + nft_standard: NFTStandard::V1 as i32, + marketplace_addr, + buyer_addr: standardize_address(self.bid_buyer.as_str()), + seller_addr: "".to_string(), + price: self.price.parse().unwrap(), + commission: 0, + royalties: 0, + payment_token: APT_COIN.to_string(), + payment_token_type: PaymentTokenType::Coin as i32, + order_placed_timestamp: self.timestamp.parse().unwrap(), + order_placed_tx_version: tx_version, + order_placed_event_idx: event_idx, + order_filled_timestamp: 0, + order_filled_tx_version: 0, + order_filled_event_idx: 0, + order_cancelled_timestamp: 0, + order_cancelled_tx_version: 0, + order_cancelled_event_idx: 0, + order_status: OrderStatus::Open as i32, + order_expiration_timestamp: 0, + } + } +} + +impl TradeportV1BidFilledEventOnChain { + pub fn to_db_nft_bid( + &self, + marketplace_addr: String, + tx_version: i64, + event_idx: i64, + ) -> NftBid { + NftBid { + bid_obj_addr: 
generate_bid_order_id_for_nft_v1(self.nonce.clone()), + nft_id: "".to_string(), + nft_name: "".to_string(), + collection_addr: "".to_string(), + collection_creator_addr: "".to_string(), + collection_name: "".to_string(), + nft_standard: 1, + marketplace_addr, + buyer_addr: standardize_address(self.bid_buyer.as_str()), + seller_addr: standardize_address(self.bid_seller.as_str()), + price: self.price.parse().unwrap(), + commission: 0, + royalties: 0, + payment_token: APT_COIN.to_string(), + payment_token_type: PaymentTokenType::Coin as i32, + order_placed_timestamp: 0, + order_placed_tx_version: 0, + order_placed_event_idx: 0, + order_filled_timestamp: self.timestamp.parse().unwrap(), + order_filled_tx_version: tx_version, + order_filled_event_idx: event_idx, + order_cancelled_timestamp: 0, + order_cancelled_tx_version: 0, + order_cancelled_event_idx: 0, + order_status: OrderStatus::Filled as i32, + order_expiration_timestamp: 0, + } + } +} + +impl TradeportV1BidCancelledEventOnChain { + pub fn to_db_nft_bid( + &self, + marketplace_addr: String, + tx_version: i64, + event_idx: i64, + ) -> NftBid { + NftBid { + bid_obj_addr: generate_bid_order_id_for_nft_v1(self.nonce.clone()), + nft_id: "".to_string(), + nft_name: "".to_string(), + collection_addr: "".to_string(), + collection_creator_addr: "".to_string(), + collection_name: "".to_string(), + nft_standard: 1, + marketplace_addr, + buyer_addr: standardize_address(self.bid_buyer.as_str()), + seller_addr: "".to_string(), + price: self.price.parse().unwrap(), + commission: 0, + royalties: 0, + payment_token: APT_COIN.to_string(), + payment_token_type: PaymentTokenType::Coin as i32, + order_placed_timestamp: 0, + order_placed_tx_version: 0, + order_placed_event_idx: 0, + order_filled_timestamp: 0, + order_filled_tx_version: 0, + order_filled_event_idx: 0, + order_cancelled_timestamp: self.timestamp.parse().unwrap(), + order_cancelled_tx_version: tx_version, + order_cancelled_event_idx: event_idx, + order_status: OrderStatus::Cancelled as i32, + order_expiration_timestamp: 0, + } + } +} diff --git a/indexer/src/onchain_events/tradeport_contract_events/nft_bid_event_v2.rs b/indexer/src/onchain_events/tradeport_contract_events/nft_bid_event_v2.rs new file mode 100644 index 0000000..4aca285 --- /dev/null +++ b/indexer/src/onchain_events/tradeport_contract_events/nft_bid_event_v2.rs @@ -0,0 +1,153 @@ +use aptos_indexer_processor_sdk::utils::convert::standardize_address; +use serde::{Deserialize, Serialize}; + +use crate::{ + db_models::nft_bids::NftBid, + onchain_events::aptos_labs_contract_events::shared::MoveObject, + utils::aptos_utils::{NFTStandard, OrderStatus, PaymentTokenType, APT_COIN}, +}; + +// Tradeport v2 InsertTokenBidEvent +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct TradeportV2BidPlacedEventOnChain { + pub timestamp: String, + pub bid: MoveObject, + pub bid_buyer: String, + pub token: MoveObject, + pub price: String, +} + +// Tradeport v2 AcceptTokenBidEvent +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct TradeportV2BidFilledEventOnChain { + pub timestamp: String, + pub bid: MoveObject, + pub bid_buyer: String, + pub bid_seller: String, + pub token: MoveObject, + pub price: String, +} + +// Tradeport v2 DeleteTokenBidEvent +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct TradeportV2BidCancelledEventOnChain { + pub timestamp: String, + pub bid: MoveObject, + pub bid_buyer: String, + pub token: MoveObject, + pub price: String, +} + +impl TradeportV2BidPlacedEventOnChain { + pub fn to_db_nft_bid( + 
diff --git a/indexer/src/onchain_events/tradeport_contract_events/nft_bid_event_v2.rs b/indexer/src/onchain_events/tradeport_contract_events/nft_bid_event_v2.rs
new file mode 100644
index 0000000..4aca285
--- /dev/null
+++ b/indexer/src/onchain_events/tradeport_contract_events/nft_bid_event_v2.rs
@@ -0,0 +1,153 @@
+use aptos_indexer_processor_sdk::utils::convert::standardize_address;
+use serde::{Deserialize, Serialize};
+
+use crate::{
+    db_models::nft_bids::NftBid,
+    onchain_events::aptos_labs_contract_events::shared::MoveObject,
+    utils::aptos_utils::{NFTStandard, OrderStatus, PaymentTokenType, APT_COIN},
+};
+
+// Tradeport v2 InsertTokenBidEvent
+#[derive(Clone, Debug, Deserialize, Serialize)]
+pub struct TradeportV2BidPlacedEventOnChain {
+    pub timestamp: String,
+    pub bid: MoveObject,
+    pub bid_buyer: String,
+    pub token: MoveObject,
+    pub price: String,
+}
+
+// Tradeport v2 AcceptTokenBidEvent
+#[derive(Clone, Debug, Deserialize, Serialize)]
+pub struct TradeportV2BidFilledEventOnChain {
+    pub timestamp: String,
+    pub bid: MoveObject,
+    pub bid_buyer: String,
+    pub bid_seller: String,
+    pub token: MoveObject,
+    pub price: String,
+}
+
+// Tradeport v2 DeleteTokenBidEvent
+#[derive(Clone, Debug, Deserialize, Serialize)]
+pub struct TradeportV2BidCancelledEventOnChain {
+    pub timestamp: String,
+    pub bid: MoveObject,
+    pub bid_buyer: String,
+    pub token: MoveObject,
+    pub price: String,
+}
+
+impl TradeportV2BidPlacedEventOnChain {
+    pub fn to_db_nft_bid(
+        &self,
+        marketplace_addr: String,
+        tx_version: i64,
+        event_idx: i64,
+    ) -> NftBid {
+        NftBid {
+            bid_obj_addr: standardize_address(self.bid.inner.as_str()),
+            nft_id: self.token.inner.clone(),
+            nft_name: "".to_string(),
+            collection_addr: "".to_string(),
+            collection_creator_addr: "".to_string(),
+            collection_name: "".to_string(),
+            nft_standard: NFTStandard::V2 as i32,
+            marketplace_addr,
+            buyer_addr: standardize_address(self.bid_buyer.as_str()),
+            seller_addr: "".to_string(),
+            price: self.price.parse().unwrap(),
+            commission: 0,
+            royalties: 0,
+            payment_token: APT_COIN.to_string(),
+            payment_token_type: PaymentTokenType::Coin as i32,
+            order_placed_timestamp: self.timestamp.parse().unwrap(),
+            order_placed_tx_version: tx_version,
+            order_placed_event_idx: event_idx,
+            order_filled_timestamp: 0,
+            order_filled_tx_version: 0,
+            order_filled_event_idx: 0,
+            order_cancelled_timestamp: 0,
+            order_cancelled_tx_version: 0,
+            order_cancelled_event_idx: 0,
+            order_status: OrderStatus::Open as i32,
+            order_expiration_timestamp: 0,
+        }
+    }
+}
+
+impl TradeportV2BidFilledEventOnChain {
+    pub fn to_db_nft_bid(
+        &self,
+        marketplace_addr: String,
+        tx_version: i64,
+        event_idx: i64,
+    ) -> NftBid {
+        NftBid {
+            bid_obj_addr: standardize_address(self.bid.inner.as_str()),
+            nft_id: "".to_string(),
+            nft_name: "".to_string(),
+            collection_addr: "".to_string(),
+            collection_creator_addr: "".to_string(),
+            collection_name: "".to_string(),
+            nft_standard: NFTStandard::V2 as i32,
+            marketplace_addr,
+            buyer_addr: standardize_address(self.bid_buyer.as_str()),
+            seller_addr: standardize_address(self.bid_seller.as_str()),
+            price: self.price.parse().unwrap(),
+            commission: 0,
+            royalties: 0,
+            payment_token: APT_COIN.to_string(),
+            payment_token_type: PaymentTokenType::Coin as i32,
+            order_placed_timestamp: 0,
+            order_placed_tx_version: 0,
+            order_placed_event_idx: 0,
+            order_filled_timestamp: self.timestamp.parse().unwrap(),
+            order_filled_tx_version: tx_version,
+            order_filled_event_idx: event_idx,
+            order_cancelled_timestamp: 0,
+            order_cancelled_tx_version: 0,
+            order_cancelled_event_idx: 0,
+            order_status: OrderStatus::Filled as i32,
+            order_expiration_timestamp: 0,
+        }
+    }
+}
+
+impl TradeportV2BidCancelledEventOnChain {
+    pub fn to_db_nft_bid(
+        &self,
+        marketplace_addr: String,
+        tx_version: i64,
+        event_idx: i64,
+    ) -> NftBid {
+        NftBid {
+            bid_obj_addr: standardize_address(self.bid.inner.as_str()),
+            nft_id: "".to_string(),
+            nft_name: "".to_string(),
+            collection_addr: "".to_string(),
+            collection_creator_addr: "".to_string(),
+            collection_name: "".to_string(),
+            nft_standard: NFTStandard::V2 as i32,
+            marketplace_addr,
+            buyer_addr: standardize_address(self.bid_buyer.as_str()),
+            seller_addr: "".to_string(),
+            price: self.price.parse().unwrap(),
+            commission: 0,
+            royalties: 0,
+            payment_token: APT_COIN.to_string(),
+            payment_token_type: PaymentTokenType::Coin as i32,
+            order_placed_timestamp: 0,
+            order_placed_tx_version: 0,
+            order_placed_event_idx: 0,
+            order_filled_timestamp: 0,
+            order_filled_tx_version: 0,
+            order_filled_event_idx: 0,
+            order_cancelled_timestamp: self.timestamp.parse().unwrap(),
+            order_cancelled_tx_version: tx_version,
+            order_cancelled_event_idx: event_idx,
+            order_status: OrderStatus::Cancelled as i32,
+            order_expiration_timestamp: 0,
+        }
+    }
+}
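Worth spelling out the convention these conversions follow: fields an event does not carry are written as 0 or "", and the storers are expected to keep the existing column value when they see those defaults (the "use default value when not writing to db" approach), which is why a filled row leaves the order_placed_* fields zeroed rather than re-deriving them. A minimal sketch of that merge rule with made-up values; the real storers express it in the upsert's on-conflict clause, not in Rust:

// Sketch of the "default value means don't overwrite" rule assumed above.
fn merge_i64(existing: i64, incoming: i64) -> i64 {
    if incoming == 0 { existing } else { incoming }
}

fn merge_string(existing: String, incoming: String) -> String {
    if incoming.is_empty() { existing } else { incoming }
}

fn main() {
    // A placed row set order_placed_timestamp; the later filled row carries 0 there,
    // so the placed value survives the merge.
    let placed_ts = 1_735_689_000_i64;
    assert_eq!(merge_i64(placed_ts, 0), placed_ts);

    // The filled event does carry the seller, so it wins over the "" placeholder.
    let seller = merge_string("".to_string(), "0xseller".to_string());
    assert_eq!(seller, "0xseller");
}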
diff --git a/indexer/src/onchain_events/tradeport_contract_events/shared.rs b/indexer/src/onchain_events/tradeport_contract_events/shared.rs
new file mode 100644
index 0000000..648233b
--- /dev/null
+++ b/indexer/src/onchain_events/tradeport_contract_events/shared.rs
@@ -0,0 +1,53 @@
+use serde::{Deserialize, Serialize};
+
+#[derive(Clone, Debug, Deserialize, Serialize)]
+pub struct NftV1TokenDataId {
+    pub creator: String,
+    pub collection: String,
+    pub name: String,
+}
+
+#[derive(Clone, Debug, Deserialize, Serialize)]
+pub struct NftV1TokenId {
+    /// The id of the common token data shared by tokens with different property_versions.
+    pub token_data_id: NftV1TokenDataId,
+    /// The version of the property map; when a fungible token is mutated, a new property
+    /// version is created and assigned to the token to make it an NFT.
+    pub property_version: String,
+}
+
+#[derive(Clone, Debug, Deserialize, Serialize)]
+pub struct NftV1CollectionId {
+    pub collection_creator: String,
+    pub collection_name: String,
+}
+
+// If a user sells an NFT, buys it back, and then sells it again, the two orders will have
+// the same order_id. We can't think of a workaround here: if we added another element
+// (e.g. the order placed tx version) to the order_id, we could not reconstruct the
+// order_id when the order is filled later. We just have to hope that no one buys or
+// sells v1 NFTs anymore.
+pub fn generate_ask_order_id_for_nft_v1(owner: String, token_id: NftV1TokenId) -> String {
+    let order_id = format!(
+        "{}_{}_{}_{}_{}",
+        token_id.token_data_id.creator,
+        token_id.token_data_id.collection,
+        token_id.token_data_id.name,
+        token_id.property_version,
+        owner
+    );
+
+    let hash = blake3::hash(order_id.as_bytes());
+    hex::encode(hash.as_bytes())
+}
+
+pub fn generate_bid_order_id_for_nft_v1(nonce: String) -> String {
+    let order_id = format!("tradeport_v1_bid_order_{}", nonce);
+
+    let hash = blake3::hash(order_id.as_bytes());
+    hex::encode(hash.as_bytes())
+}
+
+pub fn generate_collection_bid_order_id_for_nft_v1(nonce: String) -> String {
+    let order_id = format!("tradeport_v1_collection_bid_order_{}", nonce);
+
+    let hash = blake3::hash(order_id.as_bytes());
+    hex::encode(hash.as_bytes())
+}
diff --git a/indexer/src/utils/aptos_utils.rs b/indexer/src/utils/aptos_utils.rs
new file mode 100644
index 0000000..ad45020
--- /dev/null
+++ b/indexer/src/utils/aptos_utils.rs
@@ -0,0 +1,23 @@
+pub const APT_COIN: &str = "0x1::aptos_coin::AptosCoin";
+pub const APT_FA: &str = "0xa";
+
+pub enum NFTStandard {
+    V1 = 1,
+    V2 = 2,
+}
+
+pub enum PaymentTokenType {
+    Coin = 1,
+    FA = 2,
+}
+
+pub enum OrderStatus {
+    Open = 1,
+    Filled = 2,
+    Cancelled = 3,
+}
+
+pub enum AskOrderType {
+    FixedPrice = 1,
+    Auction = 2,
+}
diff --git a/indexer/src/utils/chain_id.rs b/indexer/src/utils/chain_id.rs
index de9784b..0546eb1 100644
--- a/indexer/src/utils/chain_id.rs
+++ b/indexer/src/utils/chain_id.rs
@@ -2,8 +2,8 @@ use anyhow::{Context, Result};
 
 use super::database_utils::ArcDbPool;
 use crate::{
-    contract_upgrade_schema::contract_upgrade_ledger_infos as ledger_infos,
-    db_models::contract_upgrade_indexer::ledger_info::LedgerInfo,
+    db_models::ledger_info::LedgerInfo,
+    schema::ledger_infos,
     utils::{
         database_connection::get_db_connection, database_execution::execute_with_better_error,
     },
diff --git a/indexer/src/utils/database_connection.rs b/indexer/src/utils/database_connection.rs
index 1dd1cd6..c247d5b 100644
--- a/indexer/src/utils/database_connection.rs
+++ b/indexer/src/utils/database_connection.rs
@@ -29,6 +29,7 @@ fn establish_connection(database_url: &str) -> BoxFuture<ConnectionResult<AsyncPgConnection>> {
     let connector = TlsConnector::builder()
+        .danger_accept_invalid_certs(true)
         .build()
         .expect("Could not build default TLS connector");
     MakeTlsConnector::new(connector)
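Disabling certificate validation unconditionally is risky anywhere outside local development. If skipping TLS verification needs to stay, one option is to gate it behind a flag instead of hardcoding `true`; a sketch against the native-tls builder API (the `accept_invalid_certs` parameter and its config plumbing are hypothetical, not part of this patch):

use native_tls::TlsConnector;
use postgres_native_tls::MakeTlsConnector;

// Hypothetical flag, e.g. read from the indexer's YAML config.
fn make_tls_connector(accept_invalid_certs: bool) -> MakeTlsConnector {
    let connector = TlsConnector::builder()
        // Only skip verification when explicitly asked to
        // (e.g. a self-signed certificate on a dev database).
        .danger_accept_invalid_certs(accept_invalid_certs)
        .build()
        .expect("Could not build TLS connector");
    MakeTlsConnector::new(connector)
}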
diff --git a/indexer/src/utils/database_execution.rs b/indexer/src/utils/database_execution.rs
index 77ce55d..3997019 100644
--- a/indexer/src/utils/database_execution.rs
+++ b/indexer/src/utils/database_execution.rs
@@ -1,10 +1,13 @@
+use aptos_indexer_processor_sdk::utils::errors::ProcessorError;
 use diesel::{
     debug_query,
     pg::Pg,
     query_builder::{QueryFragment, QueryId},
+    result::Error as DieselError,
     QueryResult,
 };
 use diesel_async::{AsyncConnection, AsyncPgConnection, RunQueryDsl};
+use futures_util::future;
 
 pub async fn execute_with_better_error<U>(
     conn: &mut AsyncPgConnection,
@@ -32,3 +35,19 @@ where
     }
     res
 }
+pub async fn handle_db_execution(
+    tasks: Vec<tokio::task::JoinHandle<Result<(), DieselError>>>,
+) -> Result<(), ProcessorError> {
+    let results = future::try_join_all(tasks)
+        .await
+        .expect("Task panicked executing in chunks");
+    for res in results {
+        res.map_err(|e| {
+            tracing::warn!("Error running query: {:?}", e);
+            ProcessorError::ProcessError {
+                message: e.to_string(),
+            }
+        })?;
+    }
+    Ok(())
+}
diff --git a/indexer/src/utils/latest_processed_version_tracker.rs b/indexer/src/utils/latest_processed_version_tracker.rs
index 1867545..aa7933f 100644
--- a/indexer/src/utils/latest_processed_version_tracker.rs
+++ b/indexer/src/utils/latest_processed_version_tracker.rs
@@ -15,9 +15,8 @@ use super::{
     database_utils::ArcDbPool,
 };
 use crate::{
-    config::indexer_processor_config::DbConfig,
-    contract_upgrade_schema::contract_upgrade_processor_status as processor_status,
-    db_models::contract_upgrade_indexer::processor_status::ProcessorStatus,
+    config::indexer_processor_config::DbConfig, db_models::processor_status::ProcessorStatus,
+    schema::processor_status,
 };
 
 const UPDATE_PROCESSOR_STATUS_SECS: u64 = 1;
diff --git a/indexer/src/utils/mod.rs b/indexer/src/utils/mod.rs
index 1a5c503..94d2331 100644
--- a/indexer/src/utils/mod.rs
+++ b/indexer/src/utils/mod.rs
@@ -1,6 +1,8 @@
+pub mod aptos_utils;
 pub mod chain_id;
 pub mod database_connection;
 pub mod database_execution;
 pub mod database_utils;
 pub mod latest_processed_version_tracker;
 pub mod starting_version;
+pub mod time_utils;
diff --git a/indexer/src/utils/starting_version.rs b/indexer/src/utils/starting_version.rs
index b30abee..784c517 100644
--- a/indexer/src/utils/starting_version.rs
+++ b/indexer/src/utils/starting_version.rs
@@ -3,7 +3,7 @@ use anyhow::{Context, Result};
 use super::database_utils::ArcDbPool;
 use crate::{
     config::indexer_processor_config::IndexerProcessorConfig,
-    db_models::contract_upgrade_indexer::processor_status::ProcessorStatusQuery,
+    db_models::processor_status::ProcessorStatusQuery,
 };
 
 pub async fn get_starting_version(
diff --git a/indexer/src/utils/time_utils.rs b/indexer/src/utils/time_utils.rs
new file mode 100644
index 0000000..307cc68
--- /dev/null
+++ b/indexer/src/utils/time_utils.rs
@@ -0,0 +1,8 @@
+use std::time::{SystemTime, UNIX_EPOCH};
+
+pub fn get_unix_timestamp_in_secs() -> i64 {
+    SystemTime::now()
+        .duration_since(UNIX_EPOCH)
+        .unwrap()
+        .as_secs() as i64
+}
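For reference, the call pattern handle_db_execution is built for: a storer spawns one tokio task per chunk of rows and hands the join handles over, and any diesel error is logged and surfaced as a ProcessorError while a panicked task aborts via the expect. A sketch under assumed names; `insert_chunk`, the chunk size, and the i64 row type are illustrative stand-ins, not part of this patch:

use aptos_indexer_processor_sdk::utils::errors::ProcessorError;
use diesel::result::Error as DieselError;

// Stand-in for a real diesel insert_into(...).on_conflict(...) execution.
async fn insert_chunk(chunk: Vec<i64>) -> Result<(), DieselError> {
    let _ = chunk;
    Ok(())
}

// Hypothetical storer entry point; assumes handle_db_execution from
// database_execution.rs is in scope.
async fn store_all(rows: Vec<i64>) -> Result<(), ProcessorError> {
    let tasks = rows
        .chunks(200)
        .map(|c| {
            let chunk = c.to_vec();
            tokio::spawn(async move { insert_chunk(chunk).await })
        })
        .collect::<Vec<_>>();
    handle_db_execution(tasks).await
}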