diff --git a/Cargo.lock b/Cargo.lock index 3fc343eca..c6cc5a822 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5998,7 +5998,9 @@ dependencies = [ "frame-support", "frame-system", "pallet-assets", + "pallet-assets-ext", "pallet-balances", + "pallet-nft", "parity-scale-codec", "scale-info", "seed-pallet-common", diff --git a/evm-precompiles/erc1155/src/lib.rs b/evm-precompiles/erc1155/src/lib.rs index 57285272c..766916203 100644 --- a/evm-precompiles/erc1155/src/lib.rs +++ b/evm-precompiles/erc1155/src/lib.rs @@ -31,7 +31,9 @@ use precompile_utils::{ prelude::*, }; use seed_pallet_common::utils::TokenBurnAuthority; -use seed_primitives::{AssetId, Balance, CollectionUuid, MetadataScheme, SerialNumber, TokenId}; +use seed_primitives::{ + AssetId, Balance, CollectionUuid, IssuanceId, MetadataScheme, SerialNumber, TokenId, +}; use sp_core::{Encode, H160, H256, U256}; use sp_runtime::{traits::SaturatedConversion, BoundedVec}; use sp_std::{marker::PhantomData, vec, vec::Vec}; @@ -1290,8 +1292,7 @@ where .map_err(|_| revert("ERC1155: Too many serial numbers in one issuance"))?; handle.record_cost(RuntimeHelper::::db_read_gas_cost())?; - let next_issuance_id = - pallet_sft::PendingIssuances::::get(collection_id).next_issuance_id; + let next_issuance_id = pallet_sft::NextIssuanceId::::get(); // Dispatch call (if enough gas). RuntimeHelper::::try_dispatch( @@ -1324,41 +1325,40 @@ where read_args!(handle, { owner: Address }); let owner: H160 = owner.into(); + let mut iter = pallet_sft::PendingIssuances::::iter_prefix(( + collection_id, + Runtime::AccountId::from(owner), + )); - handle.record_cost(RuntimeHelper::::db_read_gas_cost())?; - let pending_issuances = pallet_sft::PendingIssuances::::get(collection_id) - .get_pending_issuances(&owner.into()); + let mut issuance_ids: Vec = Vec::new(); + let mut issuances: Vec<(Vec, Vec, Vec)> = Vec::new(); - let issuance_ids = pending_issuances.iter().map(|p| U256::from(p.issuance_id)).collect(); + while let Some((p, q)) = iter.next() { + handle.record_cost(RuntimeHelper::::db_read_gas_cost())?; - let issuances: Vec<(Vec, Vec, Vec)> = pending_issuances - .iter() - .map(|p| -> EvmResult<(Vec, Vec, Vec)> { - let (serial_numbers, balances): (Vec, Vec) = - p.serial_numbers.clone().into_iter().unzip(); - - let mut burn_auths = vec![]; - for serial_number in serial_numbers.iter() { - handle.record_cost(RuntimeHelper::::db_read_gas_cost())?; - - let burn_auth = match pallet_sft::TokenUtilityFlags::::get(( - collection_id, - *serial_number, - )) - .burn_authority - { - Some(burn_auth) => burn_auth.into(), - _ => 0 as u8, - }; + let mut burn_auths = Vec::new(); + let mut serial_numbers = Vec::new(); + let mut balances = Vec::new(); + for (serial_number, balance) in q.into_inner() { + handle.record_cost(RuntimeHelper::::db_read_gas_cost())?; + let burn_auth = match pallet_sft::TokenUtilityFlags::::get(( + collection_id, + serial_number, + )) + .burn_authority + { + Some(burn_auth) => burn_auth.into(), + _ => 0 as u8, + }; - burn_auths.push(burn_auth); - } + burn_auths.push(burn_auth); + serial_numbers.push(serial_number); + balances.push(balance); + } - Ok((serial_numbers, balances, burn_auths)) - }) - .collect::>>() - .into_iter() - .collect::>()?; + issuance_ids.push(U256::from(p)); + issuances.push((serial_numbers, balances, burn_auths)); + } Ok(succeed(EvmDataWriter::new().write::>(issuance_ids).write(issuances).build())) } @@ -1371,10 +1371,10 @@ where read_args!(handle, { issuance_id: U256 }); - if issuance_id > u32::MAX.into() { + if issuance_id > 
u32::MAX.into() { - return Err(revert("ERC721: Expected issuance id <= 2^32")); + if issuance_id > IssuanceId::MAX.into() { + return Err(revert("ERC1155: Expected issuance id <= 2^64")); } - let issuance_id: u32 = issuance_id.saturated_into(); + let issuance_id: IssuanceId = issuance_id.saturated_into(); let origin = handle.context().caller; @@ -1387,11 +1388,12 @@ where let collection_owner = collection.collection_owner; handle.record_cost(RuntimeHelper::::db_read_gas_cost())?; - let pending_issuance = match pallet_sft::PendingIssuances::::get(collection_id) - .get_pending_issuance(&origin.into(), issuance_id) - { - Some(pending_issuance) => pending_issuance, - None => return Err(revert("Issuance does not exist")), + let Some(serial_numbers) = pallet_sft::PendingIssuances::::get(( + collection_id, + &Runtime::AccountId::from(origin), + issuance_id, + )) else { + return Err(revert("Issuance does not exist")); }; // Dispatch call (if enough gas). @@ -1401,7 +1402,7 @@ where pallet_sft::Call::::accept_soulbound_issuance { collection_id, issuance_id }, )?; - for (serial_number, _) in pending_issuance.serial_numbers { + for (serial_number, _) in serial_numbers { handle.record_cost(RuntimeHelper::::db_read_gas_cost())?; let burn_authority = match pallet_sft::TokenUtilityFlags::::get((collection_id, serial_number)) diff --git a/evm-precompiles/erc721/src/lib.rs b/evm-precompiles/erc721/src/lib.rs index 166cb2ce3..6387b8427 100644 --- a/evm-precompiles/erc721/src/lib.rs +++ b/evm-precompiles/erc721/src/lib.rs @@ -29,7 +29,7 @@ use precompile_utils::{ }; use seed_pallet_common::{utils::TokenBurnAuthority, NFTExt}; use seed_primitives::{ - AssetId, Balance, CollectionUuid, EthAddress, SerialNumber, TokenCount, TokenId, + AssetId, Balance, CollectionUuid, EthAddress, IssuanceId, SerialNumber, TokenCount, TokenId, }; use sp_core::{Encode, H160, H256, U256}; use sp_runtime::{traits::SaturatedConversion, BoundedVec}; @@ -1215,8 +1215,7 @@ where let origin = handle.context().caller; handle.record_cost(RuntimeHelper::::db_read_gas_cost())?; - let next_issuance_id = - pallet_nft::PendingIssuances::::get(collection_id).next_issuance_id; + let next_issuance_id = pallet_nft::NextIssuanceId::::get(); // Dispatch call (if enough gas).
RuntimeHelper::::try_dispatch( @@ -1256,17 +1255,22 @@ where let owner: H160 = owner.into(); - handle.record_cost(RuntimeHelper::::db_read_gas_cost())?; - let pending_issuances = pallet_nft::PendingIssuances::::get(collection_id) - .get_pending_issuances(&owner.into()); + let mut iter = pallet_nft::PendingIssuances::::iter_prefix(( + collection_id, + Runtime::AccountId::from(owner), + )); + + let mut issuance_ids: Vec<U256> = Vec::new(); + let mut issuances: Vec<(U256, u8)> = Vec::new(); - let issuance_ids: Vec = - pending_issuances.iter().map(|p| U256::from(p.issuance_id)).collect(); + while let Some((p, q)) = iter.next() { + // Record gas cost before processing the item + handle.record_cost(RuntimeHelper::::db_read_gas_cost())?; - let issuances: Vec<(U256, u8)> = pending_issuances - .iter() - .map(|p| (U256::from(p.quantity), p.burn_authority.into())) - .collect(); + // Process and store the values + issuance_ids.push(U256::from(p)); + issuances.push((U256::from(q.quantity), q.burn_authority.into())); + } Ok(succeed(EvmDataWriter::new().write(issuance_ids).write(issuances).build())) } @@ -1279,10 +1283,10 @@ where read_args!(handle, { issuance_id: U256 }); - if issuance_id > u32::MAX.into() { - return Err(revert("ERC721: Expected issuance id <= 2^32")); + if issuance_id > IssuanceId::MAX.into() { + return Err(revert("ERC721: Expected issuance id <= 2^64")); } - let issuance_id: u32 = issuance_id.saturated_into(); + let issuance_id: IssuanceId = issuance_id.saturated_into(); let origin = handle.context().caller; @@ -1296,11 +1300,12 @@ where let serial_number = collection.next_serial_number; handle.record_cost(RuntimeHelper::::db_read_gas_cost())?; - let pending_issuance = match pallet_nft::PendingIssuances::::get(collection_id) - .get_pending_issuance(&origin.into(), issuance_id) - { - Some(pending_issuance) => pending_issuance, - None => return Err(revert("Issuance does not exist")), + let Some(pending_issuance) = pallet_nft::PendingIssuances::::get(( + collection_id, + Runtime::AccountId::from(origin), + issuance_id, + )) else { + return Err(revert("Issuance does not exist")); }; // Dispatch call (if enough gas).
@@ -1338,16 +1343,17 @@ where if token_id > u32::MAX.into() { return Err(revert("ERC721: Expected token id <= 2^32")); } - let token_id: SerialNumber = token_id.saturated_into(); + let serial_number: SerialNumber = token_id.saturated_into(); handle.record_cost(RuntimeHelper::::db_read_gas_cost())?; - let burn_auth: u8 = - match pallet_nft::TokenUtilityFlags::::get((collection_id, token_id)) - .burn_authority - { - Some(burn_authority) => burn_authority.into(), - _ => 0, // default to TokenOwner - }; + let token_info = match pallet_nft::TokenInfo::::get(collection_id, serial_number) { + Some(token_info) => token_info, + None => return Err(revert("ERC721: Token does not exist")), + }; + let burn_auth: u8 = match token_info.utility_flags.burn_authority { + Some(burn_authority) => burn_authority.into(), + _ => 0, // default to TokenOwner + }; Ok(succeed(EvmDataWriter::new().write(burn_auth).build())) } diff --git a/pallet/common/src/lib.rs b/pallet/common/src/lib.rs index 1f99d67c1..a9bbff3f9 100644 --- a/pallet/common/src/lib.rs +++ b/pallet/common/src/lib.rs @@ -561,6 +561,9 @@ pub trait NFTExt { /// Returns Some(token_owner) for a token if the owner exists fn get_token_owner(token_id: &TokenId) -> Option; + /// Returns whether the token exists + fn token_exists(token_id: &TokenId) -> bool; + /// Returns collection current issuance and max issuance fn get_collection_issuance( collection_id: CollectionUuid, @@ -609,7 +612,7 @@ pub trait NFTExt { ) -> DispatchResult; /// Remove a token lock without performing checks - fn remove_token_lock(token_id: TokenId); + fn remove_token_lock(token_id: TokenId) -> DispatchResult; fn get_collection_owner( collection_id: CollectionUuid, diff --git a/pallet/common/src/test_utils.rs b/pallet/common/src/test_utils.rs index 264718eef..218f3508d 100644 --- a/pallet/common/src/test_utils.rs +++ b/pallet/common/src/test_utils.rs @@ -265,6 +265,7 @@ macro_rules! 
impl_pallet_nft_config { type StringLimit = StringLimit; type NFIRequest = (); type MaxPendingIssuances = MaxPendingIssuances; + type Migrator = (); } }; } diff --git a/pallet/marketplace/src/tests.rs b/pallet/marketplace/src/tests.rs index 5ca19fbba..572a238b4 100644 --- a/pallet/marketplace/src/tests.rs +++ b/pallet/marketplace/src/tests.rs @@ -21,7 +21,7 @@ use crate::mock::{ }; use core::ops::Mul; use frame_support::traits::{fungibles::Inspect, OnInitialize}; -use pallet_nft::{test_utils::NftBuilder, TokenLocks}; +use pallet_nft::test_utils::NftBuilder; use pallet_sft::{test_utils::sft_balance_of, test_utils::SftBuilder, TokenInfo}; use seed_pallet_common::test_prelude::*; use seed_primitives::{CrossChainCompatibility, MetadataScheme, RoyaltiesSchedule, TokenCount}; @@ -171,8 +171,8 @@ fn sell() { for serial_number in serial_numbers.iter() { assert_eq!( - TokenLocks::::get((collection_id, serial_number)).unwrap(), - TokenLockReason::Listed(listing_id) + ::NFTExt::get_token_lock((collection_id, *serial_number)), + Some(TokenLockReason::Listed(listing_id)) ); } @@ -296,7 +296,10 @@ fn sell_multiple() { close: System::block_number() + DefaultListingDuration::get(), })); - assert_eq!(TokenLocks::::get(token_id).unwrap(), TokenLockReason::Listed(listing_id)); + assert_eq!( + ::NFTExt::get_token_lock(token_id), + Some(TokenLockReason::Listed(listing_id)) + ); assert!(OpenCollectionListings::::get(collection_id, listing_id).unwrap()); let fee_pot_account: AccountId = FeePotId::get().into_account_truncating(); @@ -1021,7 +1024,7 @@ fn buy() { .is_none()); // ownership changed - assert!(TokenLocks::::get(token_id).is_none()); + assert!(::NFTExt::get_token_lock(token_id).is_none()); assert!(OpenCollectionListings::::get(collection_id, listing_id).is_none()); assert_eq!( Nft::owned_tokens(collection_id, &buyer, 0, 1000), @@ -1082,7 +1085,7 @@ fn buy_with_xrp() { .is_none()); // ownership changed - assert!(TokenLocks::::get(token_id).is_none()); + assert!(::NFTExt::get_token_lock(token_id).is_none()); assert!(OpenCollectionListings::::get(collection_id, listing_id).is_none()); assert_eq!( Nft::owned_tokens(collection_id, &buyer, 0, 1000), @@ -1415,8 +1418,8 @@ fn auction_bundle() { assert!(OpenCollectionListings::::get(collection_id, listing_id).unwrap()); for serial_number in serial_numbers.iter() { assert_eq!( - TokenLocks::::get((collection_id, serial_number)).unwrap(), - TokenLockReason::Listed(listing_id) + ::NFTExt::get_token_lock((collection_id, *serial_number)), + Some(TokenLockReason::Listed(listing_id)) ); } @@ -1479,8 +1482,8 @@ fn auction_bundle_no_bids() { assert!(OpenCollectionListings::::get(collection_id, listing_id).unwrap()); for serial_number in serial_numbers.iter() { assert_eq!( - TokenLocks::::get((collection_id, serial_number)).unwrap(), - TokenLockReason::Listed(listing_id) + ::NFTExt::get_token_lock((collection_id, *serial_number)), + Some(TokenLockReason::Listed(listing_id)) ); } @@ -1492,7 +1495,8 @@ fn auction_bundle_no_bids() { assert!(!OpenCollectionListings::::contains_key(collection_id, listing_id)); // Token locks should be removed for serial_number in serial_numbers.iter() { - assert_eq!(TokenLocks::::get((collection_id, serial_number)), None); + assert!(::NFTExt::get_token_lock((collection_id, *serial_number)) + .is_none()); } }) } @@ -1546,8 +1550,8 @@ fn auction() { None, )); assert_eq!( - TokenLocks::::get(token_id).unwrap(), - TokenLockReason::Listed(listing_id) + ::NFTExt::get_token_lock(token_id), + Some(TokenLockReason::Listed(listing_id)) ); 
assert_eq!(NextListingId::::get(), listing_id + 1); assert!(OpenCollectionListings::::get(collection_id, listing_id).unwrap()); @@ -1605,7 +1609,7 @@ fn auction() { ); // ownership changed - assert!(TokenLocks::::get(token_id).is_none()); + assert!(::NFTExt::get_token_lock(token_id).is_none()); assert_eq!( Nft::owned_tokens(collection_id, &bidder_2, 0, 1000), (0_u32, 1, vec![token_id.1]) @@ -1651,8 +1655,8 @@ fn auction_with_xrp_asset() { None, )); assert_eq!( - TokenLocks::::get(token_id).unwrap(), - TokenLockReason::Listed(listing_id) + ::NFTExt::get_token_lock(token_id), + Some(TokenLockReason::Listed(listing_id)) ); assert_eq!(NextListingId::::get(), listing_id + 1); assert!(OpenCollectionListings::::get(collection_id, listing_id).unwrap()); @@ -1694,7 +1698,7 @@ fn auction_with_xrp_asset() { ); // ownership changed - assert!(TokenLocks::::get(token_id).is_none()); + assert!(::NFTExt::get_token_lock(token_id).is_none()); assert_eq!( Nft::owned_tokens(collection_id, &bidder_2, 0, 1000), (0_u32, 1, vec![token_id.1]) @@ -1934,7 +1938,7 @@ fn auction_fails_prechecks() { Some(1), None, ), - pallet_nft::Error::::NotTokenOwner + pallet_nft::Error::::NoToken ); let serial_numbers: BoundedVec = @@ -2271,7 +2275,10 @@ fn make_simple_offer_on_fixed_price_listing() { )); // Sanity check assert!(Listings::::get(listing_id).is_some()); - assert!(TokenLocks::::get(token_id).is_some()); + assert_eq!( + ::NFTExt::get_token_lock(token_id), + Some(TokenLockReason::Listed(listing_id)) + ); let (offer_id, _) = make_new_simple_offer(offer_amount, token_id, buyer, None); // Check funds have been locked @@ -2288,7 +2295,7 @@ fn make_simple_offer_on_fixed_price_listing() { // Check that fixed price listing and locks are now removed assert!(Listings::::get(listing_id).is_none()); - assert!(TokenLocks::::get(token_id).is_none()); + assert!(::NFTExt::get_token_lock(token_id).is_none()); // Check offer storage has been removed assert!(TokenOffers::::get(token_id).is_none()); assert!(Offers::::get(offer_id).is_none()); @@ -3686,7 +3693,7 @@ mod listing_tokens { }); assert_noop!( tokens.lock_tokens(&create_account(1), 1), - pallet_nft::Error::::NotTokenOwner + pallet_nft::Error::::NoToken ); // Lock tokens not token owner @@ -3702,7 +3709,10 @@ mod listing_tokens { // Lock tokens works assert_ok!(tokens.lock_tokens(&token_owner, 1)); - assert_eq!(TokenLocks::::get(token_id).unwrap(), TokenLockReason::Listed(1)); + assert_eq!( + ::NFTExt::get_token_lock(token_id), + Some(TokenLockReason::Listed(1)) + ); // Lock tokens token already locked assert_noop!( @@ -3765,12 +3775,15 @@ mod listing_tokens { assert_ok!(tokens.lock_tokens(&token_owner, 1)); // Sanity check - assert_eq!(TokenLocks::::get(token_id).unwrap(), TokenLockReason::Listed(1)); + assert_eq!( + ::NFTExt::get_token_lock(token_id), + Some(TokenLockReason::Listed(1)) + ); // Unlock tokens works assert_ok!(tokens.unlock_tokens(&token_owner)); - assert!(!TokenLocks::::contains_key(token_id)); + assert!(::NFTExt::get_token_lock(token_id).is_none()); }); } @@ -3826,13 +3839,16 @@ mod listing_tokens { assert_ok!(tokens.lock_tokens(&token_owner, 1)); // Sanity check - assert_eq!(TokenLocks::::get(token_id).unwrap(), TokenLockReason::Listed(1)); + assert_eq!( + ::NFTExt::get_token_lock(token_id), + Some(TokenLockReason::Listed(1)) + ); // Unlock and transfer tokens works let recipient = create_account(1123); assert_ok!(tokens.unlock_and_transfer(&token_owner, &recipient)); - assert!(!TokenLocks::::contains_key(token_id)); + 
assert!(::NFTExt::get_token_lock(token_id).is_none()); // Verify owner of token is recipient assert_eq!( @@ -3867,8 +3883,8 @@ mod listing_tokens { // Sanity check for serial_number in serial_numbers.clone() { assert_eq!( - TokenLocks::::get((collection_id, serial_number)).unwrap(), - TokenLockReason::Listed(1) + ::NFTExt::get_token_lock((collection_id, serial_number)), + Some(TokenLockReason::Listed(1)) ); } @@ -3876,7 +3892,7 @@ mod listing_tokens { let recipient = create_account(1123); assert_ok!(tokens.unlock_and_transfer(&token_owner, &recipient)); - assert!(!TokenLocks::::contains_key(token_id)); + assert!(::NFTExt::get_token_lock(token_id).is_none()); // Verify owner of token is recipient assert_eq!( diff --git a/pallet/marketplace/src/types.rs b/pallet/marketplace/src/types.rs index 4cee0dc89..ec08c4c33 100644 --- a/pallet/marketplace/src/types.rs +++ b/pallet/marketplace/src/types.rs @@ -121,7 +121,7 @@ impl ListingTokens { ListingTokens::Nft(nfts) => { for serial_number in nfts.serial_numbers.iter() { let token_id = (nfts.collection_id, *serial_number); - T::NFTExt::remove_token_lock(token_id); + T::NFTExt::remove_token_lock(token_id)?; } }, ListingTokens::Sft(sfts) => { @@ -141,7 +141,7 @@ impl ListingTokens { ListingTokens::Nft(nfts) => { ensure!(!nfts.serial_numbers.is_empty(), Error::::EmptyTokens); for serial_number in nfts.serial_numbers.iter() { - T::NFTExt::remove_token_lock((nfts.collection_id, *serial_number)); + T::NFTExt::remove_token_lock((nfts.collection_id, *serial_number))?; } T::NFTExt::do_transfer( from, diff --git a/pallet/migration/Cargo.toml b/pallet/migration/Cargo.toml index cff13fd42..cac2f9b32 100644 --- a/pallet/migration/Cargo.toml +++ b/pallet/migration/Cargo.toml @@ -20,12 +20,14 @@ sp-std = { workspace = true } seed-primitives = { workspace = true } seed-pallet-common = { workspace = true } +pallet-nft = { workspace = true } [dev-dependencies] sp-runtime = { workspace = true } sp-io = { workspace = true } pallet-balances = { workspace = true } pallet-assets = { workspace = true } +pallet-assets-ext = { workspace = true } [features] default = ["std"] @@ -40,6 +42,8 @@ std = [ "frame-support/std", "frame-system/std", "frame-benchmarking?/std", + "pallet-nft/std", + "pallet-assets-ext/std", ] runtime-benchmarks = ["frame-benchmarking"] try-runtime = ["frame-support/try-runtime"] diff --git a/pallet/migration/src/benchmarking.rs b/pallet/migration/src/benchmarking.rs index 5f15212a7..8d423407d 100644 --- a/pallet/migration/src/benchmarking.rs +++ b/pallet/migration/src/benchmarking.rs @@ -16,12 +16,18 @@ use super::*; #[allow(unused_imports)] use crate::Pallet as Migration; -use frame_benchmarking::{benchmarks, impl_benchmark_test_suite}; +use frame_benchmarking::{account as bench_account, benchmarks, impl_benchmark_test_suite}; use frame_support::StorageHasher; use frame_system::RawOrigin; -use seed_primitives::{CollectionUuid, SerialNumber}; +use seed_pallet_common::utils::{TokenBurnAuthority, TokenUtilityFlags as TokenFlags}; +use seed_primitives::{ + CollectionUuid, CrossChainCompatibility, ListingId, MetadataScheme, OriginChain, + RoyaltiesSchedule, SerialNumber, TokenCount, TokenLockReason, +}; benchmarks! { + where_clause { where T: pallet_nft::Config } + // This benchmarks the weight of dispatching migrate to execute 1 `NoopMigration` step migrate { let weight_limit = T::MaxMigrationWeight::get(); @@ -35,12 +41,65 @@ } current_migration_step { + let p in 1 .. 
(50); + MigrationEnabled::::put(true); - let mut key = Twox64Concat::hash(&(1 as CollectionUuid).encode()); - let serial_key = Twox64Concat::hash(&(2 as SerialNumber).encode()); - key.extend_from_slice(&serial_key); - let xls20_token_id: [u8; 64] = "000b013a95f14b0e44f78a264e41713c64b5f89242540ee2bc8b858e00000d67".as_bytes().try_into().unwrap(); - frame_support::migration::put_storage_value::<[u8; 64]>(b"Xls20", b"Xls20TokenMap", &key, xls20_token_id); + + ///Old Collection Info + #[derive(Encode,Decode,TypeInfo)] + pub struct OldCollectionInformation + { + pub owner: AccountId, + pub name: BoundedVec, + pub metadata_scheme: MetadataScheme, + pub royalties_schedule: Option>, + pub max_issuance: Option, + pub origin_chain: OriginChain, + pub next_serial_number: SerialNumber, + pub collection_issuance: TokenCount, + pub cross_chain_compatibility: CrossChainCompatibility, + pub owned_tokens: + BoundedVec, MaxTokensPerCollection>, + } + + #[derive(Decode,Encode,TypeInfo)] + pub struct OldTokenOwnership + { + pub owner: AccountId, + pub owned_serials: BoundedVec, + } + let serials = (1..=p).collect::>(); + let key = Twox64Concat::hash(&(1 as CollectionUuid).encode()); + let collection_info = OldCollectionInformation { + owner: T::AccountId::from(bench_account("test", 0, 0)), + name: BoundedVec::truncate_from(vec![1, 2, 3]), + metadata_scheme: MetadataScheme::try_from(b"metadata".as_slice()).unwrap(), + royalties_schedule: None, + max_issuance: Some(100), + origin_chain: OriginChain::Root, + next_serial_number: 1, + collection_issuance: 1, + cross_chain_compatibility: CrossChainCompatibility::default(), + owned_tokens: BoundedVec::truncate_from(vec![OldTokenOwnership { + owner: T::AccountId::from(bench_account("test", 0, 0)), + owned_serials: BoundedVec::truncate_from(serials.clone()), + }]), + }; + frame_support::migration::put_storage_value::> (b"Nft", b"CollectionInfo", &key, collection_info); + + // Insert data into TokenLocks and TokenUtilityFlags to benchmark worst case scenario + for serial in serials { + let token_id = (1 as CollectionUuid, serial); + let token_lock_reason = TokenLockReason::Listed(1 as ListingId); + let key = Twox64Concat::hash(&token_id.encode()); + frame_support::migration::put_storage_value:: (b"Nft", b"TokenLocks", &key, token_lock_reason); + let token_flags = TokenFlags { + transferable: true, + burn_authority: Some(TokenBurnAuthority::Both), + }; + frame_support::migration::put_storage_value:: (b"Nft", b"TokenUtilityFlags", &key, token_flags); + } + Status::::put(MigrationStatus::InProgress { steps_done: 0 }); }: { // Call a single step to benchmark. 
diff --git a/pallet/migration/src/lib.rs b/pallet/migration/src/lib.rs index d5fd09bce..c3974d409 100644 --- a/pallet/migration/src/lib.rs +++ b/pallet/migration/src/lib.rs @@ -111,18 +111,26 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] pub enum Event { + /// The migration pallet's runtime upgrade hook has been executed + MigrationRuntimeUpgrade, /// Multi-Block migration has been enabled MigrationEnabled, /// Multi-Block migration has been disabled MigrationDisabled, /// The current migration has completed - MigrationComplete { items_migrated: u32 }, - /// A Migration has started - MigrationStarted, + MigrationComplete { + items_migrated: u32, + }, + /// A migration has been set + MigrationSet, /// The block delay has been set - BlockDelaySet { block_delay: Option }, + BlockDelaySet { + block_delay: Option, + }, /// The block limit has been set - BlockLimitSet { block_limit: u32 }, + BlockLimitSet { + block_limit: u32, + }, } #[pallet::error] @@ -161,7 +168,7 @@ pub mod pallet { // Return read for Status within migration_in_progress function Status::::put(MigrationStatus::InProgress { steps_done: 0 }); - Self::deposit_event(Event::MigrationStarted); + Self::deposit_event(Event::MigrationSet); log::debug!(target: LOG_TARGET, "🦆 A new multi-block migration has started"); DbWeight::get().reads_writes(1, 1) } @@ -294,6 +301,8 @@ impl Pallet { Status::::put(MigrationStatus::Completed); LastKey::::kill(); T::CurrentMigration::on_complete(); + // Disable the migration flag now that the migration has completed + MigrationEnabled::::put(false); log::debug!(target: LOG_TARGET, "🦆 Migration completed successfully"); log::debug!(target: LOG_TARGET, "🦆 Total items migrated: {}", total_steps); Self::deposit_event(Event::MigrationComplete { items_migrated: total_steps }); diff --git a/pallet/migration/src/mock.rs b/pallet/migration/src/mock.rs index 77e691e08..52a56a8e6 100644 --- a/pallet/migration/src/mock.rs +++ b/pallet/migration/src/mock.rs @@ -27,13 +27,17 @@ construct_runtime!( System: frame_system, Assets: pallet_assets, Balances: pallet_balances, + AssetsExt: pallet_assets_ext, Migration: pallet_migration, + Nft: pallet_nft, } ); impl_frame_system_config!(Test); impl_pallet_assets_config!(Test); impl_pallet_balance_config!(Test); +impl_pallet_assets_ext_config!(Test); +impl_pallet_nft_config!(Test); pub const WEIGHT_PER_MIGRATION: u64 = 1000; @@ -83,7 +87,7 @@ impl MigrationStep for MockMigration { let new_value = (value + 1).to_string(); TestMap::::insert(key, new_value); let last_key = old::TestMap::::hashed_key_for(key); - MigrationStepResult::continue_step(Self::max_step_weight(), last_key) + MigrationStepResult::continue_step(Self::max_step_weight(), Some(last_key)) } else { MigrationStepResult::finish_step(Self::max_step_weight()) } diff --git a/pallet/migration/src/tests.rs b/pallet/migration/src/tests.rs index 809eb217e..35da1f95d 100644 --- a/pallet/migration/src/tests.rs +++ b/pallet/migration/src/tests.rs @@ -183,7 +183,7 @@ mod on_runtime_upgrade { let used_weight = Migration::on_runtime_upgrade(); // Check storage updated assert_eq!(Status::::get(), MigrationStatus::InProgress { steps_done: 0 }); - System::assert_has_event(Event::MigrationStarted.into()); + System::assert_has_event(Event::MigrationSet.into()); assert_eq!(used_weight, DbWeight::get().reads_writes(1, 1)); // Ensure migrated should fail as the migration is now in progress assert_noop!(Pallet::::ensure_migrated(), Error::::MigrationInProgress); @@ -248,6 +248,7 @@ mod migrate { verify_new_data(data_count, None); assert_eq!(Status::::get(), MigrationStatus::Completed);
assert_ok!(Pallet::::ensure_migrated()); + assert_eq!(MigrationEnabled::::get(), false); assert!(LastKey::::get().is_none()); assert!(::CurrentMigration::version_check()); }); diff --git a/pallet/migration/src/weights.rs b/pallet/migration/src/weights.rs index fdac2b440..1a1e14155 100644 --- a/pallet/migration/src/weights.rs +++ b/pallet/migration/src/weights.rs @@ -18,7 +18,7 @@ //! Autogenerated weights for pallet_migration //! //! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 4.0.0-dev -//! DATE: 2024-10-29, STEPS: `50`, REPEAT: 20, LOW RANGE: `[]`, HIGH RANGE: `[]` +//! DATE: 2025-03-05, STEPS: `50`, REPEAT: 20, LOW RANGE: `[]`, HIGH RANGE: `[]` //! HOSTNAME: `ip-172-31-102-147`, CPU: `Intel(R) Xeon(R) CPU E5-2686 v4 @ 2.30GHz` //! EXECUTION: , WASM-EXECUTION: Compiled, CHAIN: Some("dev"), DB CACHE: 1024 @@ -48,7 +48,7 @@ use sp_std::marker::PhantomData; /// Weight functions needed for pallet_migration. pub trait WeightInfo { fn migrate() -> Weight; - fn current_migration_step() -> Weight; + fn current_migration_step(p: u32, ) -> Weight; fn enable_migration() -> Weight; fn set_block_delay() -> Weight; fn set_block_limit() -> Weight; @@ -59,7 +59,7 @@ pub struct SubstrateWeight(PhantomData); impl WeightInfo for SubstrateWeight { // Storage: `Migration::Status` (r:1 w:1) // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) - // Storage: `Migration::MigrationEnabled` (r:1 w:0) + // Storage: `Migration::MigrationEnabled` (r:1 w:1) // Proof: `Migration::MigrationEnabled` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Migration::BlockDelay` (r:1 w:0) // Proof: `Migration::BlockDelay` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) @@ -67,39 +67,52 @@ impl WeightInfo for SubstrateWeight { // Proof: `Migration::BlockLimit` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Migration::LastKey` (r:1 w:1) // Proof: `Migration::LastKey` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) - // Storage: `Xls20::Xls20TokenMap` (r:1 w:0) - // Proof: `Xls20::Xls20TokenMap` (`max_values`: None, `max_size`: Some(56), added: 2531, mode: `MaxEncodedLen`) - // Storage: UNKNOWN KEY `0x28fc2cbf777640e8e3e472d285713c8d4e7b9012096b41c4eb3aaf947f6ea429` (r:0 w:1) - // Proof: UNKNOWN KEY `0x28fc2cbf777640e8e3e472d285713c8d4e7b9012096b41c4eb3aaf947f6ea429` (r:0 w:1) + // Storage: `Nft::CollectionInfo` (r:1 w:0) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) + // Storage: UNKNOWN KEY `0xf43ffbe61ef468749d3617ac1a63c4b74e7b9012096b41c4eb3aaf947f6ea429` (r:0 w:1) + // Proof: UNKNOWN KEY `0xf43ffbe61ef468749d3617ac1a63c4b74e7b9012096b41c4eb3aaf947f6ea429` (r:0 w:1) fn migrate() -> Weight { - Weight::from_all(45_461_000 as u64) - .saturating_add(T::DbWeight::get().reads(6 as u64)) - .saturating_add(T::DbWeight::get().writes(3 as u64)) + Weight::from_all(46_909_000) + .saturating_add(T::DbWeight::get().reads(6)) + .saturating_add(T::DbWeight::get().writes(4)) } - // Storage: `Xls20::Xls20TokenMap` (r:2 w:1) - // Proof: `Xls20::Xls20TokenMap` (`max_values`: None, `max_size`: Some(56), added: 2531, mode: `MaxEncodedLen`) - fn current_migration_step() -> Weight { - Weight::from_all(31_525_000 as u64) - .saturating_add(T::DbWeight::get().reads(2 as u64)) - .saturating_add(T::DbWeight::get().writes(1 as u64)) + // Storage: `Nft::CollectionInfo` (r:2 w:1) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), 
added: 2969, mode: `MaxEncodedLen`) + // Storage: UNKNOWN KEY `0xf43ffbe61ef468749d3617ac1a63c4b70d96583a751bd644fd42252931d83e5f` (r:50 w:50) + // Proof: UNKNOWN KEY `0xf43ffbe61ef468749d3617ac1a63c4b70d96583a751bd644fd42252931d83e5f` (r:50 w:50) + // Storage: UNKNOWN KEY `0xf43ffbe61ef468749d3617ac1a63c4b726db73e92fe0bb513cac8d5ccc97e899` (r:50 w:50) + // Proof: UNKNOWN KEY `0xf43ffbe61ef468749d3617ac1a63c4b726db73e92fe0bb513cac8d5ccc97e899` (r:50 w:50) + // Storage: `Nft::OwnedTokens` (r:1 w:1) + // Proof: `Nft::OwnedTokens` (`max_values`: None, `max_size`: Some(4000052), added: 4002527, mode: `MaxEncodedLen`) + // Storage: `Nft::TokenInfo` (r:0 w:50) + // Proof: `Nft::TokenInfo` (`max_values`: None, `max_size`: Some(65), added: 2540, mode: `MaxEncodedLen`) + /// The range of component `p` is `[1, 50]`. + fn current_migration_step(p: u32, ) -> Weight { + Weight::from_all(45_451_449) + // Standard Error: 20_013 + .saturating_add(Weight::from_all(16_452_621_u64).saturating_mul(p as u64)) + .saturating_add(T::DbWeight::get().reads(3)) + .saturating_add(T::DbWeight::get().reads((2_u64).saturating_mul(p as u64))) + .saturating_add(T::DbWeight::get().writes(2)) + .saturating_add(T::DbWeight::get().writes((3_u64).saturating_mul(p as u64))) } // Storage: `Migration::MigrationEnabled` (r:0 w:1) // Proof: `Migration::MigrationEnabled` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) fn enable_migration() -> Weight { - Weight::from_all(22_368_000 as u64) - .saturating_add(T::DbWeight::get().writes(1 as u64)) + Weight::from_all(36_263_000) + .saturating_add(T::DbWeight::get().writes(1)) } // Storage: `Migration::BlockDelay` (r:0 w:1) // Proof: `Migration::BlockDelay` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) fn set_block_delay() -> Weight { - Weight::from_all(22_928_000 as u64) - .saturating_add(T::DbWeight::get().writes(1 as u64)) + Weight::from_all(23_123_000) + .saturating_add(T::DbWeight::get().writes(1)) } // Storage: `Migration::BlockLimit` (r:0 w:1) // Proof: `Migration::BlockLimit` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) fn set_block_limit() -> Weight { - Weight::from_all(22_532_000 as u64) - .saturating_add(T::DbWeight::get().writes(1 as u64)) + Weight::from_all(22_991_000) + .saturating_add(T::DbWeight::get().writes(1)) } } @@ -107,7 +120,7 @@ impl WeightInfo for SubstrateWeight { impl WeightInfo for () { // Storage: `Migration::Status` (r:1 w:1) // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) - // Storage: `Migration::MigrationEnabled` (r:1 w:0) + // Storage: `Migration::MigrationEnabled` (r:1 w:1) // Proof: `Migration::MigrationEnabled` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Migration::BlockDelay` (r:1 w:0) // Proof: `Migration::BlockDelay` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) @@ -115,39 +128,52 @@ impl WeightInfo for () { // Proof: `Migration::BlockLimit` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Migration::LastKey` (r:1 w:1) // Proof: `Migration::LastKey` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) - // Storage: `Xls20::Xls20TokenMap` (r:1 w:0) - // Proof: `Xls20::Xls20TokenMap` (`max_values`: None, `max_size`: Some(56), added: 2531, mode: `MaxEncodedLen`) - // Storage: UNKNOWN KEY `0x28fc2cbf777640e8e3e472d285713c8d4e7b9012096b41c4eb3aaf947f6ea429` (r:0 w:1) - // Proof: UNKNOWN KEY `0x28fc2cbf777640e8e3e472d285713c8d4e7b9012096b41c4eb3aaf947f6ea429` (r:0 w:1) + // Storage: 
`Nft::CollectionInfo` (r:1 w:0) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) + // Storage: UNKNOWN KEY `0xf43ffbe61ef468749d3617ac1a63c4b74e7b9012096b41c4eb3aaf947f6ea429` (r:0 w:1) + // Proof: UNKNOWN KEY `0xf43ffbe61ef468749d3617ac1a63c4b74e7b9012096b41c4eb3aaf947f6ea429` (r:0 w:1) fn migrate() -> Weight { - Weight::from_all(45_461_000 as u64) - .saturating_add(RocksDbWeight::get().reads(6 as u64)) - .saturating_add(RocksDbWeight::get().writes(3 as u64)) + Weight::from_all(46_909_000) + .saturating_add(RocksDbWeight::get().reads(6)) + .saturating_add(RocksDbWeight::get().writes(4)) } - // Storage: `Xls20::Xls20TokenMap` (r:2 w:1) - // Proof: `Xls20::Xls20TokenMap` (`max_values`: None, `max_size`: Some(56), added: 2531, mode: `MaxEncodedLen`) - fn current_migration_step() -> Weight { - Weight::from_all(31_525_000 as u64) - .saturating_add(RocksDbWeight::get().reads(2 as u64)) - .saturating_add(RocksDbWeight::get().writes(1 as u64)) + // Storage: `Nft::CollectionInfo` (r:2 w:1) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) + // Storage: UNKNOWN KEY `0xf43ffbe61ef468749d3617ac1a63c4b70d96583a751bd644fd42252931d83e5f` (r:50 w:50) + // Proof: UNKNOWN KEY `0xf43ffbe61ef468749d3617ac1a63c4b70d96583a751bd644fd42252931d83e5f` (r:50 w:50) + // Storage: UNKNOWN KEY `0xf43ffbe61ef468749d3617ac1a63c4b726db73e92fe0bb513cac8d5ccc97e899` (r:50 w:50) + // Proof: UNKNOWN KEY `0xf43ffbe61ef468749d3617ac1a63c4b726db73e92fe0bb513cac8d5ccc97e899` (r:50 w:50) + // Storage: `Nft::OwnedTokens` (r:1 w:1) + // Proof: `Nft::OwnedTokens` (`max_values`: None, `max_size`: Some(4000052), added: 4002527, mode: `MaxEncodedLen`) + // Storage: `Nft::TokenInfo` (r:0 w:50) + // Proof: `Nft::TokenInfo` (`max_values`: None, `max_size`: Some(65), added: 2540, mode: `MaxEncodedLen`) + /// The range of component `p` is `[1, 50]`. 
+ fn current_migration_step(p: u32, ) -> Weight { + Weight::from_all(45_451_449) + // Standard Error: 20_013 + .saturating_add(Weight::from_all(16_452_621_u64).saturating_mul(p as u64)) + .saturating_add(RocksDbWeight::get().reads(3)) + .saturating_add(RocksDbWeight::get().reads((2_u64).saturating_mul(p as u64))) + .saturating_add(RocksDbWeight::get().writes(2)) + .saturating_add(RocksDbWeight::get().writes((3_u64).saturating_mul(p as u64))) } // Storage: `Migration::MigrationEnabled` (r:0 w:1) // Proof: `Migration::MigrationEnabled` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) fn enable_migration() -> Weight { - Weight::from_all(22_368_000 as u64) - .saturating_add(RocksDbWeight::get().writes(1 as u64)) + Weight::from_all(36_263_000) + .saturating_add(RocksDbWeight::get().writes(1)) } // Storage: `Migration::BlockDelay` (r:0 w:1) // Proof: `Migration::BlockDelay` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) fn set_block_delay() -> Weight { - Weight::from_all(22_928_000 as u64) - .saturating_add(RocksDbWeight::get().writes(1 as u64)) + Weight::from_all(23_123_000) + .saturating_add(RocksDbWeight::get().writes(1)) } // Storage: `Migration::BlockLimit` (r:0 w:1) // Proof: `Migration::BlockLimit` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) fn set_block_limit() -> Weight { - Weight::from_all(22_532_000 as u64) - .saturating_add(RocksDbWeight::get().writes(1 as u64)) + Weight::from_all(22_991_000) + .saturating_add(RocksDbWeight::get().writes(1)) } } diff --git a/pallet/nfi/src/mock.rs b/pallet/nfi/src/mock.rs index a964d970d..3f888e47a 100644 --- a/pallet/nfi/src/mock.rs +++ b/pallet/nfi/src/mock.rs @@ -61,6 +61,7 @@ impl pallet_nft::Config for Test { type Xls20MintRequest = (); type NFIRequest = Nfi; type MaxPendingIssuances = MaxPendingIssuances; + type Migrator = (); } parameter_types! { diff --git a/pallet/nft-peg/src/benchmarking.rs b/pallet/nft-peg/src/benchmarking.rs index 254b9cfa9..e5e887fb6 100644 --- a/pallet/nft-peg/src/benchmarking.rs +++ b/pallet/nft-peg/src/benchmarking.rs @@ -23,7 +23,9 @@ use crate::{BlockedTokens, EthToRootNft, NextBlockedMintId, Pallet as NftPeg, Ro use frame_benchmarking::{account as bench_account, benchmarks, impl_benchmark_test_suite}; use frame_support::assert_ok; use frame_system::RawOrigin; -use pallet_nft::{CollectionInfo, CollectionInformation, Pallet as Nft}; +use pallet_nft::{ + CollectionInfo, CollectionInformation, Pallet as Nft, TokenInfo as TokenInformation, +}; use seed_primitives::{CrossChainCompatibility, MetadataScheme}; use sp_std::vec; @@ -66,15 +68,14 @@ benchmarks! { assert_ok!(NftPeg::do_deposit(token_info, alice.clone().into())); // Sanity Check - let collection_info: CollectionInformation = CollectionInfo::::get(coll_id).expect("Collection exists"); for serial_id in &serial_numbers { - assert!(collection_info.token_exists(*serial_id)); + assert!(TokenInformation::::contains_key(coll_id, *serial_id)); } }: _(origin::(&alice), collection_ids, bounded_serial_numbers, alice.clone().into()) verify { - for serial_id in serial_numbers { - assert!(collection_info.token_exists(serial_id)); + for serial_id in &serial_numbers { + assert!(!TokenInformation::::contains_key(coll_id, *serial_id)); } } @@ -102,7 +103,6 @@ benchmarks! 
{ next_serial_number: 1_000_000_001_u32, collection_issuance: 1_000_000_000_u32, cross_chain_compatibility: CrossChainCompatibility::default(), - owned_tokens: BoundedVec::truncate_from(vec![]), }; CollectionInfo::::insert(collection_id, collection_info); diff --git a/pallet/nft/src/benchmarking.rs b/pallet/nft/src/benchmarking.rs index 39d9de3af..78e283265 100644 --- a/pallet/nft/src/benchmarking.rs +++ b/pallet/nft/src/benchmarking.rs @@ -60,22 +60,45 @@ benchmarks! { claim_unowned_collection { let collection_id = build_collection::(Some(Nft::::account_id())); }: _(RawOrigin::Root, collection_id, account::("Alice")) + verify { + let collection_info = CollectionInfo::::get(collection_id).unwrap(); + assert_eq!(collection_info.owner, account::("Alice")); + } set_owner { let collection_id = build_collection::(None); }: _(origin::(&account::("Alice")), collection_id, account::("Bob")) + verify { + let collection_info = CollectionInfo::::get(collection_id).unwrap(); + assert_eq!(collection_info.owner, account::("Bob")); + } set_max_issuance { let collection_id = build_collection::(None); }: _(origin::(&account::("Alice")), collection_id, 32) + verify { + let collection_info = CollectionInfo::::get(collection_id).unwrap(); + assert_eq!(collection_info.max_issuance, Some(32)); + } set_base_uri { let collection_id = build_collection::(None); - }: _(origin::(&account::("Alice")), collection_id, "https://example.com/tokens/".into()) + let metadata_uri: Vec = "https://example.com/tokens/".into(); + }: _(origin::(&account::("Alice")), collection_id, metadata_uri.clone()) + verify { + let collection_info = CollectionInfo::::get(collection_id).unwrap(); + let metadata_scheme: MetadataScheme = metadata_uri.as_slice().try_into().unwrap(); + assert_eq!(collection_info.metadata_scheme, metadata_scheme); + } set_name { let collection_id = build_collection::(None); - }: _(origin::(&account::("Alice")), collection_id, BoundedVec::truncate_from("New Name".encode())) + let name = BoundedVec::truncate_from("New Name".encode()); + }: _(origin::(&account::("Alice")), collection_id, name.clone()) + verify { + let collection_info = CollectionInfo::::get(collection_id).unwrap(); + assert_eq!(collection_info.name, name); + } set_royalties_schedule { let collection_id = build_collection::(None); @@ -83,25 +106,67 @@ benchmarks! { let royalties_schedule = RoyaltiesSchedule { entitlements: BoundedVec::truncate_from(vec![(collection_owner, Permill::one())]), }; - }: _(origin::(&account::("Alice")), collection_id, royalties_schedule) + }: _(origin::(&account::("Alice")), collection_id, royalties_schedule.clone()) + verify { + let collection_info = CollectionInfo::::get(collection_id).unwrap(); + assert_eq!(collection_info.royalties_schedule, Some(royalties_schedule)); + } create_collection { + let p in 1 .. 
(500); + let collection_id = Nft::::next_collection_uuid().unwrap(); let metadata = MetadataScheme::try_from(b"https://google.com/".as_slice()).unwrap(); let ccc = CrossChainCompatibility { xrpl: false }; - }: _(origin::(&account::("Alice")), BoundedVec::truncate_from("Collection".encode()), 0, None, None, metadata, None, ccc) + }: _(origin::(&account::("Alice")), BoundedVec::truncate_from("Collection".encode()), p, None, None, metadata, None, ccc) + verify { + let collection_info = CollectionInfo::::get(collection_id); + assert_eq!(collection_info.unwrap().collection_issuance, p); + } toggle_public_mint { let collection_id = build_collection::(None); }: _(origin::(&account::("Alice")), collection_id, true) + verify { + assert!(PublicMintInfo::::get(collection_id).unwrap().enabled); + } set_mint_fee { let collection_id = build_collection::(None); let pricing_details = Some((1, 100)); }: _(origin::(&account::("Alice")), collection_id, pricing_details) + verify { + assert_eq!(PublicMintInfo::::get(collection_id).unwrap().pricing_details, pricing_details); + } mint { - let collection_id = build_collection::(None); - }: _(origin::(&account::("Alice")), collection_id, 1, None) + let p in 1 .. (500); + let owner = account::("Alice"); + let collection_id = build_collection::(Some(owner.clone())); + let asset_id = 1; + let mint_fee: Balance = 100; + + // Toggle public mint to traverse worst case scenario + assert_ok!(Nft::::toggle_public_mint( + origin::(&owner).into(), + collection_id, + true + )); + assert_ok!(Nft::::set_mint_fee( + origin::(&owner).into(), + collection_id, + Some((1, 100)) + )); + + // fund the mint account + let minter = account::("Bob"); + assert_ok!(T::MultiCurrency::mint_into(asset_id, &minter, mint_fee * 5u128 * p as u128)); + }: _(origin::(&minter), collection_id, p, Some(minter.clone())) + verify { + assert_eq!(Nft::::token_balance_of(&minter, collection_id), p); + for i in 1..=p { + assert_eq!(TokenInfo::::get(collection_id, i).unwrap().owner, minter); + } + } transfer { let collection_id = build_collection::(None); @@ -116,15 +181,18 @@ benchmarks! { let serial_numbers = BoundedVec::try_from(serial_numbers).unwrap(); }: _(origin::(&account::("Alice")), collection_id, serial_numbers.clone(), account::("Bob")) verify { - let collection_info = CollectionInfo::::get(collection_id).expect("Collection not found"); for serial_number in serial_numbers.iter() { - assert!(collection_info.is_token_owner(&account::("Bob"), *serial_number)); + assert_eq!(TokenInfo::::get(collection_id, *serial_number).unwrap().owner, account::("Bob")); } } burn { let collection_id = build_collection::(None); + assert!(TokenInfo::::get(collection_id, 0).is_some()); }: _(origin::(&account::("Alice")), TokenId::from((collection_id, 0))) + verify { + assert!(TokenInfo::::get(collection_id, 0).is_none()); + } set_utility_flags { let collection_id = build_collection::(None); @@ -143,27 +211,25 @@ benchmarks! 
{ let token_id = (collection_id, 0); }: _(origin::(&account::("Alice")), token_id, true) verify { - assert_eq!(TokenUtilityFlags::::get(token_id).transferable, true); + assert!(TokenInfo::::get(collection_id, 0).unwrap().utility_flags.transferable); } issue_soulbound { let collection_id = build_collection::(None); + let issuance_id = NextIssuanceId::::get(); }: _(origin::(&account::("Alice")), collection_id, 1, account::("Bob"), TokenBurnAuthority::Both) verify { - let collection_issuances = - PendingIssuances::::get(collection_id).pending_issuances; - - let pending_issuances = &collection_issuances[0].1; - assert_eq!( - pending_issuances.len(), - 1, + PendingIssuances::::get((collection_id, &account::("Bob"), issuance_id)).unwrap(), + PendingIssuance { + quantity: 1, + burn_authority: TokenBurnAuthority::Both, + }, ) } accept_soulbound_issuance { let collection_id = build_collection::(None); - let receiver = account::("Bob"); assert_ok!(Nft::::issue_soulbound( @@ -175,8 +241,7 @@ benchmarks! { )); }: _(origin::(&receiver.clone()), collection_id, 0) verify { - let collection_info = CollectionInfo::::get(collection_id).expect("Collection not found"); - assert!(collection_info.is_token_owner(&receiver, 1)) + assert_eq!(TokenInfo::::get(collection_id, 1).unwrap().owner, receiver); } } diff --git a/pallet/nft/src/impls.rs b/pallet/nft/src/impls.rs index 92aa3343b..d3de5d3aa 100644 --- a/pallet/nft/src/impls.rs +++ b/pallet/nft/src/impls.rs @@ -24,7 +24,7 @@ use precompile_utils::constants::ERC721_PRECOMPILE_ADDRESS_PREFIX; use seed_pallet_common::{ log, utils::{next_asset_uuid, HasBurnAuthority, PublicMintInformation}, - NFTExt, NFTMinter, OnNewAssetSubscriber, OnTransferSubscriber, + Migrator, NFTExt, NFTMinter, OnNewAssetSubscriber, OnTransferSubscriber, }; use seed_primitives::{ CollectionUuid, MetadataScheme, OriginChain, RoyaltiesSchedule, SerialNumber, TokenCount, @@ -35,7 +35,6 @@ use sp_runtime::{ traits::Zero, ArithmeticError, BoundedVec, DispatchError, DispatchResult, Permill, SaturatedConversion, }; -use sp_std::vec; impl Pallet { /// Returns the CollectionUuid unique across parachains @@ -49,22 +48,20 @@ impl Pallet { /// Return whether the collection exists or not pub fn collection_exists(collection_id: CollectionUuid) -> bool { + if T::Migrator::ensure_migrated().is_err() { + return false; + } >::contains_key(collection_id) } /// Returns number of tokens owned by an account in a collection /// Used by the ERC721 precompile for balance_of pub fn token_balance_of(who: &T::AccountId, collection_id: CollectionUuid) -> TokenCount { - match >::get(collection_id) { - Some(collection_info) => { - let serial_numbers: Vec = collection_info - .owned_tokens - .into_iter() - .find(|token_ownership| &token_ownership.owner == who) - .map(|token_ownership| token_ownership.owned_serials.clone().into_inner()) - .unwrap_or_default(); - serial_numbers.len() as TokenCount - }, + if T::Migrator::ensure_migrated().is_err() { + return 0; + } + match >::get(who, collection_id) { + Some(owned_tokens) => owned_tokens.len() as TokenCount, None => TokenCount::zero(), } } @@ -72,6 +69,9 @@ impl Pallet { /// Construct & return the full metadata URI for a given `token_id` (analogous to ERC721 /// metadata token_uri) pub fn token_uri(token_id: TokenId) -> Vec { + if T::Migrator::ensure_migrated().is_err() { + return Default::default(); + } let Some(collection_info) = >::get(token_id.0) else { // should not happen log!(warn, "🃏 Unexpected empty metadata scheme: {:?}", token_id); @@ -88,51 +88,88 @@ impl Pallet { 
current_owner: &T::AccountId, new_owner: &T::AccountId, ) -> DispatchResult { + T::Migrator::ensure_migrated()?; ensure!(current_owner != new_owner, Error::::InvalidNewOwner); ensure!( >::get(collection_id).transferable, Error::::TransferUtilityBlocked ); + ensure!(CollectionInfo::::contains_key(collection_id), Error::::NoCollectionFound); - CollectionInfo::::try_mutate(collection_id, |maybe_collection_info| -> DispatchResult { - let collection_info = - maybe_collection_info.as_mut().ok_or(Error::::NoCollectionFound)?; + // Update `TokenOwner` mapping and check token level restrictions + for &serial_number in &serial_numbers { + TokenInfo::::try_mutate( + collection_id, + serial_number, + |token_info| -> DispatchResult { + let token_info = token_info.as_mut().ok_or(Error::::NoToken)?; + ensure!(token_info.owner == current_owner.clone(), Error::::NotTokenOwner); + ensure!(token_info.lock_status.is_none(), Error::::TokenLocked); + ensure!( + token_info.utility_flags.transferable, + Error::::TransferUtilityBlocked + ); + // Check if soulbound + ensure!( + token_info.utility_flags.burn_authority.is_none(), + Error::::TransferUtilityBlocked + ); + token_info.owner = new_owner.clone(); + Ok(()) + }, + )?; + } - // Check ownership anddo_ locks - for serial_number in serial_numbers.iter() { - ensure!( - collection_info.is_token_owner(current_owner, *serial_number), - Error::::NotTokenOwner - ); - ensure!( - !>::contains_key((collection_id, serial_number)), - Error::::TokenLocked - ); - let token_utility_flags = - >::get((collection_id, serial_number)); - ensure!(token_utility_flags.transferable, Error::::TransferUtilityBlocked); - ensure!( - token_utility_flags.burn_authority.is_none(), - Error::::TransferUtilityBlocked - ); - } + // Update `OwnedTokens` for current owner + OwnedTokens::::try_mutate( + current_owner, + collection_id, + |maybe_owned_serials| -> DispatchResult { + if let Some(owned_serials) = maybe_owned_serials { + owned_serials.retain(|serial| !serial_numbers.contains(serial)); + // If no tokens remain, remove the entry completely + if owned_serials.is_empty() { + *maybe_owned_serials = None; + } + } else { + Err(Error::::NotTokenOwner)?; + } + Ok(()) + }, + )?; + + // Update `OwnedTokens` for new owner + OwnedTokens::::try_mutate( + new_owner, + collection_id, + |owned_serials| -> DispatchResult { + match owned_serials.as_mut() { + Some(owned_serials) => { + for &serial_number in &serial_numbers { + owned_serials + .try_push(serial_number) + .map_err(|_| Error::::TokenLimitExceeded)?; + } + }, + None => { + *owned_serials = Some(serial_numbers.clone()); + }, + } + Ok(()) + }, + )?; - collection_info.remove_user_tokens(current_owner, serial_numbers.clone()); - collection_info - .add_user_tokens(new_owner, serial_numbers.clone()) - .map_err(Error::::from)?; + for serial_number in &serial_numbers { + T::OnTransferSubscription::on_nft_transfer(&(collection_id, *serial_number)); + } - for serial_number in serial_numbers.clone().iter() { - T::OnTransferSubscription::on_nft_transfer(&(collection_id, *serial_number)); - } - Self::deposit_event(Event::::Transfer { - previous_owner: current_owner.clone(), - collection_id, - serial_numbers: serial_numbers.into_inner(), - new_owner: new_owner.clone(), - }); - Ok(()) - }) + Self::deposit_event(Event::::Transfer { + previous_owner: current_owner.clone(), + collection_id, + serial_numbers: serial_numbers.into_inner(), + new_owner: new_owner.clone(), + }); + Ok(()) } /// Mint additional tokens in a collection @@ -144,6 +181,7 @@ impl Pallet { 
collection_id: CollectionUuid, serial_numbers: Vec, ) -> WeightedDispatchResult { + T::Migrator::ensure_migrated().map_err(|e| (Weight::zero(), e))?; if serial_numbers.is_empty() { return Ok(Weight::zero()); }; @@ -162,7 +200,7 @@ impl Pallet { serial_numbers_trimmed = serial_numbers_trimmed .into_iter() .filter(|serial_number| { - if collection_info.token_exists(*serial_number) { + if TokenInfo::::contains_key(collection_id, *serial_number) { // Since we don't want to error, throw a warning instead. // If we error, then some tokens may be lost log!( @@ -182,7 +220,13 @@ impl Pallet { BoundedVec::try_from(serial_numbers_trimmed); match serial_numbers { Ok(serial_numbers) => { - let mint = Self::do_mint(collection_id, collection_info, owner, &serial_numbers); + let mint = Self::do_mint( + collection_id, + collection_info, + owner, + &serial_numbers, + TokenFlags::default(), + ); if mint.is_ok() { // throw event, listing all serial numbers minted from bridging @@ -208,11 +252,7 @@ impl Pallet { /// Returns a bounded vec of serial numbers to mint. pub fn pre_mint( collection_id: CollectionUuid, - collection_info: &mut CollectionInformation< - T::AccountId, - T::MaxTokensPerCollection, - T::StringLimit, - >, + collection_info: &mut CollectionInformation, quantity: TokenCount, ) -> Result, DispatchError> { ensure!(quantity <= T::MintLimit::get(), Error::::MintLimitExceeded); @@ -298,14 +338,12 @@ impl Pallet { /// Perform the mint operation and update storage accordingly. pub(crate) fn do_mint( collection_id: CollectionUuid, - collection_info: CollectionInformation< - T::AccountId, - T::MaxTokensPerCollection, - T::StringLimit, - >, + collection_info: CollectionInformation, token_owner: &T::AccountId, serial_numbers: &BoundedVec, + utility_flags: TokenFlags, ) -> DispatchResult { + T::Migrator::ensure_migrated()?; let mut new_collection_info = collection_info; // Update collection issuance new_collection_info.collection_issuance = new_collection_info @@ -318,9 +356,32 @@ impl Pallet { Error::::TokenLimitExceeded ); - new_collection_info - .add_user_tokens(token_owner, serial_numbers.clone()) - .map_err(Error::::from)?; + // Update `TokenInfo` mapping + for serial_number in serial_numbers { + let token_info = TokenInformation::new(token_owner.clone(), utility_flags); + TokenInfo::::insert(collection_id, serial_number, token_info); + } + + // Update `OwnedTokens` + OwnedTokens::::try_mutate( + token_owner, + collection_id, + |owned_serials| -> DispatchResult { + match owned_serials.as_mut() { + Some(owned_serials) => { + for serial_number in serial_numbers { + owned_serials + .try_push(*serial_number) + .map_err(|_| Error::::TokenLimitExceeded)?; + } + }, + None => { + *owned_serials = Some(serial_numbers.clone()); + }, + } + Ok(()) + }, + )?; // Update CollectionInfo storage >::insert(collection_id, new_collection_info); @@ -337,22 +398,14 @@ impl Pallet { cursor: SerialNumber, limit: u16, ) -> (SerialNumber, TokenCount, Vec) { - let collection_info = match >::get(collection_id) { - Some(info) => info, + if T::Migrator::ensure_migrated().is_err() { + return (Default::default(), Default::default(), Default::default()); + } + let mut owned_tokens = match >::get(who, collection_id) { + Some(tokens) => tokens, None => return (Default::default(), Default::default(), Default::default()), }; - // Collect all tokens owned by address - let mut owned_tokens: Vec = match collection_info - .owned_tokens - .into_inner() - .iter() - .find(|token_ownership| &token_ownership.owner == who) - { - 
Some(token_ownership) => token_ownership.owned_serials.clone().into_inner(), - None => vec![], - }; - // Sort the vec to ensure no tokens are missed owned_tokens.sort(); // Store the last owned token by this account @@ -394,6 +447,7 @@ impl Pallet { where ::AccountId: core::default::Default, { + T::Migrator::ensure_migrated()?; let collection_info = >::get(collection_id).ok_or(Error::::NoCollectionFound)?; let collection_info = collection_info; @@ -436,6 +490,7 @@ impl Pallet { origin_chain: OriginChain, cross_chain_compatibility: CrossChainCompatibility, ) -> Result { + T::Migrator::ensure_migrated()?; // Check we can issue the new tokens let collection_uuid = Self::next_collection_uuid()?; @@ -463,8 +518,7 @@ impl Pallet { ensure!(royalties_schedule.validate(), Error::::RoyaltiesInvalid); } - // Now mint the collection tokens - let mut owned_tokens = BoundedVec::default(); + // Mint the collection tokens if initial_issuance > Zero::zero() { ensure!(initial_issuance <= T::MintLimit::get(), Error::::MintLimitExceeded); // XLS-20 compatible collections cannot have an initial issuance @@ -472,16 +526,40 @@ impl Pallet { // Instead the user should specify 0 initial_issuance and use the mint function to // mint tokens ensure!(!cross_chain_compatibility.xrpl, Error::::InitialIssuanceNotZero); + // mint initial tokens to token_owner or owner let token_owner = token_owner.unwrap_or(owner.clone()); let serial_numbers_unbounded: Vec = (0..initial_issuance).collect(); let serial_numbers: BoundedVec = BoundedVec::try_from(serial_numbers_unbounded) .map_err(|_| Error::::TokenLimitExceeded)?; - // Create token_ownership object with token_owner and initial serial_numbers - let token_ownership = TokenOwnership::new(token_owner, serial_numbers); - owned_tokens = BoundedVec::try_from(vec![token_ownership]) - .map_err(|_| Error::::TokenLimitExceeded)?; + + // Update `TokenInfo` mapping + for &serial_number in &serial_numbers { + let token_info = TokenInformation::new(token_owner.clone(), TokenFlags::default()); + TokenInfo::::insert(collection_uuid, serial_number, token_info); + } + + // Update `OwnedTokens` + OwnedTokens::::try_mutate( + token_owner, + collection_uuid, + |owned_serials| -> DispatchResult { + match owned_serials.as_mut() { + Some(owned_serials) => { + for serial_number in serial_numbers { + owned_serials + .try_push(serial_number) + .map_err(|_| Error::::TokenLimitExceeded)?; + } + }, + None => { + *owned_serials = Some(serial_numbers.clone()); + }, + } + Ok(()) + }, + )?; } let collection_info = CollectionInformation { @@ -494,7 +572,6 @@ impl Pallet { next_serial_number: initial_issuance, collection_issuance: initial_issuance, cross_chain_compatibility, - owned_tokens, }; >::insert(collection_uuid, collection_info); @@ -527,46 +604,61 @@ impl Pallet { collection_id: CollectionUuid, serial_number: SerialNumber, ) -> DispatchResult { - ensure!( - !>::contains_key((collection_id, serial_number)), - Error::::TokenLocked - ); + T::Migrator::ensure_migrated()?; ensure!(>::get(collection_id).burnable, Error::::BurnUtilityBlocked); - // Remove any NFI data associated with this token - T::NFIRequest::on_burn((collection_id, serial_number)); - CollectionInfo::::try_mutate(collection_id, |maybe_collection_info| -> DispatchResult { let collection_info = maybe_collection_info.as_mut().ok_or(Error::::NoCollectionFound)?; - if let Some(burn_authority) = - TokenUtilityFlags::::get((collection_id, serial_number)).burn_authority - { - let token_owner = collection_info - .get_token_owner(serial_number) - 
.ok_or(Error::::InvalidBurnAuthority)?; - - ensure!( - burn_authority.has_burn_authority(&collection_info.owner, &token_owner, who,), - Error::::InvalidBurnAuthority - ); - } else { - ensure!( - collection_info.is_token_owner(who, serial_number), - Error::::NotTokenOwner - ); - } - collection_info.collection_issuance = collection_info.collection_issuance.saturating_sub(1); - collection_info.owned_tokens.iter_mut().for_each(|token_ownership| { - if token_ownership.owner == *who { - token_ownership.owned_serials.retain(|&serial| serial != serial_number) - } - }); + + TokenInfo::::try_mutate( + collection_id, + serial_number, + |maybe_token_info| -> DispatchResult { + let token_info = maybe_token_info.as_mut().ok_or(Error::::NoToken)?; + let token_owner = &token_info.owner; + ensure!(token_info.lock_status.is_none(), Error::::TokenLocked); + if let Some(burn_authority) = token_info.utility_flags.burn_authority { + ensure!( + burn_authority.has_burn_authority( + &collection_info.owner, + token_owner, + who, + ), + Error::::InvalidBurnAuthority + ); + } else { + ensure!(token_owner == who, Error::::NotTokenOwner); + } + + *maybe_token_info = None; + Ok(()) + }, + )?; + + OwnedTokens::::try_mutate( + who, + collection_id, + |maybe_owned_serials| -> DispatchResult { + if let Some(owned_serials) = maybe_owned_serials { + owned_serials.retain(|serial| serial != &serial_number); + // If no tokens remain, remove the entry completely + if owned_serials.is_empty() { + *maybe_owned_serials = None; + } + } + Ok(()) + }, + )?; + // Remove approvals for this token T::OnTransferSubscription::on_nft_transfer(&(collection_id, serial_number)); + + // Remove any NFI data associated with this token + T::NFIRequest::on_burn((collection_id, serial_number)); Ok(()) }) } @@ -576,11 +668,12 @@ impl Pallet { who: T::AccountId, collection_id: CollectionUuid, ) -> DispatchResult { + T::Migrator::ensure_migrated()?; let mut collection_info = CollectionInfo::::get(collection_id).ok_or(Error::::NoCollectionFound)?; // Caller must be collection owner - ensure!(collection_info.is_collection_owner(&who), Error::::NotCollectionOwner); + ensure!(collection_info.owner == who, Error::::NotCollectionOwner); // Collection issuance must be 0 (i.e. 
no tokens minted) ensure!( collection_info.collection_issuance.is_zero(), @@ -598,12 +691,10 @@ impl Pallet { collection_id: CollectionUuid, new_owner: T::AccountId, ) -> DispatchResult { + T::Migrator::ensure_migrated()?; let mut collection_info = >::get(collection_id).ok_or(Error::::NoCollectionFound)?; - ensure!( - collection_info.is_collection_owner(&previous_owner), - Error::::NotCollectionOwner - ); + ensure!(collection_info.owner == previous_owner, Error::::NotCollectionOwner); collection_info.owner = new_owner.clone(); >::insert(collection_id, collection_info); Self::deposit_event(Event::::OwnerSet { collection_id, new_owner }); @@ -626,6 +717,7 @@ impl NFTExt for Pallet { quantity: TokenCount, token_owner: Option, ) -> DispatchResult { + T::Migrator::ensure_migrated()?; Self::mint(RawOrigin::Signed(origin).into(), collection_id, quantity, token_owner) } @@ -635,6 +727,7 @@ impl NFTExt for Pallet { serial_numbers: Vec, new_owner: &Self::AccountId, ) -> DispatchResult { + T::Migrator::ensure_migrated()?; let bounded_serials = BoundedVec::try_from(serial_numbers).map_err(|_| Error::::TokenLimitExceeded)?; Self::do_transfer(collection_id, bounded_serials, origin, new_owner) @@ -651,6 +744,7 @@ impl NFTExt for Pallet { origin_chain: OriginChain, cross_chain_compatibility: CrossChainCompatibility, ) -> Result { + T::Migrator::ensure_migrated()?; Self::do_create_collection( owner, name, @@ -665,13 +759,18 @@ impl NFTExt for Pallet { } fn get_token_owner(token_id: &TokenId) -> Option { - let collection = CollectionInfo::::get(token_id.0)?; - collection.get_token_owner(token_id.1) + let token_info = TokenInfo::::get(token_id.0, token_id.1)?; + Some(token_info.owner) + } + + fn token_exists(token_id: &TokenId) -> bool { + TokenInfo::::contains_key(token_id.0, token_id.1) } fn get_collection_issuance( collection_id: CollectionUuid, ) -> Result<(TokenCount, Option), DispatchError> { + T::Migrator::ensure_migrated()?; let collection_info = CollectionInfo::::get(collection_id).ok_or(Error::::NoCollectionFound)?; Ok((collection_info.collection_issuance, collection_info.max_issuance)) @@ -690,12 +789,14 @@ impl NFTExt for Pallet { collection_id: CollectionUuid, new_owner: Self::AccountId, ) -> DispatchResult { + T::Migrator::ensure_migrated()?; Self::do_set_owner(who, collection_id, new_owner) } fn get_royalties_schedule( collection_id: CollectionUuid, ) -> Result>, DispatchError> { + T::Migrator::ensure_migrated()?; let collection_info = CollectionInfo::::get(collection_id).ok_or(Error::::NoCollectionFound)?; Ok(collection_info.royalties_schedule) @@ -705,6 +806,7 @@ impl NFTExt for Pallet { who: Self::AccountId, collection_id: CollectionUuid, ) -> DispatchResult { + T::Migrator::ensure_migrated()?; Self::enable_xls20_compatibility(who, collection_id) } @@ -719,7 +821,10 @@ impl NFTExt for Pallet { } fn get_token_lock(token_id: TokenId) -> Option { - >::get(token_id) + if T::Migrator::ensure_migrated().is_err() { + return None; + } + >::get(token_id.0, token_id.1)?.lock_status } fn set_token_lock( @@ -727,19 +832,28 @@ impl NFTExt for Pallet { lock_reason: TokenLockReason, who: Self::AccountId, ) -> DispatchResult { - ensure!(!>::contains_key(token_id), Error::::TokenLocked); - ensure!(Self::get_token_owner(&token_id) == Some(who), Error::::NotTokenOwner); - >::insert(token_id, lock_reason); - Ok(()) + T::Migrator::ensure_migrated()?; + TokenInfo::::try_mutate(token_id.0, token_id.1, |maybe_token_info| -> DispatchResult { + let token_info = maybe_token_info.as_mut().ok_or(Error::::NoToken)?; + 
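// [editor's note] Lock state now lives on `TokenInformation` itself rather than in the
// old standalone `TokenLocks` map, so the existence, lock and ownership checks in this
// closure are all served by the single `TokenInfo` read above.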
ensure!(token_info.lock_status.is_none(), Error::::TokenLocked); + ensure!(token_info.owner == who, Error::::NotTokenOwner); + token_info.lock_status = Some(lock_reason); + Ok(()) + }) } - fn remove_token_lock(token_id: TokenId) { - >::remove(token_id); + fn remove_token_lock(token_id: TokenId) -> DispatchResult { + TokenInfo::::try_mutate(token_id.0, token_id.1, |maybe_token_info| -> DispatchResult { + let token_info = maybe_token_info.as_mut().ok_or(Error::::NoToken)?; + token_info.lock_status = None; + Ok(()) + }) } fn get_collection_owner( collection_id: CollectionUuid, ) -> Result { + T::Migrator::ensure_migrated()?; let collection_info = CollectionInfo::::get(collection_id).ok_or(Error::::NoCollectionFound)?; Ok(collection_info.owner) @@ -750,12 +864,14 @@ impl NFTExt for Pallet { collection_id: CollectionUuid, serial_number: SerialNumber, ) -> DispatchResult { + T::Migrator::ensure_migrated()?; Self::do_burn(&who, collection_id, serial_number) } fn get_cross_chain_compatibility( collection_id: CollectionUuid, ) -> Result { + T::Migrator::ensure_migrated()?; let collection_info = CollectionInfo::::get(collection_id).ok_or(Error::::NoCollectionFound)?; Ok(collection_info.cross_chain_compatibility) @@ -764,15 +880,12 @@ impl NFTExt for Pallet { impl NFTCollectionInfo for Pallet { type AccountId = T::AccountId; - type MaxTokensPerCollection = T::MaxTokensPerCollection; type StringLimit = T::StringLimit; fn get_collection_info( collection_id: CollectionUuid, - ) -> Result< - CollectionInformation, - DispatchError, - > { + ) -> Result, DispatchError> { + T::Migrator::ensure_migrated()?; CollectionInfo::::get(collection_id).ok_or(Error::::NoCollectionFound.into()) } } @@ -788,6 +901,7 @@ impl NFTMinter for Pallet { collection_id: CollectionUuid, serial_numbers: Vec, ) -> WeightedDispatchResult { + T::Migrator::ensure_migrated().map_err(|e| (Weight::zero(), e))?; Self::mint_bridged_token(owner, collection_id, serial_numbers) } } diff --git a/pallet/nft/src/lib.rs b/pallet/nft/src/lib.rs index 25282f036..971ffa7a8 100644 --- a/pallet/nft/src/lib.rs +++ b/pallet/nft/src/lib.rs @@ -81,9 +81,11 @@ pub mod pallet { use super::{DispatchResult, *}; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; + use seed_pallet_common::Migrator; + use seed_primitives::IssuanceId; /// The current storage version. 
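// [editor's note] The four-version jump (5 -> 9) below accompanies this patch's storage
// rework: `TokenLocks` and `TokenUtilityFlags` fold into the new `TokenInfo` map,
// ownership moves into `OwnedTokens`, and `PendingIssuances` is re-keyed. The
// `T::Migrator::ensure_migrated()` guard threaded through every call presumably errors
// until that migration has completed.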
- const STORAGE_VERSION: StorageVersion = StorageVersion::new(5); + const STORAGE_VERSION: StorageVersion = StorageVersion::new(9); #[pallet::pallet] #[pallet::storage_version(STORAGE_VERSION)] @@ -141,6 +143,8 @@ pub mod pallet { type NFIRequest: NFIRequest; /// Max number of pending issuances for a collection type MaxPendingIssuances: Get; + /// Current Migrator handling the migration of storage values + type Migrator: Migrator; } /// Map from collection to its information @@ -149,7 +153,29 @@ pub mod pallet { _, Twox64Concat, CollectionUuid, - CollectionInformation, + CollectionInformation, + >; + + /// Map from a token to its information, including owner, lock_status and utility_flags + #[pallet::storage] + pub type TokenInfo = StorageDoubleMap< + _, + Twox64Concat, + CollectionUuid, + Twox64Concat, + SerialNumber, + TokenInformation, + >; + + /// All tokens owned by a single account + #[pallet::storage] + pub type OwnedTokens = StorageDoubleMap< + _, + Blake2_128Concat, + T::AccountId, + Twox64Concat, + CollectionUuid, + BoundedVec, >; /// Map from collection to its public minting information @@ -161,29 +187,27 @@ pub mod pallet { #[pallet::storage] pub type NextCollectionId = StorageValue<_, u32, ValueQuery>; - /// Map from a token to lock status if any - #[pallet::storage] - pub type TokenLocks = StorageMap<_, Twox64Concat, TokenId, TokenLockReason>; - /// Map from a collection to additional utility flags #[pallet::storage] pub type UtilityFlags = StorageMap<_, Twox64Concat, CollectionUuid, CollectionUtilityFlags, ValueQuery>; - /// Map from a token_id to transferable and burn authority flags - #[pallet::storage] - pub type TokenUtilityFlags = StorageMap<_, Twox64Concat, TokenId, TokenFlags, ValueQuery>; - // Map from a collection id to a collection's pending issuances #[pallet::storage] - pub type PendingIssuances = StorageMap< + pub type PendingIssuances = StorageNMap< _, - Twox64Concat, - CollectionUuid, - CollectionPendingIssuances, - ValueQuery, + ( + NMapKey, + NMapKey, + NMapKey, + ), + PendingIssuance, >; + /// The next available incrementing issuance ID, unique across all pending issuances + #[pallet::storage] + pub type NextIssuanceId = StorageValue<_, IssuanceId, ValueQuery>; + #[pallet::event] #[pallet::generate_deposit(pub (super) fn deposit_event)] pub enum Event { @@ -263,7 +287,7 @@ pub mod pallet { /// A pending issuance for a soulbound token has been created PendingIssuanceCreated { collection_id: CollectionUuid, - issuance_id: u32, + issuance_id: IssuanceId, token_owner: T::AccountId, quantity: u32, burn_authority: TokenBurnAuthority, @@ -344,17 +368,18 @@ pub mod pallet { #[pallet::call] impl Pallet { - #[pallet::call_index(0)] - #[pallet::weight(T::WeightInfo::claim_unowned_collection())] /// Bridged collections from Ethereum will initially lack an owner. These collections will /// be assigned to the pallet. 
This allows for claiming those collections assuming they were /// assigned to the pallet + #[pallet::call_index(0)] + #[pallet::weight(T::WeightInfo::claim_unowned_collection())] pub fn claim_unowned_collection( origin: OriginFor, collection_id: CollectionUuid, new_owner: T::AccountId, ) -> DispatchResult { ensure_root(origin)?; + T::Migrator::ensure_migrated()?; CollectionInfo::::try_mutate(collection_id, |maybe_collection| -> DispatchResult { let collection = maybe_collection.as_mut().ok_or(Error::::NoCollectionFound)?; @@ -382,6 +407,7 @@ pub mod pallet { new_owner: T::AccountId, ) -> DispatchResult { let who = ensure_signed(origin)?; + T::Migrator::ensure_migrated()?; Self::do_set_owner(who, collection_id, new_owner) } @@ -395,10 +421,11 @@ pub mod pallet { max_issuance: TokenCount, ) -> DispatchResult { let who = ensure_signed(origin)?; + T::Migrator::ensure_migrated()?; let mut collection_info = >::get(collection_id).ok_or(Error::::NoCollectionFound)?; ensure!(!max_issuance.is_zero(), Error::::InvalidMaxIssuance); - ensure!(collection_info.is_collection_owner(&who), Error::::NotCollectionOwner); + ensure!(&collection_info.owner == &who, Error::::NotCollectionOwner); ensure!(collection_info.max_issuance.is_none(), Error::::MaxIssuanceAlreadySet); ensure!( collection_info.collection_issuance <= max_issuance, @@ -423,9 +450,10 @@ pub mod pallet { base_uri: Vec, ) -> DispatchResult { let who = ensure_signed(origin)?; + T::Migrator::ensure_migrated()?; let mut collection_info = >::get(collection_id).ok_or(Error::::NoCollectionFound)?; - ensure!(collection_info.is_collection_owner(&who), Error::::NotCollectionOwner); + ensure!(&collection_info.owner == &who, Error::::NotCollectionOwner); ensure!( !collection_info.cross_chain_compatibility.xrpl, Error::::CannotUpdateMetadata @@ -470,6 +498,7 @@ pub mod pallet { cross_chain_compatibility: CrossChainCompatibility, ) -> DispatchResult { let who = ensure_signed(origin)?; + T::Migrator::ensure_migrated()?; Self::do_create_collection( who, name, @@ -492,10 +521,11 @@ pub mod pallet { enabled: bool, ) -> DispatchResult { let who = ensure_signed(origin)?; + T::Migrator::ensure_migrated()?; let collection_info = >::get(collection_id).ok_or(Error::::NoCollectionFound)?; // Only the owner can make this call - ensure!(collection_info.is_collection_owner(&who), Error::::NotCollectionOwner); + ensure!(&collection_info.owner == &who, Error::::NotCollectionOwner); // Get public mint info and set enabled flag let mut public_mint_info = >::get(collection_id).unwrap_or_default(); @@ -522,10 +552,11 @@ pub mod pallet { pricing_details: Option<(AssetId, Balance)>, ) -> DispatchResult { let who = ensure_signed(origin)?; + T::Migrator::ensure_migrated()?; let collection_info = >::get(collection_id).ok_or(Error::::NoCollectionFound)?; // Only the owner can make this call - ensure!(collection_info.is_collection_owner(&who), Error::::NotCollectionOwner); + ensure!(&collection_info.owner == &who, Error::::NotCollectionOwner); // Get the existing public mint info if it exists let mut public_mint_info = >::get(collection_id).unwrap_or_default(); @@ -563,7 +594,7 @@ pub mod pallet { /// ----------- /// Weight is O(N) where N is `quantity` #[pallet::call_index(7)] - #[pallet::weight(T::WeightInfo::mint())] + #[pallet::weight(T::WeightInfo::mint(*quantity as u32))] #[transactional] pub fn mint( origin: OriginFor, @@ -572,6 +603,7 @@ pub mod pallet { token_owner: Option, ) -> DispatchResult { let who = ensure_signed(origin)?; + T::Migrator::ensure_migrated()?; let mut 
collection_info = >::get(collection_id).ok_or(Error::::NoCollectionFound)?; @@ -579,7 +611,7 @@ pub mod pallet { let public_mint_info = >::get(collection_id).unwrap_or_default(); // Caller must be collection_owner if public mint is disabled ensure!( - collection_info.is_collection_owner(&who) || public_mint_info.enabled, + &collection_info.owner == &who || public_mint_info.enabled, Error::::PublicMintDisabled ); @@ -590,7 +622,7 @@ pub mod pallet { let owner = token_owner.unwrap_or(who.clone()); // Only charge mint fee if public mint enabled and caller is not collection owner - if public_mint_info.enabled && !collection_info.is_collection_owner(&who) { + if public_mint_info.enabled && collection_info.owner != who { // Charge the mint fee for the mint Self::charge_mint_fee( &who, @@ -602,7 +634,13 @@ pub mod pallet { } // Perform the mint and update storage - Self::do_mint(collection_id, collection_info, &owner, &serial_numbers)?; + Self::do_mint( + collection_id, + collection_info, + &owner, + &serial_numbers, + TokenFlags::default(), + )?; // Check if this collection is XLS-20 compatible if xls20_compatible { @@ -640,6 +678,7 @@ pub mod pallet { new_owner: T::AccountId, ) -> DispatchResult { let who = ensure_signed(origin)?; + T::Migrator::ensure_migrated()?; Self::do_transfer(collection_id, serial_numbers, &who, &new_owner) } @@ -652,6 +691,7 @@ pub mod pallet { #[transactional] pub fn burn(origin: OriginFor, token_id: TokenId) -> DispatchResult { let who = ensure_signed(origin)?; + T::Migrator::ensure_migrated()?; let (collection_id, serial_number) = token_id; Self::do_burn(&who, collection_id, serial_number)?; @@ -673,9 +713,10 @@ pub mod pallet { name: BoundedVec, ) -> DispatchResult { let who = ensure_signed(origin)?; + T::Migrator::ensure_migrated()?; let mut collection_info = >::get(collection_id).ok_or(Error::::NoCollectionFound)?; - ensure!(collection_info.is_collection_owner(&who), Error::::NotCollectionOwner); + ensure!(&collection_info.owner == &who, Error::::NotCollectionOwner); ensure!(!name.is_empty(), Error::::CollectionNameInvalid); ensure!(core::str::from_utf8(&name).is_ok(), Error::::CollectionNameInvalid); @@ -696,9 +737,10 @@ pub mod pallet { royalties_schedule: RoyaltiesSchedule, ) -> DispatchResult { let who = ensure_signed(origin)?; + T::Migrator::ensure_migrated()?; let mut collection_info = >::get(collection_id).ok_or(Error::::NoCollectionFound)?; - ensure!(collection_info.is_collection_owner(&who), Error::::NotCollectionOwner); + ensure!(&collection_info.owner == &who, Error::::NotCollectionOwner); // Check that the entitlements are less than MAX_ENTITLEMENTS - 2 // This is because when the token is listed, two more entitlements will be added @@ -730,9 +772,10 @@ pub mod pallet { utility_flags: CollectionUtilityFlags, ) -> DispatchResult { let who = ensure_signed(origin)?; + T::Migrator::ensure_migrated()?; let collection_info = >::get(collection_id).ok_or(Error::::NoCollectionFound)?; - ensure!(collection_info.is_collection_owner(&who), Error::::NotCollectionOwner); + ensure!(&collection_info.owner == &who, Error::::NotCollectionOwner); if utility_flags == CollectionUtilityFlags::default() { // If the utility flags are default, remove the storage entry @@ -757,21 +800,27 @@ pub mod pallet { transferable: bool, ) -> DispatchResult { let who = ensure_signed(origin)?; + T::Migrator::ensure_migrated()?; let collection_info = >::get(token_id.0).ok_or(Error::::NoCollectionFound)?; - ensure!(collection_info.is_collection_owner(&who), Error::::NotCollectionOwner); - - 
// Check if the token exists
-		ensure!(collection_info.token_exists(token_id.1), Error::::NoToken);
-
-		ensure!(
-			>::get(token_id).burn_authority.is_none(),
-			Error::::CannotUpdateTokenUtility
-		);
-
-		TokenUtilityFlags::::mutate(token_id, |flags| {
-			flags.transferable = transferable;
-		});
+		ensure!(&collection_info.owner == &who, Error::::NotCollectionOwner);
+
+		TokenInfo::::try_mutate_exists(
+			token_id.0,
+			token_id.1,
+			|maybe_token_info| -> DispatchResult {
+				let token_info = maybe_token_info.as_mut().ok_or(Error::::NoToken)?;
+				// Don't set transferable if we have a burn authority; this indicates that the token
+				// is soulbound
+				ensure!(
+					token_info.utility_flags.burn_authority.is_none(),
+					Error::::CannotUpdateTokenUtility
+				);
+
+				token_info.utility_flags.transferable = transferable;
+				Ok(())
+			},
+		)?;

 		Self::deposit_event(Event::::TokenTransferableFlagSet { token_id, transferable });
 		Ok(())
@@ -790,31 +839,27 @@
 		burn_authority: TokenBurnAuthority,
 	) -> DispatchResult {
 		let who = ensure_signed(origin)?;
+		T::Migrator::ensure_migrated()?;
 		let mut collection_info =
 			>::get(collection_id).ok_or(Error::::NoCollectionFound)?;

 		// Only the owner can make this call
-		ensure!(collection_info.is_collection_owner(&who), Error::::NotCollectionOwner);
+		ensure!(&collection_info.owner == &who, Error::::NotCollectionOwner);

 		let _ = Self::pre_mint(collection_id, &mut collection_info, quantity)?;

-		>::try_mutate(
+		let issuance_id = NextIssuanceId::::get();
+		let pending_issuance = PendingIssuance { quantity, burn_authority };
+		>::insert(
+			(collection_id, &token_owner, issuance_id),
+			pending_issuance,
+		);
+		Self::deposit_event(Event::::PendingIssuanceCreated {
 			collection_id,
-			|pending_issuances| -> DispatchResult {
-				let issuance_id = pending_issuances
-					.insert_pending_issuance(&token_owner, quantity, burn_authority)
-					.map_err(Error::::from)?;
-
-				Self::deposit_event(Event::::PendingIssuanceCreated {
-					collection_id,
-					issuance_id,
-					token_owner: token_owner.clone(),
-					quantity,
-					burn_authority,
-				});
-
-				Ok(())
-			},
-		)?;
+			issuance_id,
+			token_owner,
+			quantity,
+			burn_authority,
+		});

 		Ok(())
 	}
@@ -826,15 +871,14 @@
 		pub fn accept_soulbound_issuance(
 			origin: OriginFor,
 			collection_id: CollectionUuid,
-			issuance_id: u32,
+			issuance_id: IssuanceId,
 		) -> DispatchResult {
 			let who = ensure_signed(origin)?;
+			T::Migrator::ensure_migrated()?;

-			let collection_pending_issuances = >::get(collection_id);
-
-			let pending_issuance = collection_pending_issuances
-				.get_pending_issuance(&who, issuance_id)
+			let pending_issuance = >::get((collection_id, &who, issuance_id))
 				.ok_or(Error::::InvalidPendingIssuance)?;

 			let mut collection_info =
 				>::get(collection_id).ok_or(Error::::NoCollectionFound)?;
@@ -849,7 +893,11 @@
 			let metadata_scheme = collection_info.metadata_scheme.clone();

 			// Perform the mint and update storage
-			Self::do_mint(collection_id, collection_info, &who, &serial_numbers)?;
+			let token_flags = TokenFlags {
+				transferable: false,
+				burn_authority: Some(pending_issuance.burn_authority),
+			};
+			Self::do_mint(collection_id, collection_info, &who, &serial_numbers, token_flags)?;

 			// Check if this collection is XLS-20 compatible
 			if xls20_compatible {
@@ -869,14 +917,6 @@
 				serial_numbers.clone().into_inner(),
 			)?;

-			// Set the utility flags for the tokens
-			for serial_number in serial_numbers.clone() {
-
TokenUtilityFlags::::mutate((collection_id, serial_number), |flags| { - flags.transferable = false; - flags.burn_authority = Some(pending_issuance.burn_authority); - }); - } - Self::deposit_event(Event::::Issued { token_owner: who.clone(), start: *serial_numbers.first().ok_or(Error::::NoToken)?, @@ -885,34 +925,9 @@ pub mod pallet { }); // remove the pending issuance - >::try_mutate( - collection_id, - |pending_issuances| -> DispatchResult { - pending_issuances.remove_pending_issuance(&who, issuance_id); - - Ok(()) - }, - )?; + >::remove((collection_id, who, issuance_id)); Ok(()) } } } - -impl From for Error { - fn from(val: TokenOwnershipError) -> Error { - match val { - TokenOwnershipError::TokenLimitExceeded => Error::::TokenLimitExceeded, - } - } -} - -impl From for Error { - fn from(val: PendingIssuanceError) -> Error { - match val { - PendingIssuanceError::PendingIssuanceLimitExceeded => { - Error::::PendingIssuanceLimitExceeded - }, - } - } -} diff --git a/pallet/nft/src/mock.rs b/pallet/nft/src/mock.rs index a0f3c5e66..5753e1df0 100644 --- a/pallet/nft/src/mock.rs +++ b/pallet/nft/src/mock.rs @@ -101,4 +101,5 @@ impl crate::Config for Test { type Xls20MintRequest = MockXls20MintRequest; type NFIRequest = (); type MaxPendingIssuances = MaxPendingIssuances; + type Migrator = (); } diff --git a/pallet/nft/src/tests.rs b/pallet/nft/src/tests.rs index d5e579612..9930e23ef 100644 --- a/pallet/nft/src/tests.rs +++ b/pallet/nft/src/tests.rs @@ -16,21 +16,13 @@ use super::*; use crate::{ mock::{MaxTokensPerCollection, Nft, RuntimeEvent as MockEvent, System, Test}, - CollectionInfo, Event as NftEvent, TokenLocks, + CollectionInfo, Event as NftEvent, }; use seed_pallet_common::test_prelude::*; use seed_pallet_common::utils::TokenBurnAuthority; use seed_pallet_common::utils::TokenUtilityFlags as TokenFlags; use seed_primitives::{OriginChain, RoyaltiesSchedule, TokenCount}; -type OwnedTokens = BoundedVec< - TokenOwnership< - ::AccountId, - ::MaxTokensPerCollection, - >, - ::MaxTokensPerCollection, ->; - // Create an NFT collection // Returns the created `collection_id` fn setup_collection(owner: AccountId) -> CollectionUuid { @@ -50,18 +42,6 @@ fn setup_collection(owner: AccountId) -> CollectionUuid { collection_id } -/// Helper function to create bounded vec of TokenOwnership -pub fn create_owned_tokens(owned_tokens: Vec<(AccountId, Vec)>) -> OwnedTokens { - let mut token_ownership: OwnedTokens = BoundedVec::default(); - for (owner, serial_numbers) in owned_tokens { - let serial_numbers_bounded: BoundedVec = - BoundedVec::try_from(serial_numbers).unwrap(); - let new_token_ownership = TokenOwnership::new(owner, serial_numbers_bounded); - token_ownership.try_push(new_token_ownership).unwrap(); - } - token_ownership -} - // Helper function for creating the collection name type pub fn bounded_string(name: &str) -> BoundedVec::StringLimit> { BoundedVec::truncate_from(name.as_bytes().to_vec()) @@ -200,7 +180,6 @@ fn create_collection() { entitlements: BoundedVec::truncate_from(vec![(collection_owner, Permill::one())]), }; - let expected_tokens = create_owned_tokens(vec![(token_owner, vec![0, 1, 2, 3, 4])]); let expected_info = CollectionInformation { owner: collection_owner, name: bounded_string("test-collection"), @@ -211,7 +190,6 @@ fn create_collection() { origin_chain: OriginChain::Root, next_serial_number: quantity, collection_issuance: quantity, - owned_tokens: expected_tokens, cross_chain_compatibility: CrossChainCompatibility::default(), }; @@ -228,6 +206,13 @@ fn create_collection() { )); 
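// [editor's note] Not part of the patch: a minimal sketch of the read path that the new
// assertions below exercise, assuming the `TokenInfo` and `OwnedTokens` storage items
// declared in pallet/nft/src/lib.rs above. The helper names are hypothetical.
fn owner_of<T: crate::Config>(token_id: TokenId) -> Option<T::AccountId> {
	// Per-token data is keyed by (collection, serial); the owner is stored inline.
	TokenInfo::<T>::get(token_id.0, token_id.1).map(|info| info.owner)
}
fn balance_of<T: crate::Config>(who: &T::AccountId, collection_id: CollectionUuid) -> u32 {
	// The reverse index lists every serial an account holds within a collection.
	OwnedTokens::<T>::get(who, collection_id).map_or(0, |serials| serials.len() as u32)
}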
assert_eq!(CollectionInfo::::get(collection_id).unwrap(), expected_info);
+		for i in 0..quantity {
+			assert!(TokenInfo::::get(collection_id, i).unwrap().owner == token_owner);
+		}
+		assert_eq!(
+			OwnedTokens::::get(token_owner, collection_id).unwrap().into_inner(),
+			vec![0, 1, 2, 3, 4]
+		);

 		// EVM pallet should have account code for collection
 		assert!(!pallet_evm::Pallet::::is_account_empty(&H160::from_low_u64_be(
@@ -489,9 +474,7 @@ fn transfer() {
 		assert_eq!(Nft::token_balance_of(&token_owner, collection_id), 0);
 		assert_eq!(Nft::token_balance_of(&new_owner, collection_id), 1);
-		assert!(CollectionInfo::::get(collection_id)
-			.unwrap()
-			.is_token_owner(&new_owner, 0));
+		assert_eq!(TokenInfo::::get(collection_id, 0).unwrap().owner, new_owner);
 	});
 }
@@ -539,7 +522,10 @@ fn transfer_fails_prechecks() {
 		);
 		// cannot transfer while listed
-		>::insert((collection_id, 0), TokenLockReason::Listed(1));
+		TokenInfo::::mutate(collection_id, 0, |maybe_token_info| {
+			let token_info = maybe_token_info.as_mut().unwrap();
+			token_info.lock_status = Some(TokenLockReason::Listed(1));
+		});
 		assert_noop!(
 			Nft::transfer(Some(token_owner).into(), collection_id, serial_numbers, new_owner),
 			Error::::TokenLocked,
 		);
@@ -575,14 +561,30 @@ fn burn() {
 		assert_ok!(Nft::burn(Some(token_owner).into(), (collection_id, 1)));
 		System::assert_last_event(
-			Event::::Burn { token_owner, collection_id, serial_number: 1 }.into(),
+			Event::::Burn {
+				token_owner: token_owner.clone(),
+				collection_id,
+				serial_number: 1,
+			}
+			.into(),
 		);
+		assert_eq!(Nft::token_balance_of(&token_owner, collection_id), 1);
+
 		assert_ok!(Nft::burn(Some(token_owner).into(), (collection_id, 2)));
 		System::assert_last_event(
-			Event::::Burn { token_owner, collection_id, serial_number: 2 }.into(),
+			Event::::Burn {
+				token_owner: token_owner.clone(),
+				collection_id,
+				serial_number: 2,
+			}
+			.into(),
 		);
+		assert_eq!(Nft::token_balance_of(&token_owner, collection_id), 0);
 		assert_eq!(CollectionInfo::::get(collection_id).unwrap().collection_issuance, 0);
+		// Check token info removed from storage
+		assert!(!TokenInfo::::contains_key(collection_id, 0));
+		assert!(!OwnedTokens::::contains_key(&token_owner, collection_id));
 		assert_eq!(Nft::owned_tokens(collection_id, &token_owner, 0, 1000), (0_u32, 0_u32, vec![]));
 		assert_eq!(Nft::token_balance_of(&token_owner, collection_id), 0);
 	});
@@ -620,7 +622,10 @@ fn burn_fails_prechecks() {
 		);
 		// cannot burn while listed
-		>::insert((collection_id, 0), TokenLockReason::Listed(1));
+		TokenInfo::::mutate(collection_id, 0, |maybe_token_info| {
+			let token_info = maybe_token_info.as_mut().unwrap();
+			token_info.lock_status = Some(TokenLockReason::Listed(1));
+		});
 		assert_noop!(
 			Nft::burn(Some(token_owner).into(), (collection_id, 0)),
@@ -798,6 +803,29 @@ fn mint_over_mint_limit_fails() {
 	});
 }
+#[test]
+fn mint_with_zero_issuance() {
+	TestExt::::default().build().execute_with(|| {
+		let collection_owner = create_account(1);
+		let collection_id = Nft::next_collection_uuid().unwrap();
+
+		// create the collection with zero initial issuance (no tokens minted here)
+		assert_ok!(Nft::create_collection(
+			Some(collection_owner).into(),
+			bounded_string("test-collection"),
+			0, // 0 initial issuance
+			None,
+			None,
+			MetadataScheme::try_from(b"https://example.com/metadata".as_slice()).unwrap(),
+			None,
+			CrossChainCompatibility::default(),
+		));
+
+		// Should succeed even though no tokens exist in the collection
+		assert_ok!(Nft::mint(Some(collection_owner).into(), collection_id, 1, None));
+	});
+}
+
 #[test]
 fn create_collection_over_mint_limit_fails() {
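// [editor's note] `T::MintLimit` caps both entry points: `pre_mint` rejects oversized
// `mint` quantities, and `do_create_collection` applies the same bound to
// `initial_issuance`, which is what this test exercises.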
TestExt::::default().build().execute_with(|| { @@ -1102,9 +1130,11 @@ fn mints_multiple_specified_tokens_by_id() { // Ownership checks assert_eq!(Nft::token_balance_of(&token_owner, collection_id), token_ids.len() as u32); - let collection_info = CollectionInfo::::get(collection_id).unwrap(); token_ids.iter().for_each(|&serial_number| { - assert!(collection_info.is_token_owner(&token_owner, serial_number)); + assert_eq!( + TokenInfo::::get(collection_id, serial_number).unwrap().owner, + token_owner + ); }); // Next serial number should be 0, origin chain is Ethereum so we don't count this @@ -1144,9 +1174,11 @@ fn mint_duplicate_token_id_should_fail_silently() { // We expect the token balance to be 5 as that is the number of unique token_ids in the vec assert_eq!(Nft::token_balance_of(&token_owner, collection_id), 5); - let collection_info = CollectionInfo::::get(collection_id).unwrap(); token_ids.iter().for_each(|&serial_number| { - assert!(collection_info.is_token_owner(&token_owner, serial_number)); + assert_eq!( + TokenInfo::::get(collection_id, serial_number).unwrap().owner, + token_owner + ); }); // Collection issuance should be 5 to indicate the 5 unique tokens @@ -1163,9 +1195,11 @@ fn mint_duplicate_token_id_should_fail_silently() { // We expect the token balance to be 3 assert_eq!(Nft::token_balance_of(&other_owner, collection_id), 3); - let collection_info = CollectionInfo::::get(collection_id).unwrap(); [3000, 40005, 1234].iter().for_each(|&serial_number| { - assert!(collection_info.is_token_owner(&other_owner, serial_number)); + assert_eq!( + TokenInfo::::get(collection_id, serial_number).unwrap().owner, + other_owner + ); }); }); } @@ -1189,16 +1223,14 @@ fn token_exists_works() { CrossChainCompatibility::default(), )); - let collection_info = CollectionInfo::::get(collection_id).unwrap(); - // Check that the tokens exist for serial_number in 0..quantity { - assert!(collection_info.token_exists(serial_number)); + assert!(Pallet::::token_exists(&(collection_id, serial_number))); } // Check that a non-existent token does not exist for serial_number in quantity..1000 { - assert!(!collection_info.token_exists(serial_number)); + assert!(!Pallet::::token_exists(&(collection_id, serial_number))); } }); } @@ -1236,109 +1268,6 @@ fn token_balance_of_works() { }); } -#[test] -fn add_user_tokens_works() { - TestExt::::default().build().execute_with(|| { - let collection_owner = create_account(1); - let token_owner = create_account(2); - let tokens: Vec = vec![0, 1, 2, 3, 900, 1000, 101010101]; - let collection_id = setup_collection(collection_owner); - let mut collection_info = CollectionInfo::::get(collection_id).unwrap(); - let expected_owned_tokens: OwnedTokens = BoundedVec::default(); - // Initially, owned tokens should be empty - assert_eq!(collection_info.owned_tokens, expected_owned_tokens); - - // Add tokens to token_owner - let tokens_bounded: BoundedVec = - BoundedVec::try_from(tokens.clone()).unwrap(); - assert_ok!(collection_info.add_user_tokens(&token_owner, tokens_bounded.clone())); - - let expected_owned_tokens = create_owned_tokens(vec![(token_owner, tokens.clone())]); - assert_eq!(collection_info.owned_tokens, expected_owned_tokens); - - // Add tokens to token_owner_2 - let token_owner_2 = create_account(3); - let tokens_2: Vec = vec![6, 9, 4, 2, 0]; - let tokens_2_bounded: BoundedVec = - BoundedVec::try_from(tokens_2.clone()).unwrap(); - assert_ok!(collection_info.add_user_tokens(&token_owner_2, tokens_2_bounded.clone())); - - let expected_owned_tokens = - 
create_owned_tokens(vec![(token_owner, tokens), (token_owner_2, tokens_2.clone())]); - assert_eq!(collection_info.owned_tokens, expected_owned_tokens); - - // Now remove some tokens from token_owner - let tokens_to_remove: Vec = vec![0, 1, 2, 3]; - let tokens_to_remove_bounded: BoundedVec = - BoundedVec::try_from(tokens_to_remove.clone()).unwrap(); - collection_info.remove_user_tokens(&token_owner, tokens_to_remove_bounded); - let expected_owned_tokens = create_owned_tokens(vec![ - (token_owner, vec![900, 1000, 101010101]), - (token_owner_2, tokens_2), - ]); - assert_eq!(collection_info.owned_tokens, expected_owned_tokens); - }); -} - -#[test] -fn add_user_tokens_over_token_limit_should_fail() { - TestExt::::default().build().execute_with(|| { - let collection_owner = create_account(1); - let token_owner = create_account(2); - let token_owner_2 = create_account(3); - let collection_id = setup_collection(collection_owner); - let mut collection_info = CollectionInfo::::get(collection_id).unwrap(); - let max = mock::MaxTokensPerCollection::get(); - - // Add tokens to token_owner - let serial_numbers_unbounded: Vec = (0..max).collect(); - let serial_numbers: BoundedVec = - BoundedVec::try_from(serial_numbers_unbounded).unwrap(); - assert_ok!(collection_info.add_user_tokens(&token_owner, serial_numbers.clone())); - - // Adding one more token to token_owner should fail - let serial_numbers_max: BoundedVec = - BoundedVec::try_from(vec![max]).unwrap(); - assert_noop!( - collection_info.add_user_tokens(&token_owner, serial_numbers_max.clone()), - TokenOwnershipError::TokenLimitExceeded - ); - // Adding tokens to different user still works - assert_ok!(collection_info.add_user_tokens(&token_owner_2, serial_numbers_max.clone())); - - // Now let's remove a token - let serial_numbers: BoundedVec = - BoundedVec::try_from(vec![1]).unwrap(); - collection_info.remove_user_tokens(&token_owner, serial_numbers); - // Adding one more token to token_owner should now work - assert_ok!(collection_info.add_user_tokens(&token_owner, serial_numbers_max)); - }); -} - -#[test] -fn add_user_tokens_over_user_limit_should_fail() { - TestExt::::default().build().execute_with(|| { - let collection_owner = create_account(1); - let collection_id = setup_collection(collection_owner); - let mut collection_info = CollectionInfo::::get(collection_id).unwrap(); - let max = mock::MaxTokensPerCollection::get(); - let serial_numbers: BoundedVec = - BoundedVec::try_from(vec![100]).unwrap(); - - // Adding users up to max should work - for i in 0..max as u64 { - let account = create_account(i); - assert_ok!(collection_info.add_user_tokens(&account, serial_numbers.clone())); - } - - // adding another user should fail - assert_noop!( - collection_info.add_user_tokens(&create_account(max as u64), serial_numbers), - TokenOwnershipError::TokenLimitExceeded - ); - }); -} - mod claim_unowned_collection { use super::*; @@ -1461,7 +1390,6 @@ fn create_xls20_collection_works() { None, cross_chain_compatibility, )); - let expected_tokens = create_owned_tokens(vec![]); System::assert_last_event( Event::::CollectionCreate { @@ -1490,7 +1418,6 @@ fn create_xls20_collection_works() { origin_chain: OriginChain::Root, next_serial_number: 0, collection_issuance: 0, - owned_tokens: expected_tokens, cross_chain_compatibility, } ); @@ -2951,7 +2878,10 @@ mod set_token_transferable_flag { // Ensure default is correct let default_flags = TokenFlags { transferable: true, burn_authority: None }; - assert_eq!(TokenUtilityFlags::::get(token_id), default_flags); 
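// [editor's note] Per-token flags moved off the standalone `TokenUtilityFlags` map and
// into `TokenInformation::utility_flags`, hence the rewritten assertions below.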
+ assert_eq!( + TokenInfo::::get(collection_id, 0).unwrap().utility_flags, + default_flags + ); // set to false assert_ok!(Nft::set_token_transferable_flag( @@ -2960,7 +2890,7 @@ mod set_token_transferable_flag { false )); let new_flags = TokenFlags { transferable: false, burn_authority: None }; - assert_eq!(TokenUtilityFlags::::get(token_id), new_flags); + assert_eq!(TokenInfo::::get(collection_id, 0).unwrap().utility_flags, new_flags); System::assert_last_event( Event::::TokenTransferableFlagSet { token_id, transferable: false }.into(), ); @@ -2972,7 +2902,7 @@ mod set_token_transferable_flag { true )); let new_flags = TokenFlags { transferable: true, burn_authority: None }; - assert_eq!(TokenUtilityFlags::::get(token_id), new_flags); + assert_eq!(TokenInfo::::get(collection_id, 0).unwrap().utility_flags, new_flags); System::assert_last_event( Event::::TokenTransferableFlagSet { token_id, transferable: true }.into(), ); @@ -2994,7 +2924,7 @@ mod set_token_transferable_flag { false )); let new_flags = TokenFlags { transferable: false, burn_authority: None }; - assert_eq!(TokenUtilityFlags::::get(token_id), new_flags); + assert_eq!(TokenInfo::::get(collection_id, 0).unwrap().utility_flags, new_flags); System::assert_last_event( Event::::TokenTransferableFlagSet { token_id, transferable: false }.into(), ); @@ -3092,7 +3022,7 @@ mod soulbound_token { token_owner: AccountId, burn_authority: TokenBurnAuthority, ) -> TokenId { - let issuance_id = PendingIssuances::::get(collection_id).next_issuance_id; + let issuance_id = NextIssuanceId::::get(); let collection_info = CollectionInfo::::get(collection_id).unwrap(); assert_ok!(Nft::issue_soulbound( @@ -3145,9 +3075,8 @@ mod soulbound_token { ); assert_eq!( - PendingIssuances::::get(collection_id) - .get_pending_issuance(&token_owner, issuance_id), - Some(PendingIssuance { issuance_id, quantity, burn_authority }) + PendingIssuances::::get((collection_id, &token_owner, issuance_id)), + Some(PendingIssuance { quantity, burn_authority }) ); assert_ok!(Nft::accept_soulbound_issuance( @@ -3168,7 +3097,7 @@ mod soulbound_token { // assert token flags let new_flags = TokenFlags { transferable: false, burn_authority: Some(burn_authority) }; - assert_eq!(TokenUtilityFlags::::get(token_id), new_flags); + assert_eq!(TokenInfo::::get(collection_id, 0).unwrap().utility_flags, new_flags); }); } @@ -3194,9 +3123,8 @@ mod soulbound_token { let issuance_id = 0; assert_eq!( - PendingIssuances::::get(collection_id) - .get_pending_issuance(&token_owner, issuance_id), - Some(PendingIssuance { issuance_id, quantity, burn_authority }) + PendingIssuances::::get((collection_id, &token_owner, issuance_id)), + Some(PendingIssuance { quantity, burn_authority }) ); assert_ok!(Nft::accept_soulbound_issuance( diff --git a/pallet/nft/src/traits.rs b/pallet/nft/src/traits.rs index 7d5bc8570..98babbb06 100644 --- a/pallet/nft/src/traits.rs +++ b/pallet/nft/src/traits.rs @@ -22,13 +22,9 @@ use crate::CollectionInformation; pub trait NFTCollectionInfo { type AccountId: Debug + PartialEq + Clone; - type MaxTokensPerCollection: Get; type StringLimit: Get; fn get_collection_info( collection_id: CollectionUuid, - ) -> Result< - CollectionInformation, - DispatchError, - >; + ) -> Result, DispatchError>; } diff --git a/pallet/nft/src/types.rs b/pallet/nft/src/types.rs index 8f983e8a3..fe5d67f20 100644 --- a/pallet/nft/src/types.rs +++ b/pallet/nft/src/types.rs @@ -18,62 +18,15 @@ use codec::{Decode, Encode, MaxEncodedLen}; use frame_support::{traits::Get, CloneNoBound, PartialEqNoBound, 
RuntimeDebugNoBound}; use scale_info::TypeInfo; -use seed_pallet_common::utils::TokenBurnAuthority; +use seed_pallet_common::utils::{TokenBurnAuthority, TokenUtilityFlags as TokenFlags}; use seed_primitives::{ - CrossChainCompatibility, IssuanceId, MetadataScheme, OriginChain, RoyaltiesSchedule, - SerialNumber, TokenCount, + CrossChainCompatibility, MetadataScheme, OriginChain, RoyaltiesSchedule, SerialNumber, + TokenCount, TokenLockReason, }; use serde::{Deserialize, Serialize}; use sp_runtime::{BoundedVec, Permill}; use sp_std::{fmt::Debug, prelude::*}; -#[derive(Decode, Encode, Debug, Clone, Copy, PartialEq, TypeInfo)] -pub enum TokenOwnershipError { - TokenLimitExceeded, -} - -/// Struct that represents the owned serial numbers within a collection of an individual account -#[derive( - PartialEqNoBound, RuntimeDebugNoBound, Decode, Encode, CloneNoBound, TypeInfo, MaxEncodedLen, -)] -#[codec(mel_bound(AccountId: MaxEncodedLen))] -#[scale_info(skip_type_params(MaxTokensPerCollection))] -pub struct TokenOwnership -where - AccountId: Debug + PartialEq + Clone, - MaxTokensPerCollection: Get, -{ - pub owner: AccountId, - pub owned_serials: BoundedVec, -} - -impl TokenOwnership -where - AccountId: Debug + PartialEq + Clone, - MaxTokensPerCollection: Get, -{ - /// Creates a new TokenOwnership with the given owner and serial numbers - pub fn new( - owner: AccountId, - serial_numbers: BoundedVec, - ) -> Self { - Self { owner, owned_serials: serial_numbers } - } - - /// Adds a serial to owned_serials and sorts the vec - pub fn add(&mut self, serial_number: SerialNumber) -> Result<(), TokenOwnershipError> { - self.owned_serials - .try_push(serial_number) - .map_err(|_| TokenOwnershipError::TokenLimitExceeded)?; - Ok(()) - } - - /// Returns true if the serial number is containerd within owned_serials - pub fn contains_serial(&self, serial_number: &SerialNumber) -> bool { - self.owned_serials.contains(serial_number) - } -} - /// Information related to a specific collection /// Need for separate collection structure from CollectionInformation for RPC call is cause /// of complexity of deserialization/serialization BoundedVec @@ -117,11 +70,10 @@ where PartialEqNoBound, RuntimeDebugNoBound, CloneNoBound, Encode, Decode, TypeInfo, MaxEncodedLen, )] #[codec(mel_bound(AccountId: MaxEncodedLen))] -#[scale_info(skip_type_params(MaxTokensPerCollection, StringLimit))] -pub struct CollectionInformation +#[scale_info(skip_type_params(StringLimit))] +pub struct CollectionInformation where AccountId: Debug + PartialEq + Clone, - MaxTokensPerCollection: Get, StringLimit: Get, { /// The owner of the collection @@ -142,217 +94,39 @@ where pub collection_issuance: TokenCount, /// This collections compatibility with other chains pub cross_chain_compatibility: CrossChainCompatibility, - /// All serial numbers owned by an account in a collection - pub owned_tokens: - BoundedVec, MaxTokensPerCollection>, -} - -impl - CollectionInformation -where - AccountId: Debug + PartialEq + Clone, - MaxTokensPerCollection: Get, - StringLimit: Get, -{ - /// Check whether a token has been minted in a collection - pub fn token_exists(&self, serial_number: SerialNumber) -> bool { - self.owned_tokens - .iter() - .any(|token_ownership| token_ownership.contains_serial(&serial_number)) - } - - /// Check whether who is the collection owner - pub fn is_collection_owner(&self, who: &AccountId) -> bool { - &self.owner == who - } - - /// Check whether who owns the serial number in collection_info - pub fn is_token_owner(&self, who: &AccountId, 
serial_number: SerialNumber) -> bool { - self.owned_tokens.iter().any(|token_ownership| { - if &token_ownership.owner == who { - token_ownership.contains_serial(&serial_number) - } else { - false - } - }) - } - - /// Get's the token owner - pub fn get_token_owner(&self, serial_number: SerialNumber) -> Option { - let token = self.owned_tokens.iter().find(|x| x.contains_serial(&serial_number))?; - Some(token.owner.clone()) - } - - /// Adds a list of tokens to a users balance in collection_info - pub fn add_user_tokens( - &mut self, - token_owner: &AccountId, - serial_numbers: BoundedVec, - ) -> Result<(), TokenOwnershipError> { - if let Some(token_ownership) = - self.owned_tokens.iter_mut().find(|p| &p.owner == token_owner) - { - // Add new serial numbers to existing owner - for serial_number in serial_numbers.iter() { - token_ownership.add(*serial_number)?; - } - } else { - // If token owner doesn't exist, create new entry - let new_token_ownership = TokenOwnership::new(token_owner.clone(), serial_numbers); - self.owned_tokens - .try_push(new_token_ownership) - .map_err(|_| TokenOwnershipError::TokenLimitExceeded)?; - } - Ok(()) - } - - /// Removes a list of tokens from a users balance in collection_info - pub fn remove_user_tokens( - &mut self, - token_owner: &AccountId, - serial_numbers: BoundedVec, - ) { - let mut removing_all_tokens: bool = false; - for token_ownership in self.owned_tokens.iter_mut() { - if &token_ownership.owner != token_owner { - continue; - } - token_ownership.owned_serials.retain(|serial| !serial_numbers.contains(serial)); - removing_all_tokens = token_ownership.owned_serials.is_empty(); - break; - } - // Check whether the owner has any tokens left, if not remove them from the collection - if removing_all_tokens { - self.owned_tokens - .retain(|token_ownership| &token_ownership.owner != token_owner); - } - } } -#[derive( - PartialEqNoBound, RuntimeDebugNoBound, CloneNoBound, Encode, Decode, TypeInfo, MaxEncodedLen, -)] -pub struct PendingIssuance { - pub issuance_id: IssuanceId, - pub quantity: u32, - pub burn_authority: TokenBurnAuthority, -} - -pub enum PendingIssuanceError { - PendingIssuanceLimitExceeded, -} - -/// The state of a collection's pending issuances +/// Information related to a specific token #[derive( PartialEqNoBound, RuntimeDebugNoBound, CloneNoBound, Encode, Decode, TypeInfo, MaxEncodedLen, )] #[codec(mel_bound(AccountId: MaxEncodedLen))] -#[scale_info(skip_type_params(MaxPendingIssuances))] -pub struct CollectionPendingIssuances> +pub struct TokenInformation where AccountId: Debug + PartialEq + Clone, - MaxPendingIssuances: Get, { - pub next_issuance_id: IssuanceId, - pub pending_issuances: BoundedVec< - (AccountId, BoundedVec), - MaxPendingIssuances, - >, + /// The owner of the token + pub owner: AccountId, + /// Does this token have any locks, i.e. 
locked for sale + pub lock_status: Option, + /// transferable and burn authority flags + pub utility_flags: TokenFlags, } -impl Default - for CollectionPendingIssuances +impl TokenInformation where AccountId: Debug + PartialEq + Clone, - MaxPendingIssuances: Get, { - fn default() -> Self { - CollectionPendingIssuances { next_issuance_id: 0, pending_issuances: BoundedVec::new() } + /// Creates a new instance of `TokenInformation` with the owner set to the provided account id + pub fn new(owner: AccountId, utility_flags: TokenFlags) -> Self { + TokenInformation { owner, lock_status: None, utility_flags } } } -impl CollectionPendingIssuances -where - AccountId: Debug + PartialEq + Clone, - MaxPendingIssuances: Get, -{ - /// Creates a new instance of `CollectionPendingIssuances` with the next - /// issuance id set to 0, and an empty list of pending issuances - pub fn new() -> Self { - CollectionPendingIssuances { next_issuance_id: 0, pending_issuances: BoundedVec::new() } - } - - /// Inserts a new pending issuance for a token owner - pub fn insert_pending_issuance( - &mut self, - token_owner: &AccountId, - quantity: u32, - burn_authority: TokenBurnAuthority, - ) -> Result { - let issuance_id = self.next_issuance_id; - let pending_issuance = PendingIssuance { issuance_id, quantity, burn_authority }; - - if let Some(account_pending_issuances) = - self.pending_issuances.iter_mut().find(|p| &p.0 == token_owner) - { - account_pending_issuances - .1 - .try_push(pending_issuance) - .map_err(|_| PendingIssuanceError::PendingIssuanceLimitExceeded)?; - } else { - // create new entry - let mut new_account_issuance = BoundedVec::new(); - new_account_issuance.force_push(pending_issuance); - - self.pending_issuances - .try_push((token_owner.clone(), new_account_issuance)) - .map_err(|_| PendingIssuanceError::PendingIssuanceLimitExceeded)?; - } - - self.next_issuance_id = self.next_issuance_id.saturating_add(1); - - Ok(issuance_id) - } - - /// Gets the pending issuance by the token owner and issuance id - pub fn get_pending_issuance( - &self, - token_owner: &AccountId, - issuance_id: IssuanceId, - ) -> Option { - let account_pending_issuances = self - .pending_issuances - .iter() - .find(|pending_issuance| &pending_issuance.0 == token_owner)?; - - let pending_issuance = - account_pending_issuances.1.iter().find(|p| p.issuance_id == issuance_id)?; - - Some(pending_issuance.clone()) - } - - /// Removes a pending issuance for a token owner - pub fn remove_pending_issuance(&mut self, token_owner: &AccountId, issuance_id: IssuanceId) { - for account_pending_issuance in self.pending_issuances.iter_mut() { - if &account_pending_issuance.0 != token_owner { - continue; - } - - account_pending_issuance.1.retain(|p| p.issuance_id != issuance_id); - break; - } - } - - /// Gets all pending issuances for a token owner - pub fn get_pending_issuances(&self, token_owner: &AccountId) -> Vec { - if let Some(account_pending_issuances) = self - .pending_issuances - .iter() - .find(|pending_issuance| &pending_issuance.0 == token_owner) - { - return account_pending_issuances.1.to_vec(); - } - - vec![] - } +#[derive( + PartialEqNoBound, RuntimeDebugNoBound, CloneNoBound, Encode, Decode, TypeInfo, MaxEncodedLen, +)] +pub struct PendingIssuance { + pub quantity: u32, + pub burn_authority: TokenBurnAuthority, } diff --git a/pallet/nft/src/weights.rs b/pallet/nft/src/weights.rs index d7fcf2874..8521111ed 100644 --- a/pallet/nft/src/weights.rs +++ b/pallet/nft/src/weights.rs @@ -18,7 +18,7 @@ //! Autogenerated weights for pallet_nft //! 
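// [editor's note] The regenerated weights below reflect the migration guard (one extra
// `Migration::Status` read on every dispatchable) plus the per-token writes that make
// `create_collection` and `mint` linear in the token count `p`.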
//! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 4.0.0-dev -//! DATE: 2025-02-26, STEPS: `50`, REPEAT: 20, LOW RANGE: `[]`, HIGH RANGE: `[]` +//! DATE: 2025-03-06, STEPS: `50`, REPEAT: 20, LOW RANGE: `[]`, HIGH RANGE: `[]` //! HOSTNAME: `ip-172-31-102-147`, CPU: `Intel(R) Xeon(R) CPU E5-2686 v4 @ 2.30GHz` //! EXECUTION: , WASM-EXECUTION: Compiled, CHAIN: Some("dev"), DB CACHE: 1024 @@ -53,10 +53,10 @@ pub trait WeightInfo { fn set_base_uri() -> Weight; fn set_name() -> Weight; fn set_royalties_schedule() -> Weight; - fn create_collection() -> Weight; + fn create_collection(p: u32, ) -> Weight; fn toggle_public_mint() -> Weight; fn set_mint_fee() -> Weight; - fn mint() -> Weight; + fn mint(p: u32, ) -> Weight; fn transfer(p: u32, ) -> Weight; fn burn() -> Weight; fn set_utility_flags() -> Weight; @@ -68,50 +68,66 @@ pub trait WeightInfo { /// Weights for pallet_nft using the Substrate node and recommended hardware. pub struct SubstrateWeight(PhantomData); impl WeightInfo for SubstrateWeight { + // Storage: `Migration::Status` (r:1 w:0) + // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Nft::CollectionInfo` (r:1 w:1) - // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) fn claim_unowned_collection() -> Weight { - Weight::from_all(44_993_000) - .saturating_add(T::DbWeight::get().reads(1)) + Weight::from_all(46_084_000) + .saturating_add(T::DbWeight::get().reads(2)) .saturating_add(T::DbWeight::get().writes(1)) } + // Storage: `Migration::Status` (r:1 w:0) + // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Nft::CollectionInfo` (r:1 w:1) - // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) fn set_owner() -> Weight { - Weight::from_all(47_528_000) - .saturating_add(T::DbWeight::get().reads(1)) + Weight::from_all(50_697_000) + .saturating_add(T::DbWeight::get().reads(2)) .saturating_add(T::DbWeight::get().writes(1)) } + // Storage: `Migration::Status` (r:1 w:0) + // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Nft::CollectionInfo` (r:1 w:1) - // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) fn set_max_issuance() -> Weight { - Weight::from_all(47_606_000) - .saturating_add(T::DbWeight::get().reads(1)) + Weight::from_all(48_716_000) + .saturating_add(T::DbWeight::get().reads(2)) .saturating_add(T::DbWeight::get().writes(1)) } + // Storage: `Migration::Status` (r:1 w:0) + // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Nft::CollectionInfo` (r:1 w:1) - // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) fn set_base_uri() -> Weight { - Weight::from_all(49_609_000) - .saturating_add(T::DbWeight::get().reads(1)) + Weight::from_all(49_881_000) + 
.saturating_add(T::DbWeight::get().reads(2)) .saturating_add(T::DbWeight::get().writes(1)) } + // Storage: `Migration::Status` (r:1 w:0) + // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Nft::CollectionInfo` (r:1 w:1) - // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) fn set_name() -> Weight { - Weight::from_all(48_103_000) - .saturating_add(T::DbWeight::get().reads(1)) + Weight::from_all(50_127_000) + .saturating_add(T::DbWeight::get().reads(2)) .saturating_add(T::DbWeight::get().writes(1)) } + // Storage: `Migration::Status` (r:1 w:0) + // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Nft::CollectionInfo` (r:1 w:1) - // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) fn set_royalties_schedule() -> Weight { - Weight::from_all(48_752_000) - .saturating_add(T::DbWeight::get().reads(1)) + Weight::from_all(50_316_000) + .saturating_add(T::DbWeight::get().reads(2)) .saturating_add(T::DbWeight::get().writes(1)) } + // Storage: `Migration::Status` (r:1 w:0) + // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Nft::NextCollectionId` (r:1 w:1) // Proof: `Nft::NextCollectionId` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`) + // Storage: `Nft::OwnedTokens` (r:1 w:1) + // Proof: `Nft::OwnedTokens` (`max_values`: None, `max_size`: Some(4000052), added: 4002527, mode: `MaxEncodedLen`) // Storage: `EVM::AccountCodes` (r:1 w:1) // Proof: `EVM::AccountCodes` (`max_values`: None, `max_size`: None, mode: `Measured`) // Storage: `Futurepass::DefaultProxy` (r:1 w:0) @@ -120,177 +136,233 @@ impl WeightInfo for SubstrateWeight { // Proof: `System::Account` (`max_values`: None, `max_size`: Some(116), added: 2591, mode: `MaxEncodedLen`) // Storage: `EVM::AccountCodesMetadata` (r:0 w:1) // Proof: `EVM::AccountCodesMetadata` (`max_values`: None, `max_size`: None, mode: `Measured`) + // Storage: `Nft::TokenInfo` (r:0 w:500) + // Proof: `Nft::TokenInfo` (`max_values`: None, `max_size`: Some(65), added: 2540, mode: `MaxEncodedLen`) // Storage: `Nft::CollectionInfo` (r:0 w:1) - // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) - fn create_collection() -> Weight { - Weight::from_all(95_109_000) - .saturating_add(T::DbWeight::get().reads(4)) - .saturating_add(T::DbWeight::get().writes(5)) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) + /// The range of component `p` is `[1, 500]`. 
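// [editor's note] Worked example of the linear formula below: at the top of the range,
// p = 500, ref-time is roughly 103_415_723 + 500 * 3_853_867 ≈ 2.03 * 10^9, alongside
// 6 reads and 6 + 500 = 506 writes.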
+ fn create_collection(p: u32, ) -> Weight { + Weight::from_all(103_415_723) + // Standard Error: 5_493 + .saturating_add(Weight::from_all(3_853_867_u64).saturating_mul(p as u64)) + .saturating_add(T::DbWeight::get().reads(6)) + .saturating_add(T::DbWeight::get().writes(6)) + .saturating_add(T::DbWeight::get().writes((1_u64).saturating_mul(p as u64))) } + // Storage: `Migration::Status` (r:1 w:0) + // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Nft::CollectionInfo` (r:1 w:0) - // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) // Storage: `Nft::PublicMintInfo` (r:1 w:1) // Proof: `Nft::PublicMintInfo` (`max_values`: None, `max_size`: Some(34), added: 2509, mode: `MaxEncodedLen`) fn toggle_public_mint() -> Weight { - Weight::from_all(50_396_000) - .saturating_add(T::DbWeight::get().reads(2)) + Weight::from_all(52_574_000) + .saturating_add(T::DbWeight::get().reads(3)) .saturating_add(T::DbWeight::get().writes(1)) } + // Storage: `Migration::Status` (r:1 w:0) + // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Nft::CollectionInfo` (r:1 w:0) - // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) // Storage: `Nft::PublicMintInfo` (r:1 w:1) // Proof: `Nft::PublicMintInfo` (`max_values`: None, `max_size`: Some(34), added: 2509, mode: `MaxEncodedLen`) fn set_mint_fee() -> Weight { - Weight::from_all(50_500_000) - .saturating_add(T::DbWeight::get().reads(2)) + Weight::from_all(52_872_000) + .saturating_add(T::DbWeight::get().reads(3)) .saturating_add(T::DbWeight::get().writes(1)) } + // Storage: `Migration::Status` (r:1 w:0) + // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Nft::CollectionInfo` (r:1 w:1) - // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) // Storage: `Nft::PublicMintInfo` (r:1 w:0) // Proof: `Nft::PublicMintInfo` (`max_values`: None, `max_size`: Some(34), added: 2509, mode: `MaxEncodedLen`) // Storage: `Nft::UtilityFlags` (r:1 w:0) // Proof: `Nft::UtilityFlags` (`max_values`: None, `max_size`: Some(15), added: 2490, mode: `MaxEncodedLen`) + // Storage: `System::Account` (r:2 w:2) + // Proof: `System::Account` (`max_values`: None, `max_size`: Some(116), added: 2591, mode: `MaxEncodedLen`) + // Storage: `Nft::OwnedTokens` (r:1 w:1) + // Proof: `Nft::OwnedTokens` (`max_values`: None, `max_size`: Some(4000052), added: 4002527, mode: `MaxEncodedLen`) // Storage: `EVMChainId::ChainId` (r:1 w:0) // Proof: `EVMChainId::ChainId` (`max_values`: Some(1), `max_size`: Some(8), added: 503, mode: `MaxEncodedLen`) // Storage: `Nfi::NfiEnabled` (r:1 w:0) // Proof: `Nfi::NfiEnabled` (`max_values`: None, `max_size`: Some(529), added: 3004, mode: `MaxEncodedLen`) - fn mint() -> Weight { - Weight::from_all(74_352_000) - .saturating_add(T::DbWeight::get().reads(5)) - .saturating_add(T::DbWeight::get().writes(1)) + // Storage: `Nft::TokenInfo` (r:0 w:500) + // Proof: `Nft::TokenInfo` (`max_values`: None, 
`max_size`: Some(65), added: 2540, mode: `MaxEncodedLen`) + /// The range of component `p` is `[1, 500]`. + fn mint(p: u32, ) -> Weight { + Weight::from_all(192_632_016) + // Standard Error: 6_419 + .saturating_add(Weight::from_all(3_818_810_u64).saturating_mul(p as u64)) + .saturating_add(T::DbWeight::get().reads(9)) + .saturating_add(T::DbWeight::get().writes(4)) + .saturating_add(T::DbWeight::get().writes((1_u64).saturating_mul(p as u64))) } + // Storage: `Migration::Status` (r:1 w:0) + // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Nft::UtilityFlags` (r:1 w:0) // Proof: `Nft::UtilityFlags` (`max_values`: None, `max_size`: Some(15), added: 2490, mode: `MaxEncodedLen`) - // Storage: `Nft::CollectionInfo` (r:1 w:1) - // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) - // Storage: `Nft::TokenLocks` (r:500 w:0) - // Proof: `Nft::TokenLocks` (`max_values`: None, `max_size`: Some(33), added: 2508, mode: `MaxEncodedLen`) - // Storage: `Nft::TokenUtilityFlags` (r:500 w:0) - // Proof: `Nft::TokenUtilityFlags` (`max_values`: None, `max_size`: Some(19), added: 2494, mode: `MaxEncodedLen`) + // Storage: `Nft::CollectionInfo` (r:1 w:0) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) + // Storage: `Nft::TokenInfo` (r:500 w:500) + // Proof: `Nft::TokenInfo` (`max_values`: None, `max_size`: Some(65), added: 2540, mode: `MaxEncodedLen`) + // Storage: `Nft::OwnedTokens` (r:2 w:2) + // Proof: `Nft::OwnedTokens` (`max_values`: None, `max_size`: Some(4000052), added: 4002527, mode: `MaxEncodedLen`) // Storage: `TokenApprovals::ERC721Approvals` (r:0 w:500) // Proof: `TokenApprovals::ERC721Approvals` (`max_values`: None, `max_size`: Some(36), added: 2511, mode: `MaxEncodedLen`) /// The range of component `p` is `[1, 500]`. 
fn transfer(p: u32, ) -> Weight { - Weight::from_all(29_114_267) - // Standard Error: 8_975 - .saturating_add(Weight::from_all(9_079_327_u64).saturating_mul(p as u64)) - .saturating_add(T::DbWeight::get().reads(2)) - .saturating_add(T::DbWeight::get().reads((2_u64).saturating_mul(p as u64))) - .saturating_add(T::DbWeight::get().writes(1)) - .saturating_add(T::DbWeight::get().writes((1_u64).saturating_mul(p as u64))) + Weight::from_all(88_178_000) + // Standard Error: 17_634 + .saturating_add(Weight::from_all(12_096_955_u64).saturating_mul(p as u64)) + .saturating_add(T::DbWeight::get().reads(5)) + .saturating_add(T::DbWeight::get().reads((1_u64).saturating_mul(p as u64))) + .saturating_add(T::DbWeight::get().writes(2)) + .saturating_add(T::DbWeight::get().writes((2_u64).saturating_mul(p as u64))) } - // Storage: `Nft::TokenLocks` (r:1 w:0) - // Proof: `Nft::TokenLocks` (`max_values`: None, `max_size`: Some(33), added: 2508, mode: `MaxEncodedLen`) + // Storage: `Migration::Status` (r:1 w:0) + // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Nft::UtilityFlags` (r:1 w:0) // Proof: `Nft::UtilityFlags` (`max_values`: None, `max_size`: Some(15), added: 2490, mode: `MaxEncodedLen`) + // Storage: `Nft::CollectionInfo` (r:1 w:1) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) + // Storage: `Nft::TokenInfo` (r:1 w:1) + // Proof: `Nft::TokenInfo` (`max_values`: None, `max_size`: Some(65), added: 2540, mode: `MaxEncodedLen`) + // Storage: `Nft::OwnedTokens` (r:1 w:1) + // Proof: `Nft::OwnedTokens` (`max_values`: None, `max_size`: Some(4000052), added: 4002527, mode: `MaxEncodedLen`) // Storage: `EVMChainId::ChainId` (r:1 w:0) // Proof: `EVMChainId::ChainId` (`max_values`: Some(1), `max_size`: Some(8), added: 503, mode: `MaxEncodedLen`) // Storage: `Nfi::NfiData` (r:1 w:0) // Proof: `Nfi::NfiData` (`max_values`: None, `max_size`: Some(1166), added: 3641, mode: `MaxEncodedLen`) - // Storage: `Nft::CollectionInfo` (r:1 w:1) - // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) - // Storage: `Nft::TokenUtilityFlags` (r:1 w:0) - // Proof: `Nft::TokenUtilityFlags` (`max_values`: None, `max_size`: Some(19), added: 2494, mode: `MaxEncodedLen`) // Storage: `TokenApprovals::ERC721Approvals` (r:0 w:1) // Proof: `TokenApprovals::ERC721Approvals` (`max_values`: None, `max_size`: Some(36), added: 2511, mode: `MaxEncodedLen`) fn burn() -> Weight { - Weight::from_all(82_554_000) - .saturating_add(T::DbWeight::get().reads(6)) - .saturating_add(T::DbWeight::get().writes(2)) + Weight::from_all(94_562_000) + .saturating_add(T::DbWeight::get().reads(7)) + .saturating_add(T::DbWeight::get().writes(4)) } + // Storage: `Migration::Status` (r:1 w:0) + // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Nft::CollectionInfo` (r:1 w:0) - // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) // Storage: `Nft::UtilityFlags` (r:0 w:1) // Proof: `Nft::UtilityFlags` (`max_values`: None, `max_size`: Some(15), added: 2490, mode: `MaxEncodedLen`) fn set_utility_flags() -> Weight { - Weight::from_all(52_410_000) - .saturating_add(T::DbWeight::get().reads(1)) + Weight::from_all(52_932_000) + 
.saturating_add(T::DbWeight::get().reads(2)) .saturating_add(T::DbWeight::get().writes(1)) } + // Storage: `Migration::Status` (r:1 w:0) + // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Nft::CollectionInfo` (r:1 w:0) - // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) - // Storage: `Nft::TokenUtilityFlags` (r:1 w:1) - // Proof: `Nft::TokenUtilityFlags` (`max_values`: None, `max_size`: Some(19), added: 2494, mode: `MaxEncodedLen`) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) + // Storage: `Nft::TokenInfo` (r:1 w:1) + // Proof: `Nft::TokenInfo` (`max_values`: None, `max_size`: Some(65), added: 2540, mode: `MaxEncodedLen`) fn set_token_transferable_flag() -> Weight { - Weight::from_all(56_964_000) - .saturating_add(T::DbWeight::get().reads(2)) + Weight::from_all(57_716_000) + .saturating_add(T::DbWeight::get().reads(3)) .saturating_add(T::DbWeight::get().writes(1)) } + // Storage: `Migration::Status` (r:1 w:0) + // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Nft::CollectionInfo` (r:1 w:0) - // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) - // Storage: `Nft::PendingIssuances` (r:1 w:1) - // Proof: `Nft::PendingIssuances` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) + // Storage: `Nft::UtilityFlags` (r:1 w:0) + // Proof: `Nft::UtilityFlags` (`max_values`: None, `max_size`: Some(15), added: 2490, mode: `MaxEncodedLen`) + // Storage: `Nft::NextIssuanceId` (r:1 w:0) + // Proof: `Nft::NextIssuanceId` (`max_values`: Some(1), `max_size`: Some(8), added: 503, mode: `MaxEncodedLen`) + // Storage: `Nft::PendingIssuances` (r:0 w:1) + // Proof: `Nft::PendingIssuances` (`max_values`: None, `max_size`: Some(61), added: 2536, mode: `MaxEncodedLen`) fn issue_soulbound() -> Weight { - Weight::from_all(54_786_000) - .saturating_add(T::DbWeight::get().reads(2)) + Weight::from_all(66_051_000) + .saturating_add(T::DbWeight::get().reads(4)) .saturating_add(T::DbWeight::get().writes(1)) } + // Storage: `Migration::Status` (r:1 w:0) + // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Nft::PendingIssuances` (r:1 w:1) - // Proof: `Nft::PendingIssuances` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + // Proof: `Nft::PendingIssuances` (`max_values`: None, `max_size`: Some(61), added: 2536, mode: `MaxEncodedLen`) // Storage: `Nft::CollectionInfo` (r:1 w:1) - // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) // Storage: `Nft::UtilityFlags` (r:1 w:0) // Proof: `Nft::UtilityFlags` (`max_values`: None, `max_size`: Some(15), added: 2490, mode: `MaxEncodedLen`) + // Storage: `Nft::OwnedTokens` (r:1 w:1) + // Proof: `Nft::OwnedTokens` (`max_values`: None, `max_size`: Some(4000052), added: 4002527, mode: `MaxEncodedLen`) // Storage: `EVMChainId::ChainId` (r:1 w:0) // Proof: `EVMChainId::ChainId` (`max_values`: Some(1), `max_size`: Some(8), added: 503, mode: `MaxEncodedLen`) // Storage: 
`Nfi::NfiEnabled` (r:1 w:0) // Proof: `Nfi::NfiEnabled` (`max_values`: None, `max_size`: Some(529), added: 3004, mode: `MaxEncodedLen`) - // Storage: `Nft::TokenUtilityFlags` (r:1 w:1) - // Proof: `Nft::TokenUtilityFlags` (`max_values`: None, `max_size`: Some(19), added: 2494, mode: `MaxEncodedLen`) + // Storage: `Nft::TokenInfo` (r:0 w:1) + // Proof: `Nft::TokenInfo` (`max_values`: None, `max_size`: Some(65), added: 2540, mode: `MaxEncodedLen`) fn accept_soulbound_issuance() -> Weight { - Weight::from_all(90_260_000) - .saturating_add(T::DbWeight::get().reads(6)) - .saturating_add(T::DbWeight::get().writes(3)) + Weight::from_all(109_552_000) + .saturating_add(T::DbWeight::get().reads(7)) + .saturating_add(T::DbWeight::get().writes(4)) } } // For backwards compatibility and tests impl WeightInfo for () { + // Storage: `Migration::Status` (r:1 w:0) + // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Nft::CollectionInfo` (r:1 w:1) - // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) fn claim_unowned_collection() -> Weight { - Weight::from_all(44_993_000) - .saturating_add(RocksDbWeight::get().reads(1)) + Weight::from_all(46_084_000) + .saturating_add(RocksDbWeight::get().reads(2)) .saturating_add(RocksDbWeight::get().writes(1)) } + // Storage: `Migration::Status` (r:1 w:0) + // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Nft::CollectionInfo` (r:1 w:1) - // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) fn set_owner() -> Weight { - Weight::from_all(47_528_000) - .saturating_add(RocksDbWeight::get().reads(1)) + Weight::from_all(50_697_000) + .saturating_add(RocksDbWeight::get().reads(2)) .saturating_add(RocksDbWeight::get().writes(1)) } + // Storage: `Migration::Status` (r:1 w:0) + // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Nft::CollectionInfo` (r:1 w:1) - // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) fn set_max_issuance() -> Weight { - Weight::from_all(47_606_000) - .saturating_add(RocksDbWeight::get().reads(1)) + Weight::from_all(48_716_000) + .saturating_add(RocksDbWeight::get().reads(2)) .saturating_add(RocksDbWeight::get().writes(1)) } + // Storage: `Migration::Status` (r:1 w:0) + // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Nft::CollectionInfo` (r:1 w:1) - // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) fn set_base_uri() -> Weight { - Weight::from_all(49_609_000) - .saturating_add(RocksDbWeight::get().reads(1)) + Weight::from_all(49_881_000) + .saturating_add(RocksDbWeight::get().reads(2)) .saturating_add(RocksDbWeight::get().writes(1)) } + // Storage: `Migration::Status` (r:1 w:0) + // Proof: `Migration::Status` (`max_values`: Some(1), 
`max_size`: None, mode: `Measured`) // Storage: `Nft::CollectionInfo` (r:1 w:1) - // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) fn set_name() -> Weight { - Weight::from_all(48_103_000) - .saturating_add(RocksDbWeight::get().reads(1)) + Weight::from_all(50_127_000) + .saturating_add(RocksDbWeight::get().reads(2)) .saturating_add(RocksDbWeight::get().writes(1)) } + // Storage: `Migration::Status` (r:1 w:0) + // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Nft::CollectionInfo` (r:1 w:1) - // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) fn set_royalties_schedule() -> Weight { - Weight::from_all(48_752_000) - .saturating_add(RocksDbWeight::get().reads(1)) + Weight::from_all(50_316_000) + .saturating_add(RocksDbWeight::get().reads(2)) .saturating_add(RocksDbWeight::get().writes(1)) } + // Storage: `Migration::Status` (r:1 w:0) + // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Nft::NextCollectionId` (r:1 w:1) // Proof: `Nft::NextCollectionId` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`) + // Storage: `Nft::OwnedTokens` (r:1 w:1) + // Proof: `Nft::OwnedTokens` (`max_values`: None, `max_size`: Some(4000052), added: 4002527, mode: `MaxEncodedLen`) // Storage: `EVM::AccountCodes` (r:1 w:1) // Proof: `EVM::AccountCodes` (`max_values`: None, `max_size`: None, mode: `Measured`) // Storage: `Futurepass::DefaultProxy` (r:1 w:0) @@ -299,128 +371,168 @@ impl WeightInfo for () { // Proof: `System::Account` (`max_values`: None, `max_size`: Some(116), added: 2591, mode: `MaxEncodedLen`) // Storage: `EVM::AccountCodesMetadata` (r:0 w:1) // Proof: `EVM::AccountCodesMetadata` (`max_values`: None, `max_size`: None, mode: `Measured`) + // Storage: `Nft::TokenInfo` (r:0 w:500) + // Proof: `Nft::TokenInfo` (`max_values`: None, `max_size`: Some(65), added: 2540, mode: `MaxEncodedLen`) // Storage: `Nft::CollectionInfo` (r:0 w:1) - // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) - fn create_collection() -> Weight { - Weight::from_all(95_109_000) - .saturating_add(RocksDbWeight::get().reads(4)) - .saturating_add(RocksDbWeight::get().writes(5)) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) + /// The range of component `p` is `[1, 500]`. 
+ fn create_collection(p: u32, ) -> Weight { + Weight::from_all(103_415_723) + // Standard Error: 5_493 + .saturating_add(Weight::from_all(3_853_867_u64).saturating_mul(p as u64)) + .saturating_add(RocksDbWeight::get().reads(6)) + .saturating_add(RocksDbWeight::get().writes(6)) + .saturating_add(RocksDbWeight::get().writes((1_u64).saturating_mul(p as u64))) } + // Storage: `Migration::Status` (r:1 w:0) + // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Nft::CollectionInfo` (r:1 w:0) - // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) // Storage: `Nft::PublicMintInfo` (r:1 w:1) // Proof: `Nft::PublicMintInfo` (`max_values`: None, `max_size`: Some(34), added: 2509, mode: `MaxEncodedLen`) fn toggle_public_mint() -> Weight { - Weight::from_all(50_396_000) - .saturating_add(RocksDbWeight::get().reads(2)) + Weight::from_all(52_574_000) + .saturating_add(RocksDbWeight::get().reads(3)) .saturating_add(RocksDbWeight::get().writes(1)) } + // Storage: `Migration::Status` (r:1 w:0) + // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Nft::CollectionInfo` (r:1 w:0) - // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) // Storage: `Nft::PublicMintInfo` (r:1 w:1) // Proof: `Nft::PublicMintInfo` (`max_values`: None, `max_size`: Some(34), added: 2509, mode: `MaxEncodedLen`) fn set_mint_fee() -> Weight { - Weight::from_all(50_500_000) - .saturating_add(RocksDbWeight::get().reads(2)) + Weight::from_all(52_872_000) + .saturating_add(RocksDbWeight::get().reads(3)) .saturating_add(RocksDbWeight::get().writes(1)) } + // Storage: `Migration::Status` (r:1 w:0) + // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Nft::CollectionInfo` (r:1 w:1) - // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) // Storage: `Nft::PublicMintInfo` (r:1 w:0) // Proof: `Nft::PublicMintInfo` (`max_values`: None, `max_size`: Some(34), added: 2509, mode: `MaxEncodedLen`) // Storage: `Nft::UtilityFlags` (r:1 w:0) // Proof: `Nft::UtilityFlags` (`max_values`: None, `max_size`: Some(15), added: 2490, mode: `MaxEncodedLen`) + // Storage: `System::Account` (r:2 w:2) + // Proof: `System::Account` (`max_values`: None, `max_size`: Some(116), added: 2591, mode: `MaxEncodedLen`) + // Storage: `Nft::OwnedTokens` (r:1 w:1) + // Proof: `Nft::OwnedTokens` (`max_values`: None, `max_size`: Some(4000052), added: 4002527, mode: `MaxEncodedLen`) // Storage: `EVMChainId::ChainId` (r:1 w:0) // Proof: `EVMChainId::ChainId` (`max_values`: Some(1), `max_size`: Some(8), added: 503, mode: `MaxEncodedLen`) // Storage: `Nfi::NfiEnabled` (r:1 w:0) // Proof: `Nfi::NfiEnabled` (`max_values`: None, `max_size`: Some(529), added: 3004, mode: `MaxEncodedLen`) - fn mint() -> Weight { - Weight::from_all(74_352_000) - .saturating_add(RocksDbWeight::get().reads(5)) - .saturating_add(RocksDbWeight::get().writes(1)) + // Storage: `Nft::TokenInfo` (r:0 w:500) + // Proof: `Nft::TokenInfo` 
(`max_values`: None, `max_size`: Some(65), added: 2540, mode: `MaxEncodedLen`) + /// The range of component `p` is `[1, 500]`. + fn mint(p: u32, ) -> Weight { + Weight::from_all(192_632_016) + // Standard Error: 6_419 + .saturating_add(Weight::from_all(3_818_810_u64).saturating_mul(p as u64)) + .saturating_add(RocksDbWeight::get().reads(9)) + .saturating_add(RocksDbWeight::get().writes(4)) + .saturating_add(RocksDbWeight::get().writes((1_u64).saturating_mul(p as u64))) } + // Storage: `Migration::Status` (r:1 w:0) + // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Nft::UtilityFlags` (r:1 w:0) // Proof: `Nft::UtilityFlags` (`max_values`: None, `max_size`: Some(15), added: 2490, mode: `MaxEncodedLen`) - // Storage: `Nft::CollectionInfo` (r:1 w:1) - // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) - // Storage: `Nft::TokenLocks` (r:500 w:0) - // Proof: `Nft::TokenLocks` (`max_values`: None, `max_size`: Some(33), added: 2508, mode: `MaxEncodedLen`) - // Storage: `Nft::TokenUtilityFlags` (r:500 w:0) - // Proof: `Nft::TokenUtilityFlags` (`max_values`: None, `max_size`: Some(19), added: 2494, mode: `MaxEncodedLen`) + // Storage: `Nft::CollectionInfo` (r:1 w:0) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) + // Storage: `Nft::TokenInfo` (r:500 w:500) + // Proof: `Nft::TokenInfo` (`max_values`: None, `max_size`: Some(65), added: 2540, mode: `MaxEncodedLen`) + // Storage: `Nft::OwnedTokens` (r:2 w:2) + // Proof: `Nft::OwnedTokens` (`max_values`: None, `max_size`: Some(4000052), added: 4002527, mode: `MaxEncodedLen`) // Storage: `TokenApprovals::ERC721Approvals` (r:0 w:500) // Proof: `TokenApprovals::ERC721Approvals` (`max_values`: None, `max_size`: Some(36), added: 2511, mode: `MaxEncodedLen`) /// The range of component `p` is `[1, 500]`. 
fn transfer(p: u32, ) -> Weight { - Weight::from_all(29_114_267) - // Standard Error: 8_975 - .saturating_add(Weight::from_all(9_079_327_u64).saturating_mul(p as u64)) - .saturating_add(RocksDbWeight::get().reads(2)) - .saturating_add(RocksDbWeight::get().reads((2_u64).saturating_mul(p as u64))) - .saturating_add(RocksDbWeight::get().writes(1)) - .saturating_add(RocksDbWeight::get().writes((1_u64).saturating_mul(p as u64))) + Weight::from_all(88_178_000) + // Standard Error: 17_634 + .saturating_add(Weight::from_all(12_096_955_u64).saturating_mul(p as u64)) + .saturating_add(RocksDbWeight::get().reads(5)) + .saturating_add(RocksDbWeight::get().reads((1_u64).saturating_mul(p as u64))) + .saturating_add(RocksDbWeight::get().writes(2)) + .saturating_add(RocksDbWeight::get().writes((2_u64).saturating_mul(p as u64))) } - // Storage: `Nft::TokenLocks` (r:1 w:0) - // Proof: `Nft::TokenLocks` (`max_values`: None, `max_size`: Some(33), added: 2508, mode: `MaxEncodedLen`) + // Storage: `Migration::Status` (r:1 w:0) + // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Nft::UtilityFlags` (r:1 w:0) // Proof: `Nft::UtilityFlags` (`max_values`: None, `max_size`: Some(15), added: 2490, mode: `MaxEncodedLen`) + // Storage: `Nft::CollectionInfo` (r:1 w:1) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) + // Storage: `Nft::TokenInfo` (r:1 w:1) + // Proof: `Nft::TokenInfo` (`max_values`: None, `max_size`: Some(65), added: 2540, mode: `MaxEncodedLen`) + // Storage: `Nft::OwnedTokens` (r:1 w:1) + // Proof: `Nft::OwnedTokens` (`max_values`: None, `max_size`: Some(4000052), added: 4002527, mode: `MaxEncodedLen`) // Storage: `EVMChainId::ChainId` (r:1 w:0) // Proof: `EVMChainId::ChainId` (`max_values`: Some(1), `max_size`: Some(8), added: 503, mode: `MaxEncodedLen`) // Storage: `Nfi::NfiData` (r:1 w:0) // Proof: `Nfi::NfiData` (`max_values`: None, `max_size`: Some(1166), added: 3641, mode: `MaxEncodedLen`) - // Storage: `Nft::CollectionInfo` (r:1 w:1) - // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) - // Storage: `Nft::TokenUtilityFlags` (r:1 w:0) - // Proof: `Nft::TokenUtilityFlags` (`max_values`: None, `max_size`: Some(19), added: 2494, mode: `MaxEncodedLen`) // Storage: `TokenApprovals::ERC721Approvals` (r:0 w:1) // Proof: `TokenApprovals::ERC721Approvals` (`max_values`: None, `max_size`: Some(36), added: 2511, mode: `MaxEncodedLen`) fn burn() -> Weight { - Weight::from_all(82_554_000) - .saturating_add(RocksDbWeight::get().reads(6)) - .saturating_add(RocksDbWeight::get().writes(2)) + Weight::from_all(94_562_000) + .saturating_add(RocksDbWeight::get().reads(7)) + .saturating_add(RocksDbWeight::get().writes(4)) } + // Storage: `Migration::Status` (r:1 w:0) + // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Nft::CollectionInfo` (r:1 w:0) - // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) // Storage: `Nft::UtilityFlags` (r:0 w:1) // Proof: `Nft::UtilityFlags` (`max_values`: None, `max_size`: Some(15), added: 2490, mode: `MaxEncodedLen`) fn set_utility_flags() -> Weight { - Weight::from_all(52_410_000) - .saturating_add(RocksDbWeight::get().reads(1)) + Weight::from_all(52_932_000) + 
.saturating_add(RocksDbWeight::get().reads(2)) .saturating_add(RocksDbWeight::get().writes(1)) } + // Storage: `Migration::Status` (r:1 w:0) + // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Nft::CollectionInfo` (r:1 w:0) - // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) - // Storage: `Nft::TokenUtilityFlags` (r:1 w:1) - // Proof: `Nft::TokenUtilityFlags` (`max_values`: None, `max_size`: Some(19), added: 2494, mode: `MaxEncodedLen`) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) + // Storage: `Nft::TokenInfo` (r:1 w:1) + // Proof: `Nft::TokenInfo` (`max_values`: None, `max_size`: Some(65), added: 2540, mode: `MaxEncodedLen`) fn set_token_transferable_flag() -> Weight { - Weight::from_all(56_964_000) - .saturating_add(RocksDbWeight::get().reads(2)) + Weight::from_all(57_716_000) + .saturating_add(RocksDbWeight::get().reads(3)) .saturating_add(RocksDbWeight::get().writes(1)) } + // Storage: `Migration::Status` (r:1 w:0) + // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Nft::CollectionInfo` (r:1 w:0) - // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) - // Storage: `Nft::PendingIssuances` (r:1 w:1) - // Proof: `Nft::PendingIssuances` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) + // Storage: `Nft::UtilityFlags` (r:1 w:0) + // Proof: `Nft::UtilityFlags` (`max_values`: None, `max_size`: Some(15), added: 2490, mode: `MaxEncodedLen`) + // Storage: `Nft::NextIssuanceId` (r:1 w:0) + // Proof: `Nft::NextIssuanceId` (`max_values`: Some(1), `max_size`: Some(8), added: 503, mode: `MaxEncodedLen`) + // Storage: `Nft::PendingIssuances` (r:0 w:1) + // Proof: `Nft::PendingIssuances` (`max_values`: None, `max_size`: Some(61), added: 2536, mode: `MaxEncodedLen`) fn issue_soulbound() -> Weight { - Weight::from_all(54_786_000) - .saturating_add(RocksDbWeight::get().reads(2)) + Weight::from_all(66_051_000) + .saturating_add(RocksDbWeight::get().reads(4)) .saturating_add(RocksDbWeight::get().writes(1)) } + // Storage: `Migration::Status` (r:1 w:0) + // Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) // Storage: `Nft::PendingIssuances` (r:1 w:1) - // Proof: `Nft::PendingIssuances` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + // Proof: `Nft::PendingIssuances` (`max_values`: None, `max_size`: Some(61), added: 2536, mode: `MaxEncodedLen`) // Storage: `Nft::CollectionInfo` (r:1 w:1) - // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + // Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) // Storage: `Nft::UtilityFlags` (r:1 w:0) // Proof: `Nft::UtilityFlags` (`max_values`: None, `max_size`: Some(15), added: 2490, mode: `MaxEncodedLen`) + // Storage: `Nft::OwnedTokens` (r:1 w:1) + // Proof: `Nft::OwnedTokens` (`max_values`: None, `max_size`: Some(4000052), added: 4002527, mode: `MaxEncodedLen`) // Storage: `EVMChainId::ChainId` (r:1 w:0) // Proof: `EVMChainId::ChainId` (`max_values`: Some(1), `max_size`: Some(8), added: 503, mode: `MaxEncodedLen`) 
// Storage: `Nfi::NfiEnabled` (r:1 w:0) // Proof: `Nfi::NfiEnabled` (`max_values`: None, `max_size`: Some(529), added: 3004, mode: `MaxEncodedLen`) - // Storage: `Nft::TokenUtilityFlags` (r:1 w:1) - // Proof: `Nft::TokenUtilityFlags` (`max_values`: None, `max_size`: Some(19), added: 2494, mode: `MaxEncodedLen`) + // Storage: `Nft::TokenInfo` (r:0 w:1) + // Proof: `Nft::TokenInfo` (`max_values`: None, `max_size`: Some(65), added: 2540, mode: `MaxEncodedLen`) fn accept_soulbound_issuance() -> Weight { - Weight::from_all(90_260_000) - .saturating_add(RocksDbWeight::get().reads(6)) - .saturating_add(RocksDbWeight::get().writes(3)) + Weight::from_all(109_552_000) + .saturating_add(RocksDbWeight::get().reads(7)) + .saturating_add(RocksDbWeight::get().writes(4)) } } diff --git a/pallet/sft/src/benchmarking.rs b/pallet/sft/src/benchmarking.rs index fc95793f2..cd060f602 100644 --- a/pallet/sft/src/benchmarking.rs +++ b/pallet/sft/src/benchmarking.rs @@ -314,6 +314,7 @@ benchmarks! { let mut tokens = vec![]; let collection_id = build_collection::(Some(owner.clone())); + let issuance_id = NextIssuanceId::::get(); for serial_number in 0..p { let token_name = bounded_string::("test-token"); @@ -328,7 +329,6 @@ benchmarks! { )); let serial_numbers = (serial_number, u128::MAX); - tokens.push(serial_numbers); assert_ok!(Sft::::set_token_burn_authority( @@ -337,14 +337,13 @@ benchmarks! { TokenBurnAuthority::Both, )); } - }: _(origin::(&account::("Alice")), collection_id, BoundedVec::try_from(tokens).unwrap(), account::("Bob")) + let bounded_serials = BoundedVec::truncate_from(tokens); + }: _(origin::(&account::("Alice")), collection_id, bounded_serials.clone(), account::("Bob")) verify { - let pending_issuances = - &PendingIssuances::::get(collection_id).pending_issuances[0].1; assert_eq!( - pending_issuances.len(), - 1, + PendingIssuances::::get((collection_id, &account::("Bob"), issuance_id)), + Some(bounded_serials), ) } diff --git a/pallet/sft/src/lib.rs b/pallet/sft/src/lib.rs index a030bddf3..7af9b5972 100644 --- a/pallet/sft/src/lib.rs +++ b/pallet/sft/src/lib.rs @@ -54,9 +54,10 @@ pub mod pallet { use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; use seed_pallet_common::utils::TokenBurnAuthority; + use seed_primitives::IssuanceId; /// The current storage version.
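The `PendingIssuances` rework in the hunks that follow replaces the per-collection `SftCollectionPendingIssuances` aggregate with a storage map keyed directly by `(collection, owner, issuance id)`. The flattened diff has dropped the `NMapKey` type parameters; a plausible reconstruction of the new declaration (key order taken from the `get((collection_id, &who, issuance_id))` call sites; `Twox64Concat` is an assumption carried over from the old map's hasher):

```rust
#[pallet::storage]
pub type PendingIssuances<T: Config> = StorageNMap<
    _,
    (
        NMapKey<Twox64Concat, CollectionUuid>, // collection
        NMapKey<Twox64Concat, T::AccountId>,   // token owner
        NMapKey<Twox64Concat, IssuanceId>,     // issuance id
    ),
    BoundedVec<(SerialNumber, Balance), T::MaxSerialsPerMint>,
>;
```

Because the new map has no `ValueQuery`, a miss returns `None`, which is why `accept_soulbound_issuance` chains `.ok_or(Error::<T>::InvalidPendingIssuance)` instead of calling a getter on an always-present struct.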
- const STORAGE_VERSION: StorageVersion = StorageVersion::new(0); + const STORAGE_VERSION: StorageVersion = StorageVersion::new(1); #[pallet::pallet] #[pallet::storage_version(STORAGE_VERSION)] @@ -134,18 +135,20 @@ pub mod pallet { - // Map from a collection id to a collection's pending issuances + // Map from (collection id, token owner, issuance id) to the pending issuance's serial numbers and balances #[pallet::storage] - pub type PendingIssuances = StorageMap< + pub type PendingIssuances = StorageNMap< _, - Twox64Concat, - CollectionUuid, - SftCollectionPendingIssuances< - T::AccountId, - T::MaxSerialsPerMint, - T::MaxSftPendingIssuances, - >, - ValueQuery, + ( + NMapKey, + NMapKey, + NMapKey, + ), + BoundedVec<(SerialNumber, Balance), T::MaxSerialsPerMint>, >; + /// The next available incrementing issuance ID, unique across all pending issuances + #[pallet::storage] + pub type NextIssuanceId = StorageValue<_, IssuanceId, ValueQuery>; + #[pallet::event] #[pallet::generate_deposit(pub (super) fn deposit_event)] pub enum Event { @@ -254,7 +257,7 @@ pub mod pallet { /// A pending issuance for a soulbound token has been created PendingIssuanceCreated { collection_id: CollectionUuid, - issuance_id: u32, + issuance_id: IssuanceId, serial_numbers: BoundedVec, balances: BoundedVec, token_owner: T::AccountId, @@ -747,36 +750,30 @@ pub mod pallet { serial_numbers.clone(), )?; - >::try_mutate( + let issuance_id = NextIssuanceId::::get(); + for (serial_number, _) in serial_numbers.iter() { + // ensure burn authority has been pre declared + ensure!( + >::get((collection_id, serial_number)) + .burn_authority + .is_some(), + Error::::NoBurnAuthority + ); + } + >::insert( + (collection_id, &token_owner, issuance_id), + &serial_numbers, + ); + // advance the counter so the next pending issuance gets a fresh id + NextIssuanceId::::put(issuance_id.saturating_add(1)); + let (serial_numbers, balances) = Self::unzip_serial_numbers(serial_numbers); + Self::deposit_event(Event::::PendingIssuanceCreated { collection_id, - |pending_issuances| -> DispatchResult { - for (serial_number, _) in serial_numbers.iter() { - // ensure burn authority has been pre declared - ensure!( - >::get((collection_id, serial_number)) - .burn_authority - .is_some(), - Error::::NoBurnAuthority - ); - } - - let issuance_id = pending_issuances - .insert_pending_issuance(&token_owner, serial_numbers.clone()) - .map_err(Error::::from)?; - - let (serial_numbers, balances) = Self::unzip_serial_numbers(serial_numbers); - - Self::deposit_event(Event::::PendingIssuanceCreated { - collection_id, - issuance_id, - serial_numbers, - balances, - token_owner: token_owner.clone(), - }); - - Ok(()) - }, - )?; + issuance_id, + serial_numbers, + balances, + token_owner: token_owner.clone(), + }); Ok(()) } @@ -788,12 +783,11 @@ pub mod pallet { pub fn accept_soulbound_issuance( origin: OriginFor, collection_id: CollectionUuid, - issuance_id: u32, + issuance_id: IssuanceId, ) -> DispatchResult { let who = ensure_signed(origin)?; - let pending_issuance = >::get(collection_id) - .get_pending_issuance(&who, issuance_id) + let serial_numbers = >::get((collection_id, &who, issuance_id)) .ok_or(Error::::InvalidPendingIssuance)?; let sft_collection_info = @@ -803,19 +797,18 @@ pub mod pallet { sft_collection_info.collection_owner.clone(), collection_id, sft_collection_info.clone(), - pending_issuance.serial_numbers.clone(), + serial_numbers.clone(), )?; Self::do_mint( sft_collection_info.collection_owner.clone(), collection_id, sft_collection_info, - pending_issuance.serial_numbers.clone(), + serial_numbers.clone(), Some(who.clone()), )?; - let (serial_numbers, balances) = - Self::unzip_serial_numbers(pending_issuance.serial_numbers); + let (serial_numbers, balances) =
Self::unzip_serial_numbers(serial_numbers); Self::deposit_event(Event::::Issued { token_owner: who.clone(), @@ -824,26 +817,9 @@ pub mod pallet { }); // remove the pending issuance - >::try_mutate( - collection_id, - |pending_issuances| -> DispatchResult { - pending_issuances.remove_pending_issuance(&who, issuance_id); - - Ok(()) - }, - )?; + >::remove((collection_id, &who, issuance_id)); Ok(()) } } } - -impl From for Error { - fn from(val: SftPendingIssuanceError) -> Error { - match val { - SftPendingIssuanceError::PendingIssuanceLimitExceeded => { - Error::::PendingIssuanceLimitExceeded - }, - } - } -} diff --git a/pallet/sft/src/tests.rs b/pallet/sft/src/tests.rs index 6cb62f832..46f1bdabe 100644 --- a/pallet/sft/src/tests.rs +++ b/pallet/sft/src/tests.rs @@ -3025,7 +3025,7 @@ mod set_token_transferable_flag { } mod soulbound_token { - use crate::{PendingIssuances, SftPendingIssuance}; + use crate::PendingIssuances; use super::*; @@ -3167,9 +3167,8 @@ mod soulbound_token { )); assert_eq!( - PendingIssuances::::get(collection_id) - .get_pending_issuance(&token_owner, issuance_id), - Some(SftPendingIssuance { issuance_id, serial_numbers: serial_numbers.clone() }) + PendingIssuances::::get((collection_id, &token_owner, issuance_id)), + Some(serial_numbers.clone()) ); let (serial_numbers, balances) = Sft::unzip_serial_numbers(serial_numbers); @@ -3191,6 +3190,10 @@ mod soulbound_token { issuance_id )); + assert!( + PendingIssuances::::get((collection_id, &token_owner, issuance_id)).is_none() + ); + System::assert_last_event( Event::::Issued { token_owner, serial_numbers, balances }.into(), ); diff --git a/pallet/sft/src/types.rs b/pallet/sft/src/types.rs index 755cb7450..6ebcf4e2e 100644 --- a/pallet/sft/src/types.rs +++ b/pallet/sft/src/types.rs @@ -20,9 +20,7 @@ use crate::{Config, Error}; use codec::{Decode, Encode, MaxEncodedLen}; use frame_support::{traits::Get, CloneNoBound, PartialEqNoBound, RuntimeDebugNoBound}; use scale_info::TypeInfo; -use seed_primitives::{ - Balance, IssuanceId, MetadataScheme, OriginChain, RoyaltiesSchedule, SerialNumber, -}; +use seed_primitives::{Balance, MetadataScheme, OriginChain, RoyaltiesSchedule, SerialNumber}; use sp_runtime::BoundedVec; use sp_std::{fmt::Debug, prelude::*}; @@ -247,131 +245,3 @@ impl SftTokenBalance { Ok(()) } } - -#[derive( - PartialEqNoBound, RuntimeDebugNoBound, CloneNoBound, Encode, Decode, TypeInfo, MaxEncodedLen, -)] -#[scale_info(skip_type_params(MaxSerialsPerMint))] -pub struct SftPendingIssuance> { - pub issuance_id: IssuanceId, - pub serial_numbers: BoundedVec<(SerialNumber, Balance), MaxSerialsPerMint>, -} - -pub enum SftPendingIssuanceError { - PendingIssuanceLimitExceeded, -} - -/// The state of a sft collection's pending issuances -#[derive( - PartialEqNoBound, RuntimeDebugNoBound, CloneNoBound, Encode, Decode, TypeInfo, MaxEncodedLen, -)] -#[codec(mel_bound(AccountId: MaxEncodedLen))] -#[scale_info(skip_type_params(MaxSerialsPerMint, MaxPendingIssuances))] -pub struct SftCollectionPendingIssuances -where - AccountId: Debug + PartialEq + Clone, - MaxSerialsPerMint: Get, - MaxPendingIssuances: Get, -{ - pub next_issuance_id: IssuanceId, - pub pending_issuances: BoundedVec< - (AccountId, BoundedVec, MaxPendingIssuances>), - MaxPendingIssuances, - >, -} - -impl Default - for SftCollectionPendingIssuances -where - AccountId: Debug + PartialEq + Clone, - MaxSerialsPerMint: Get, - MaxPendingIssuances: Get, -{ - fn default() -> Self { - SftCollectionPendingIssuances { next_issuance_id: 0, pending_issuances: BoundedVec::new() } 
- } -} - -impl - SftCollectionPendingIssuances -where - AccountId: Debug + PartialEq + Clone, - MaxSerialsPerMint: Get, - MaxPendingIssuances: Get, -{ - /// Inserts a new pending issuance for a token owner - pub fn insert_pending_issuance( - &mut self, - token_owner: &AccountId, - serial_numbers: BoundedVec<(SerialNumber, Balance), MaxSerialsPerMint>, - ) -> Result { - let issuance_id = self.next_issuance_id; - let pending_issuance = SftPendingIssuance { issuance_id, serial_numbers }; - - if let Some(account_pending_issuances) = - self.pending_issuances.iter_mut().find(|p| &p.0 == token_owner) - { - account_pending_issuances - .1 - .try_push(pending_issuance) - .map_err(|_| SftPendingIssuanceError::PendingIssuanceLimitExceeded)?; - } else { - // create new entry - let mut new_account_issuance = BoundedVec::new(); - new_account_issuance.force_push(pending_issuance); - - self.pending_issuances - .try_push((token_owner.clone(), new_account_issuance)) - .map_err(|_| SftPendingIssuanceError::PendingIssuanceLimitExceeded)?; - } - - self.next_issuance_id = self.next_issuance_id.saturating_add(1); - - Ok(issuance_id) - } - - /// Gets the pending issuance by the token owner and issuance id - pub fn get_pending_issuance( - &self, - token_owner: &AccountId, - issuance_id: IssuanceId, - ) -> Option> { - let account_pending_issuances = self - .pending_issuances - .iter() - .find(|pending_issuance| &pending_issuance.0 == token_owner)?; - - let pending_issuance = - account_pending_issuances.1.iter().find(|p| p.issuance_id == issuance_id)?; - - Some(pending_issuance.clone()) - } - - /// Removes a pending issuance for a token owner - pub fn remove_pending_issuance(&mut self, token_owner: &AccountId, issuance_id: IssuanceId) { - for account_pending_issuance in self.pending_issuances.iter_mut() { - if &account_pending_issuance.0 != token_owner { - continue; - } - - account_pending_issuance.1.retain(|p| p.issuance_id != issuance_id); - break; - } - } - - /// Gets all pending issuances for a token owner - pub fn get_pending_issuances( - &self, - token_owner: &AccountId, - ) -> Vec> { - if let Some(account_pending_issuances) = self - .pending_issuances - .iter() - .find(|pending_issuance| &pending_issuance.0 == token_owner) - { - return account_pending_issuances.1.to_vec(); - } - - vec![] - } -} diff --git a/pallet/xls20/src/benchmarking.rs b/pallet/xls20/src/benchmarking.rs index 0f132ab05..ecdb52d0f 100644 --- a/pallet/xls20/src/benchmarking.rs +++ b/pallet/xls20/src/benchmarking.rs @@ -128,7 +128,6 @@ benchmarks! 
{ next_serial_number: 3429, collection_issuance: 0, cross_chain_compatibility: CrossChainCompatibility::default(), - owned_tokens: BoundedVec::truncate_from(vec![]), // No tokens }; >::insert(collection_id, collection_info); CollectionMapping::::insert(xls20_collection, collection_id); diff --git a/pallet/xls20/src/lib.rs b/pallet/xls20/src/lib.rs index a67245721..f1c9b6359 100644 --- a/pallet/xls20/src/lib.rs +++ b/pallet/xls20/src/lib.rs @@ -212,14 +212,17 @@ pub mod pallet { let collection_info = T::NFTCollectionInfo::get_collection_info(collection_id)?; // Caller must be collection owner - ensure!(collection_info.is_collection_owner(&who), Error::::NotCollectionOwner); + ensure!(collection_info.owner == who, Error::::NotCollectionOwner); // Must be an XLS-20 compatible collection ensure!(collection_info.cross_chain_compatibility.xrpl, Error::::NotXLS20Compatible); // Check whether token exists but mapping does not exist for serial_number in serial_numbers.iter() { - ensure!(collection_info.token_exists(*serial_number), Error::::NoToken); + ensure!( + T::NFTExt::token_exists(&(collection_id, *serial_number)), + Error::::NoToken + ); ensure!( !Xls20TokenMap::::contains_key(collection_id, serial_number), Error::::MappingAlreadyExists @@ -255,11 +258,14 @@ pub mod pallet { // Ensure only relayer can call extrinsic ensure!(Some(who) == Relayer::::get(), Error::::NotRelayer); - let collection_info = T::NFTCollectionInfo::get_collection_info(collection_id)?; + let _ = T::NFTCollectionInfo::get_collection_info(collection_id)?; for (serial_number, xls20_token_id) in token_mappings.iter() { // Ensure token exists on TRN - ensure!(collection_info.token_exists(*serial_number), Error::::NoToken); + ensure!( + T::NFTExt::token_exists(&(collection_id, *serial_number)), + Error::::NoToken + ); // Ensure mapping doesn't already exist ensure!( !Xls20TokenMap::::contains_key(collection_id, serial_number), @@ -369,8 +375,6 @@ impl Xls20Ext for Pallet { receiver: &Self::AccountId, xls20_token_id: Xls20TokenId, ) -> WeightedDispatchResult { - // Ensure the migration is complete - T::Migrator::ensure_migrated().map_err(|e| (Weight::zero(), e))?; let xls20_token = Xls20Token::from(xls20_token_id); // Check flag is not burnable, if the burnable flag is set then the issuer can @@ -425,8 +429,6 @@ impl Xls20Ext for Pallet { } fn get_xls20_token_id(token_id: TokenId) -> Option { - // Ensure the migration is complete - T::Migrator::ensure_migrated().ok()?; Xls20TokenMap::::get(token_id.0, token_id.1) } diff --git a/pallet/xls20/src/mock.rs b/pallet/xls20/src/mock.rs index e70f7a0ef..856356033 100644 --- a/pallet/xls20/src/mock.rs +++ b/pallet/xls20/src/mock.rs @@ -74,6 +74,7 @@ impl pallet_nft::Config for Test { type Xls20MintRequest = Xls20; type NFIRequest = (); type MaxPendingIssuances = MaxPendingIssuances; + type Migrator = (); } parameter_types! 
{ diff --git a/pallet/xrpl-bridge/src/tests.rs b/pallet/xrpl-bridge/src/tests.rs index 98d385005..9a68a6441 100644 --- a/pallet/xrpl-bridge/src/tests.rs +++ b/pallet/xrpl-bridge/src/tests.rs @@ -4393,7 +4393,7 @@ fn withdraw_nft_more_failure_scenarios() { (nft_collection_id, 0 + 100), // non existent destination, ), - pallet_nft::Error::::NotTokenOwner + pallet_nft::Error::::NoToken ); }); } diff --git a/primitives/src/lib.rs b/primitives/src/lib.rs index 1af17a27b..f992e1827 100644 --- a/primitives/src/lib.rs +++ b/primitives/src/lib.rs @@ -106,7 +106,7 @@ pub mod types { pub type WeightedDispatchResult = Result; /// Identifier for a pending issuance of a soulbound token - pub type IssuanceId = u32; + pub type IssuanceId = u64; } /// Opaque types. These are used by the CLI to instantiate machinery that don't need to know diff --git a/primitives/src/migration.rs b/primitives/src/migration.rs index 6d7143392..ea4fe680f 100644 --- a/primitives/src/migration.rs +++ b/primitives/src/migration.rs @@ -11,8 +11,8 @@ pub struct MigrationStepResult { impl MigrationStepResult { /// Generate a MigrationStepResult for a non-final step - pub fn continue_step(weight_consumed: Weight, last_key: Vec) -> Self { - Self { is_finished: false, weight_consumed, last_key: Some(last_key) } + pub fn continue_step(weight_consumed: Weight, last_key: Option>) -> Self { + Self { is_finished: false, weight_consumed, last_key } } /// Generate a MigrationStepResult for the final step diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index aaffa1ce3..bcf548c97 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -145,7 +145,6 @@ mod migrations; mod weights; use precompile_utils::constants::FEE_PROXY_ADDRESS; -use seed_primitives::migration::NoopMigration; #[cfg(test)] mod tests; @@ -475,6 +474,7 @@ impl pallet_nft::Config for Runtime { type Xls20MintRequest = Xls20; type NFIRequest = Nfi; type MaxPendingIssuances = MaxPendingIssuances; + type Migrator = Migration; } parameter_types! { @@ -1418,7 +1418,7 @@ parameter_types! { impl pallet_migration::Config for Runtime { type RuntimeEvent = RuntimeEvent; // Set to NoopMigration if no migration is in progress - type CurrentMigration = NoopMigration; + type CurrentMigration = migrations::nft_multi::NftMigration; type MaxMigrationWeight = MaxMigrationWeight; type WeightInfo = weights::pallet_migration::WeightInfo; } @@ -2347,6 +2347,7 @@ fn validate_self_contained_inner( #[cfg(feature = "runtime-benchmarks")] #[macro_use] extern crate frame_benchmarking; +extern crate core; #[cfg(feature = "runtime-benchmarks")] mod benches { diff --git a/runtime/src/migrations/mod.rs b/runtime/src/migrations/mod.rs index 422db5c75..3b649fec8 100644 --- a/runtime/src/migrations/mod.rs +++ b/runtime/src/migrations/mod.rs @@ -13,6 +13,10 @@ // limitations under the License. 
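The `MigrationStepResult::continue_step` change above takes the cursor as `Option<Vec<u8>>`, so a step that makes progress but stays inside the same collection can hand back the key it was given unchanged instead of re-wrapping it. A hypothetical driver loop showing the handoff this enables (the real scheduling lives in pallet-migration; public field access on `MigrationStepResult` is assumed here):

```rust
use frame_support::weights::Weight;
use seed_primitives::migration::{MigrationStep, MigrationStepResult};
use sp_std::vec::Vec;

// Hypothetical driver: feed the last cursor back into `step` until the
// migration reports completion or the weight budget is exhausted.
fn drive<M: MigrationStep>(budget: Weight) {
    let mut cursor: Option<Vec<u8>> = None;
    let mut spent = Weight::zero();
    // stop before a step could overshoot the remaining budget
    while spent.saturating_add(M::max_step_weight()).all_lte(budget) {
        // assumes pub fields on MigrationStepResult
        let result = M::step(cursor.take());
        spent = spent.saturating_add(result.weight_consumed);
        if result.is_finished {
            M::on_complete(); // bumps the storage version to TARGET_VERSION
            break;
        }
        cursor = result.last_key;
    }
}
```

Reserving `max_step_weight()` before each call is the same idea `MaxMigrationWeight` expresses in the runtime config: no single step may push a block over its migration budget.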
// You may obtain a copy of the License at the root of this project source code +mod nft; +pub mod nft_multi; +mod sft; + use codec::{Decode, Encode, FullCodec, FullEncode}; use frame_support::{ migration::{ @@ -32,15 +36,21 @@ pub struct AllMigrations; impl OnRuntimeUpgrade for AllMigrations { #[cfg(feature = "try-runtime")] fn pre_upgrade() -> Result, DispatchError> { - Ok(Vec::new()) + let mut v = sft::Upgrade::pre_upgrade()?; + v.extend(nft::Upgrade::pre_upgrade()?); + Ok(v) } fn on_runtime_upgrade() -> Weight { - Weight::zero() + let mut weight = sft::Upgrade::on_runtime_upgrade(); + weight += nft::Upgrade::on_runtime_upgrade(); + weight } #[cfg(feature = "try-runtime")] - fn post_upgrade(_state: Vec) -> Result<(), DispatchError> { + fn post_upgrade(state: Vec) -> Result<(), DispatchError> { + sft::Upgrade::post_upgrade(state.clone())?; + nft::Upgrade::post_upgrade(state)?; Ok(()) } } diff --git a/runtime/src/migrations/nft.rs b/runtime/src/migrations/nft.rs new file mode 100644 index 000000000..56f639152 --- /dev/null +++ b/runtime/src/migrations/nft.rs @@ -0,0 +1,198 @@ +// Copyright 2022-2023 Futureverse Corporation Limited +// +// Licensed under the LGPL, Version 3.0 (the "License"); +// you may not use this file except in compliance with the License. +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// You may obtain a copy of the License at the root of this project source code + +use crate::{Nft, Runtime, Weight}; +use frame_support::{ + dispatch::GetStorageVersion, + traits::{OnRuntimeUpgrade, StorageVersion}, +}; + +#[allow(unused_imports)] +use sp_runtime::DispatchError; +#[allow(unused_imports)] +use sp_std::vec::Vec; + +pub struct Upgrade; + +impl OnRuntimeUpgrade for Upgrade { + fn on_runtime_upgrade() -> Weight { + let current = Nft::current_storage_version(); + let onchain = Nft::on_chain_storage_version(); + log::info!(target: "Migration", "Nft: Running migration with current storage version {current:?} / on-chain {onchain:?}"); + + let mut weight = ::DbWeight::get().reads(2); + + if onchain != 7 { + log::info!( + target: "Migration", + "Nft: No migration was done, This migration should be on top of storage version 7. Migration code needs to be removed." 
+ ); + return weight; + } + + log::info!(target: "Migration", "Nft: Migrating from on-chain version {onchain:?} to on-chain version 8."); + weight += v8::migrate::(); + StorageVersion::new(8).put::(); + log::info!(target: "Migration", "Nft: Migration successfully completed."); + weight + } + + #[cfg(feature = "try-runtime")] + fn pre_upgrade() -> Result, DispatchError> { + log::info!(target: "Migration", "Nft: Upgrade to v8 Pre Upgrade."); + let onchain = Nft::on_chain_storage_version(); + // Return Ok(()) if the upgrade has already been done + if onchain == 8 { + return Ok(Vec::new()); + } + assert_eq!(onchain, 7); + + Ok(Vec::new()) + } + + #[cfg(feature = "try-runtime")] + fn post_upgrade(_state: Vec) -> Result<(), DispatchError> { + log::info!(target: "Migration", "Nft: Upgrade to v8 Post Upgrade."); + let current = Nft::current_storage_version(); + let onchain = Nft::on_chain_storage_version(); + assert_eq!(current, 8); + assert_eq!(onchain, 8); + Ok(()) + } +} + +#[allow(dead_code)] +#[allow(unused_imports)] +pub mod v8 { + use super::*; + use crate::migrations::Value; + use codec::{Decode, Encode, MaxEncodedLen}; + use core::fmt::Debug; + use frame_support::pallet_prelude::ValueQuery; + use frame_support::weights::Weight; + use frame_support::{storage_alias, CloneNoBound, PartialEqNoBound, RuntimeDebugNoBound}; + use frame_support::{BoundedVec, Twox64Concat}; + use scale_info::TypeInfo; + use seed_pallet_common::utils::TokenBurnAuthority; + use seed_primitives::{Balance, CollectionUuid, SerialNumber, TokenId}; + use sp_core::{Get, H160}; + + #[frame_support::storage_alias] + pub type PendingIssuances = StorageMap< + pallet_nft::Pallet, + Twox64Concat, + CollectionUuid, + CollectionPendingIssuances< + ::AccountId, + ::MaxPendingIssuances, + >, + ValueQuery, + >; + + type IssuanceId = u32; + + #[derive( + PartialEqNoBound, RuntimeDebugNoBound, CloneNoBound, Encode, Decode, TypeInfo, MaxEncodedLen, + )] + pub struct PendingIssuance { + pub issuance_id: IssuanceId, + pub quantity: u32, + pub burn_authority: TokenBurnAuthority, + } + + #[derive( + PartialEqNoBound, RuntimeDebugNoBound, CloneNoBound, Encode, Decode, TypeInfo, MaxEncodedLen, + )] + #[codec(mel_bound(AccountId: MaxEncodedLen))] + #[scale_info(skip_type_params(MaxPendingIssuances))] + pub struct CollectionPendingIssuances> + where + AccountId: Debug + PartialEq + Clone, + MaxPendingIssuances: Get, + { + pub next_issuance_id: IssuanceId, + pub pending_issuances: BoundedVec< + (AccountId, BoundedVec), + MaxPendingIssuances, + >, + } + + impl Default + for CollectionPendingIssuances + where + AccountId: Debug + PartialEq + Clone, + MaxPendingIssuances: Get, + { + fn default() -> Self { + CollectionPendingIssuances { next_issuance_id: 0, pending_issuances: BoundedVec::new() } + } + } + + pub fn migrate() -> Weight { + log::info!(target: "Migration", "Nft: removing PendingIssuances"); + + let results = PendingIssuances::::clear(u32::MAX, None); + let weight: Weight = + ::DbWeight::get().reads_writes(results.loops as u64, results.unique as u64); + + log::info!(target: "Migration", "Nft: removal of PendingIssuances successful"); + weight + } + + #[cfg(test)] + mod tests { + use super::*; + use crate::migrations::tests::create_account; + use crate::migrations::tests::new_test_ext; + use crate::migrations::Map; + use codec::{Decode, Encode}; + use frame_support::{BoundedVec, StorageHasher}; + use scale_info::TypeInfo; + use sp_core::H256; + + #[test] + fn migrate_with_data() { + new_test_ext().execute_with(|| { + // Setup storage + StorageVersion::new(7).put::(); +
let item_count = 10_u32; + for i in 0..item_count { + let pending_issuance = CollectionPendingIssuances { + next_issuance_id: i + 5, + pending_issuances: BoundedVec::truncate_from(vec![( + create_account(i as u64), + BoundedVec::truncate_from(vec![PendingIssuance { + issuance_id: i + 4, + quantity: i, + burn_authority: TokenBurnAuthority::Both, + }]), + )]), + }; + PendingIssuances::::insert(i, pending_issuance); + } + + // Sanity check + for i in 0..item_count { + assert!(PendingIssuances::::contains_key(i)); + } + + // Do runtime upgrade + let used_weight = Upgrade::on_runtime_upgrade(); + assert_eq!(Nft::on_chain_storage_version(), 8); + + // Check storage is removed + for i in 0..item_count { + assert!(!PendingIssuances::::contains_key(i)); + } + }); + } + } +} diff --git a/runtime/src/migrations/nft_multi.rs b/runtime/src/migrations/nft_multi.rs new file mode 100644 index 000000000..421bc8cad --- /dev/null +++ b/runtime/src/migrations/nft_multi.rs @@ -0,0 +1,683 @@ +use crate::*; +use codec::{Decode, Encode, MaxEncodedLen}; +use core::fmt::Debug; +use frame_support::traits::IsType; +use frame_support::{dispatch::GetStorageVersion, traits::StorageVersion, DefaultNoBound}; +use frame_support::{ + storage_alias, CloneNoBound, PartialEqNoBound, RuntimeDebugNoBound, Twox64Concat, +}; +use pallet_migration::WeightInfo; +use pallet_nft::{CollectionInformation, TokenInformation}; +use scale_info::TypeInfo; +use seed_pallet_common::utils::TokenUtilityFlags as TokenFlags; +use seed_primitives::migration::{MigrationStep, MigrationStepResult}; +use seed_primitives::{ + CollectionUuid, CrossChainCompatibility, MetadataScheme, OriginChain, RoyaltiesSchedule, + SerialNumber, TokenCount, +}; +use sp_runtime::{BoundedVec, DispatchResult}; + +use sp_std::marker::PhantomData; + +#[allow(dead_code)] +pub(crate) const LOG_TARGET: &str = "migration"; + +/// How many tokens max can we migrate per step? 
+const MAX_TOKENS_PER_STEP: u32 = 50; + +mod old { + use super::*; + use frame_support::pallet_prelude::ValueQuery; + use seed_primitives::TokenLockReason; + + #[storage_alias] + pub type CollectionInfo = StorageMap< + pallet_nft::Pallet, + Twox64Concat, + CollectionUuid, + CollectionInformation< + ::AccountId, + ::MaxTokensPerCollection, + ::StringLimit, + >, + >; + + #[storage_alias] + pub type TokenLocks = + StorageMap, Twox64Concat, TokenId, TokenLockReason>; + + /// Map from a token_id to transferable and burn authority flags + #[storage_alias] + pub type TokenUtilityFlags = + StorageMap, Twox64Concat, TokenId, TokenFlags, ValueQuery>; + + /// Information related to a specific collection + #[derive( + PartialEqNoBound, RuntimeDebugNoBound, CloneNoBound, Encode, Decode, TypeInfo, MaxEncodedLen, + )] + #[codec(mel_bound(AccountId: MaxEncodedLen))] + #[scale_info(skip_type_params(MaxTokensPerCollection, StringLimit))] + pub struct CollectionInformation + where + AccountId: Debug + PartialEq + Clone, + MaxTokensPerCollection: Get, + StringLimit: Get, + { + /// The owner of the collection + pub owner: AccountId, + /// A human-friendly name + pub name: BoundedVec, + /// Collection metadata reference scheme + pub metadata_scheme: MetadataScheme, + /// Configured royalties schedule + pub royalties_schedule: Option>, + /// Maximum number of tokens allowed in a collection + pub max_issuance: Option, + /// The chain in which the collection was minted originally + pub origin_chain: OriginChain, + /// The next available serial_number + pub next_serial_number: SerialNumber, + /// The total count of tokens in this collection + pub collection_issuance: TokenCount, + /// This collection's compatibility with other chains + pub cross_chain_compatibility: CrossChainCompatibility, + /// All serial numbers owned by an account in a collection + pub owned_tokens: BoundedVec< + old::TokenOwnership, + MaxTokensPerCollection, + >, + } + + #[derive( + PartialEqNoBound, RuntimeDebugNoBound, Decode, Encode, CloneNoBound, TypeInfo, MaxEncodedLen, + )] + #[codec(mel_bound(AccountId: MaxEncodedLen))] + #[scale_info(skip_type_params(MaxTokensPerCollection))] + pub struct TokenOwnership + where + AccountId: Debug + PartialEq + Clone, + MaxTokensPerCollection: Get, + { + pub owner: AccountId, + pub owned_serials: BoundedVec, + } +} + +/// Convert from the old CollectionInfo type to the new type. +/// Returns true if the CollectionInfo has been completely migrated; if not, this function +/// will re-insert the old CollectionInfo into storage +fn convert( + collection_id: CollectionUuid, + old: &mut old::CollectionInformation, +) -> (bool, TokenCount) { + log::debug!(target: LOG_TARGET, "🦆 Migrating collection_id {}", collection_id); + // Is the collection info fully migrated + let mut completed = false; + let mut migrated_token_count: TokenCount = 0; + + // For simplicity, we migrate at most MAX_TOKENS_PER_STEP tokens from one account at a time + if let Some(mut ownership) = old.owned_tokens.pop() { + let mut serial_numbers = ownership.owned_serials.clone(); + // take at most MAX_TOKENS_PER_STEP serials from serial_numbers + migrated_token_count = serial_numbers.len().min(MAX_TOKENS_PER_STEP as usize) as TokenCount; + let serials_to_migrate = serial_numbers.drain(..migrated_token_count as usize).collect(); + log::debug!(target: LOG_TARGET, "🦆 Migrating {:?} tokens for owner: {:?}", serials_to_migrate, ownership.owner); + + for serial_number in &serials_to_migrate { + let token_locks = old::TokenLocks::::take((collection_id, serial_number)); + let
+			let token_utility_flags =
+				old::TokenUtilityFlags::::take((collection_id, serial_number));
+
+			let token_info = TokenInformation {
+				owner: ownership.owner.clone(),
+				lock_status: token_locks,
+				utility_flags: token_utility_flags,
+			};
+			pallet_nft::TokenInfo::::insert(collection_id, serial_number, token_info);
+		}
+
+		// Update OwnedTokens with the migrated serials
+		let _ = pallet_nft::OwnedTokens::::try_mutate(
+			&ownership.owner,
+			collection_id,
+			|owned_serials| -> DispatchResult {
+				match owned_serials.as_mut() {
+					Some(owned_serials) => {
+						for serial_number in serials_to_migrate {
+							// Force push is safe as the existing bound is the same as the new bound
+							owned_serials.force_push(serial_number);
+						}
+					},
+					None => {
+						*owned_serials = Some(BoundedVec::truncate_from(serials_to_migrate));
+					},
+				}
+				Ok(())
+			},
+		);
+
+		// If all of this user's serials have been migrated, the collection itself may be
+		// complete; otherwise reinstate the remaining serials so the next step can resume
+		if serial_numbers.is_empty() {
+			log::debug!(target: LOG_TARGET, "🦆 Migration complete for owner: {:?}", ownership.owner);
+			if old.owned_tokens.is_empty() {
+				completed = true;
+			}
+		} else {
+			log::debug!(target: LOG_TARGET, "🦆 Migration on-going for owner: {:?}", ownership.owner);
+			ownership.owned_serials = serial_numbers;
+			old.owned_tokens.force_push(ownership);
+		}
+	} else {
+		// No serials to migrate, therefore complete
+		completed = true;
+	}
+
+	if completed {
+		log::debug!(target: LOG_TARGET, "🦆 Migration complete for collection: {}", collection_id);
+		// Migration for this collection_info is complete.
+		// Delete the old collection_info and insert the new one
+		old::CollectionInfo::::remove(collection_id);
+		let new_collection_info = CollectionInformation {
+			owner: old.owner.clone(),
+			name: old.name.clone(),
+			metadata_scheme: old.metadata_scheme.clone(),
+			royalties_schedule: old.royalties_schedule.clone(),
+			max_issuance: old.max_issuance.clone(),
+			origin_chain: old.origin_chain.clone(),
+			next_serial_number: old.next_serial_number.clone(),
+			collection_issuance: old.collection_issuance.clone(),
+			cross_chain_compatibility: old.cross_chain_compatibility.clone(),
+		};
+		pallet_nft::CollectionInfo::::insert(collection_id, new_collection_info);
+	} else {
+		log::debug!(target: LOG_TARGET, "🦆 Migration on-going for collection: {}", collection_id);
+		// Re-insert the old collection_info
+		old::CollectionInfo::::insert(collection_id, old);
+	}
+
+	(completed, migrated_token_count)
+}
+
+#[derive(Encode, Decode, MaxEncodedLen, DefaultNoBound)]
+pub struct NftMigration {
+	phantom: PhantomData,
+}
+
+impl MigrationStep for NftMigration {
+	const TARGET_VERSION: u16 = 9;
+
+	fn version_check() -> bool {
+		Nft::on_chain_storage_version() == Self::TARGET_VERSION
+	}
+
+	fn on_complete() {
+		StorageVersion::new(Self::TARGET_VERSION).put::();
+	}
+
+	fn max_step_weight() -> Weight {
+		::WeightInfo::current_migration_step(MAX_TOKENS_PER_STEP)
+	}
+
+	/// Migrate up to MAX_TOKENS_PER_STEP tokens of a single collection per call
+	fn step(last_key: Option>) -> MigrationStepResult {
+		let mut iter = if let Some(last_key) = last_key.clone() {
+			old::CollectionInfo::::iter_from(last_key)
+		} else {
+			old::CollectionInfo::::iter()
+		};
+
+		if let Some((key, mut old)) = iter.next() {
+			let (completed, migrated_count) = convert::(key, old.into_mut());
+
+			// If we have completed the migration for this collection, we can move on to the next one
+			let last_key = match completed {
+				true => Some(old::CollectionInfo::::hashed_key_for(key)),
+				false => last_key,
+			};
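+			// `continue_step` hands the cursor back to the caller; a minimal sketch of the
+			// driver loop (the tests below use exactly this pattern):
+			//
+			//     let mut last_key = None;
+			//     loop {
+			//         let result = NftMigration::<Runtime>::step(last_key.take());
+			//         if result.is_finished() { break; }
+			//         last_key = result.last_key;
+			//     }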
+			MigrationStepResult::continue_step(
+				::WeightInfo::current_migration_step(migrated_count),
+				last_key,
+			)
+		} else {
+			log::debug!(target: LOG_TARGET, "🦆 No more tokens to migrate");
+			MigrationStepResult::finish_step(Self::max_step_weight())
+		}
+	}
+}
+
+#[cfg(test)]
+mod tests {
+	use super::*;
+	use crate::migrations::{tests::new_test_ext, Map};
+	use frame_support::{StorageHasher, Twox64Concat};
+	use seed_pallet_common::test_prelude::create_account;
+	use seed_pallet_common::utils::TokenBurnAuthority;
+	use seed_primitives::{ListingId, TokenLockReason};
+	type AccountId = ::AccountId;
+
+	/// Helper function to manually insert fake data into storage map
+	fn insert_old_data(
+		collection_id: CollectionUuid,
+		old_value: old::CollectionInformation<
+			AccountId,
+			MaxTokensPerCollection,
+			CollectionNameStringLimit,
+		>,
+	) {
+		let key = Twox64Concat::hash(&(collection_id).encode());
+		Map::unsafe_storage_put::<
+			old::CollectionInformation<
+				AccountId,
+				MaxTokensPerCollection,
+				CollectionNameStringLimit,
+			>,
+		>(b"Nft", b"CollectionInfo", &key, old_value);
+	}
+
+	fn insert_old_token_flags(token_id: TokenId, token_flags: TokenFlags) {
+		let key = Twox64Concat::hash(&(token_id).encode());
+		Map::unsafe_storage_put::(b"Nft", b"TokenUtilityFlags", &key, token_flags);
+	}
+
+	fn insert_old_token_locks(token_id: TokenId, token_lock_reason: TokenLockReason) {
+		let key = Twox64Concat::hash(&(token_id).encode());
+		Map::unsafe_storage_put::(b"Nft", b"TokenLocks", &key, token_lock_reason);
+	}
+
+	#[test]
+	fn migrate_single_step() {
+		new_test_ext().execute_with(|| {
+			let account = create_account(123);
+			let serials: Vec = (1..=MAX_TOKENS_PER_STEP).collect();
+			let old_token_ownership = old::TokenOwnership:: {
+				owner: account,
+				owned_serials: BoundedVec::::truncate_from(
+					serials.clone(),
+				),
+			};
+			// Add token locks and TokenFlags for each serial
+			for serial in serials.clone() {
+				let token_lock_reason = TokenLockReason::Listed(serial as ListingId);
+				insert_old_token_locks((123, serial), token_lock_reason);
+				let token_flags = TokenFlags {
+					transferable: false,
+					burn_authority: Some(TokenBurnAuthority::Both),
+				};
+				insert_old_token_flags((123, serial), token_flags);
+			}
+
+			let old = old::CollectionInformation {
+				owner: create_account(126),
+				name: BoundedVec::::truncate_from(vec![1, 2, 3, 4]),
+				metadata_scheme: MetadataScheme::try_from(b"metadata".as_slice()).unwrap(),
+				royalties_schedule: None,
+				max_issuance: Some(500),
+				origin_chain: OriginChain::Root,
+				next_serial_number: 2,
+				collection_issuance: 5,
+				cross_chain_compatibility: CrossChainCompatibility::default(),
+				owned_tokens: BoundedVec::<
+					old::TokenOwnership,
+					MaxTokensPerCollection,
+				>::truncate_from(vec![old_token_ownership.clone()]),
+			};
+			let collection_id = 123;
+			insert_old_data(collection_id, old.clone());
+
+			// Perform 1 step to migrate the user's tokens
+			let result = NftMigration::::step(None);
+			assert!(!result.is_finished());
+
+			// Check TokenInfo and OwnedTokens
+			for serial_number in serials.clone() {
+				let token_lock_reason = TokenLockReason::Listed(serial_number as ListingId);
+				let token_flags = TokenFlags {
+					transferable: false,
+					burn_authority: Some(TokenBurnAuthority::Both),
+				};
+				let expected_token_info = TokenInformation {
+					owner: account,
+					lock_status: Some(token_lock_reason),
+					utility_flags: token_flags,
+				};
+				assert!(!old::TokenLocks::::contains_key((123, serial_number)));
+				assert!(!old::TokenUtilityFlags::::contains_key((123,
serial_number))); + let token_info = + pallet_nft::TokenInfo::::get(collection_id, serial_number).unwrap(); + assert_eq!(expected_token_info, token_info); + } + let owned_tokens = + pallet_nft::OwnedTokens::::get(account, collection_id).unwrap(); + assert_eq!(owned_tokens.into_inner(), serials); + + // Collection info should now be the new format + let expected_collection_info = CollectionInformation { + owner: create_account(126), + name: BoundedVec::::truncate_from(vec![1, 2, 3, 4]), + metadata_scheme: MetadataScheme::try_from(b"metadata".as_slice()).unwrap(), + royalties_schedule: None, + max_issuance: Some(500), + origin_chain: OriginChain::Root, + next_serial_number: 2, + collection_issuance: 5, + cross_chain_compatibility: CrossChainCompatibility::default(), + }; + let new_collection_info = + pallet_nft::CollectionInfo::::get(collection_id).unwrap(); + assert_eq!(new_collection_info, expected_collection_info); + + // Attempting to perform one more step should return Finished + let last_key = result.last_key; + let result = NftMigration::::step(last_key.clone()); + assert!(result.is_finished()); + }); + } + + #[test] + fn migrate_two_owners() { + new_test_ext().execute_with(|| { + let account_1 = create_account(123); + let serials_1 = vec![1, 2, 3]; + let old_token_ownership_1 = old::TokenOwnership:: { + owner: account_1, + owned_serials: BoundedVec::::truncate_from( + serials_1.clone(), + ), + }; + + let account_2 = create_account(126); + let serials_2 = vec![6, 7, 8, 9]; + let old_token_ownership_2 = old::TokenOwnership:: { + owner: account_2, + owned_serials: BoundedVec::::truncate_from( + serials_2.clone(), + ), + }; + + let old = old::CollectionInformation { + owner: create_account(126), + name: BoundedVec::::truncate_from(vec![1, 2, 3, 4]), + metadata_scheme: MetadataScheme::try_from(b"metadata".as_slice()).unwrap(), + royalties_schedule: None, + max_issuance: Some(500), + origin_chain: OriginChain::Root, + next_serial_number: 2, + collection_issuance: 5, + cross_chain_compatibility: CrossChainCompatibility::default(), + owned_tokens: BoundedVec::< + old::TokenOwnership, + MaxTokensPerCollection, + >::truncate_from(vec![ + old_token_ownership_1.clone(), + old_token_ownership_2, + ]), + }; + let collection_id = 123; + insert_old_data(collection_id, old.clone()); + + // Perform 1 step to migrate the last user (user_2) + let result = NftMigration::::step(None); + assert!(!result.is_finished()); + + // Check user 2 tokens + for serial_number in serials_2.clone() { + let token_info = + pallet_nft::TokenInfo::::get(collection_id, serial_number).unwrap(); + assert_eq!(token_info.owner, account_2); + } + let owned_tokens = + pallet_nft::OwnedTokens::::get(account_2, collection_id).unwrap(); + assert_eq!(owned_tokens.into_inner(), serials_2); + + // Old collection info still exists in the old format, but only has one user left to migrate + let old_collection_info = old::CollectionInfo::::get(collection_id).unwrap(); + let expected = old::CollectionInformation { + owned_tokens: BoundedVec::< + old::TokenOwnership, + MaxTokensPerCollection, + >::truncate_from(vec![old_token_ownership_1]), + ..old + }; + assert_eq!(old_collection_info, expected); + + // Perform 1 more step to migrate the first user + let result = NftMigration::::step(result.last_key); + assert!(!result.is_finished()); + + // Collection info should now be the new format + let expected_collection_info = CollectionInformation { + owner: create_account(126), + name: BoundedVec::::truncate_from(vec![1, 2, 3, 4]), + 
metadata_scheme: MetadataScheme::try_from(b"metadata".as_slice()).unwrap(), + royalties_schedule: None, + max_issuance: Some(500), + origin_chain: OriginChain::Root, + next_serial_number: 2, + collection_issuance: 5, + cross_chain_compatibility: CrossChainCompatibility::default(), + }; + let new_collection_info = + pallet_nft::CollectionInfo::::get(collection_id).unwrap(); + assert_eq!(new_collection_info, expected_collection_info); + + // Check user 1 tokens + for serial_number in serials_1.clone() { + let token_info = + pallet_nft::TokenInfo::::get(collection_id, serial_number).unwrap(); + assert_eq!(token_info.owner, account_1); + } + let owned_tokens = + pallet_nft::OwnedTokens::::get(account_1, collection_id).unwrap(); + assert_eq!(owned_tokens.into_inner(), serials_1); + + // Attempting to perform one more step should return Finished + let last_key = result.last_key; + let result = NftMigration::::step(last_key.clone()); + assert!(result.is_finished()); + }); + } + + #[test] + fn migrate_many_collections() { + new_test_ext().execute_with(|| { + // Insert 100 collections + let collection_count: CollectionUuid = 100; + for i in 0..collection_count { + let serials = vec![1, 2, 3, i + 4]; + // Add token locks and TokenFlags for each serial + for serial in serials.clone() { + println!("token: ({},{})", i, serial); + let token_lock_reason = TokenLockReason::Listed(serial as ListingId); + insert_old_token_locks((i, serial), token_lock_reason); + let token_flags = TokenFlags { + transferable: false, + burn_authority: Some(TokenBurnAuthority::Both), + }; + insert_old_token_flags((i, serial), token_flags); + } + + // Insert token ownership and collection info + let old_token_ownership = old::TokenOwnership:: { + owner: create_account(1 + i as u64), + owned_serials: + BoundedVec::::truncate_from(serials), + }; + let old = old::CollectionInformation { + owner: create_account(2 + i as u64), + name: BoundedVec::::truncate_from(vec![ + 1, 2, 3, 4, + ]), + metadata_scheme: MetadataScheme::try_from(b"metadata".as_slice()).unwrap(), + royalties_schedule: None, + max_issuance: Some(i), + origin_chain: OriginChain::Root, + next_serial_number: i + 4, + collection_issuance: i + 5, + cross_chain_compatibility: CrossChainCompatibility::default(), + owned_tokens: BoundedVec::< + old::TokenOwnership, + MaxTokensPerCollection, + >::truncate_from(vec![old_token_ownership]), + }; + insert_old_data(i, old); + } + + // Perform migration + let mut last_key = None; + for _ in 0..collection_count { + let result = NftMigration::::step(last_key.clone()); + assert!(!result.is_finished()); + last_key = result.last_key; + } + // One last step to finish migration + let result = NftMigration::::step(last_key.clone()); + assert!(result.is_finished()); + + // Check that all collections have been migrated + for i in 0..collection_count { + let new_collection_info = pallet_nft::CollectionInfo::::get(i).unwrap(); + let expected_collection_info = CollectionInformation { + owner: create_account(2 + i as u64), + name: BoundedVec::::truncate_from(vec![ + 1, 2, 3, 4, + ]), + metadata_scheme: MetadataScheme::try_from(b"metadata".as_slice()).unwrap(), + royalties_schedule: None, + max_issuance: Some(i), + origin_chain: OriginChain::Root, + next_serial_number: i + 4, + collection_issuance: i + 5, + cross_chain_compatibility: CrossChainCompatibility::default(), + }; + assert_eq!(new_collection_info, expected_collection_info); + + // Check ownership + let account = create_account(1 + i as u64); + let serials = vec![1, 2, 3, i + 4]; + for 
serial_number in serials.clone() { + let token_lock_reason = TokenLockReason::Listed(serial_number as ListingId); + let token_flags = TokenFlags { + transferable: false, + burn_authority: Some(TokenBurnAuthority::Both), + }; + let expected_token_info = TokenInformation { + owner: account, + lock_status: Some(token_lock_reason), + utility_flags: token_flags, + }; + assert!(!old::TokenLocks::::contains_key((i, serial_number))); + assert!(!old::TokenUtilityFlags::::contains_key((i, serial_number))); + let token_info = + pallet_nft::TokenInfo::::get(i, serial_number).unwrap(); + assert_eq!(token_info, expected_token_info); + } + let owned_tokens = pallet_nft::OwnedTokens::::get(account, i).unwrap(); + assert_eq!(owned_tokens.into_inner(), serials); + } + }); + } + + #[test] + fn migrate_owner_with_many_tokens() { + new_test_ext().execute_with(|| { + let account_1 = create_account(123); + let serials_1: Vec = (1..=MAX_TOKENS_PER_STEP * 2).collect(); + let old_token_ownership_1 = old::TokenOwnership:: { + owner: account_1, + owned_serials: BoundedVec::::truncate_from( + serials_1.clone(), + ), + }; + + let old = old::CollectionInformation { + owner: create_account(126), + name: BoundedVec::::truncate_from(vec![1, 2, 3, 4]), + metadata_scheme: MetadataScheme::try_from(b"metadata".as_slice()).unwrap(), + royalties_schedule: None, + max_issuance: Some(500), + origin_chain: OriginChain::Root, + next_serial_number: 2, + collection_issuance: 5, + cross_chain_compatibility: CrossChainCompatibility::default(), + owned_tokens: BoundedVec::< + old::TokenOwnership, + MaxTokensPerCollection, + >::truncate_from(vec![old_token_ownership_1.clone()]), + }; + let collection_id = 123; + insert_old_data(collection_id, old.clone()); + + // Perform 1 step to migrate MAX_TOKENS_PER_STEP tokens + let result = NftMigration::::step(None); + assert!(!result.is_finished()); + + // Check tokens + let migrated_serials = (1..=MAX_TOKENS_PER_STEP).collect::>(); + for serial_number in migrated_serials.clone() { + let token_info = + pallet_nft::TokenInfo::::get(collection_id, serial_number).unwrap(); + assert_eq!(token_info.owner, account_1); + } + let owned_tokens = + pallet_nft::OwnedTokens::::get(account_1, collection_id).unwrap(); + assert_eq!(owned_tokens.into_inner(), migrated_serials); + + // Old collection info still exists in the old format, but the number of serials is reduced + let serials_to_migrate: Vec = + (MAX_TOKENS_PER_STEP + 1..=MAX_TOKENS_PER_STEP * 2).collect(); + let old_token_ownership_1 = old::TokenOwnership:: { + owner: account_1, + owned_serials: BoundedVec::::truncate_from( + serials_to_migrate.clone(), + ), + }; + let old_collection_info = old::CollectionInfo::::get(collection_id).unwrap(); + let expected = old::CollectionInformation { + owned_tokens: BoundedVec::< + old::TokenOwnership, + MaxTokensPerCollection, + >::truncate_from(vec![old_token_ownership_1]), + ..old + }; + assert_eq!(old_collection_info, expected); + + // Perform 1 more step to migrate the rest of the serials + let result = NftMigration::::step(result.last_key); + assert!(!result.is_finished()); + + // Check tokens + let migrated_serials = (1..=MAX_TOKENS_PER_STEP * 2).collect::>(); + for serial_number in migrated_serials.clone() { + let token_info = + pallet_nft::TokenInfo::::get(collection_id, serial_number).unwrap(); + assert_eq!(token_info.owner, account_1); + } + let owned_tokens = + pallet_nft::OwnedTokens::::get(account_1, collection_id).unwrap(); + assert_eq!(owned_tokens.into_inner(), migrated_serials); + + // Collection 
info should now be the new format
+			let expected_collection_info = CollectionInformation {
+				owner: create_account(126),
+				name: BoundedVec::::truncate_from(vec![1, 2, 3, 4]),
+				metadata_scheme: MetadataScheme::try_from(b"metadata".as_slice()).unwrap(),
+				royalties_schedule: None,
+				max_issuance: Some(500),
+				origin_chain: OriginChain::Root,
+				next_serial_number: 2,
+				collection_issuance: 5,
+				cross_chain_compatibility: CrossChainCompatibility::default(),
+			};
+			let new_collection_info =
+				pallet_nft::CollectionInfo::::get(collection_id).unwrap();
+			assert_eq!(new_collection_info, expected_collection_info);
+
+			// Attempting to perform one more step should return Finished
+			let last_key = result.last_key;
+			let result = NftMigration::::step(last_key.clone());
+			assert!(result.is_finished());
+		});
+	}
+}
diff --git a/runtime/src/migrations/sft.rs b/runtime/src/migrations/sft.rs
new file mode 100644
index 000000000..8fdeef1ea
--- /dev/null
+++ b/runtime/src/migrations/sft.rs
@@ -0,0 +1,203 @@
+// Copyright 2022-2023 Futureverse Corporation Limited
+//
+// Licensed under the LGPL, Version 3.0 (the "License");
+// you may not use this file except in compliance with the License.
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// You may obtain a copy of the License at the root of this project source code
+
+use crate::{Runtime, Sft, Weight};
+use frame_support::{
+	dispatch::GetStorageVersion,
+	traits::{OnRuntimeUpgrade, StorageVersion},
+};
+
+#[allow(unused_imports)]
+use sp_runtime::DispatchError;
+#[allow(unused_imports)]
+use sp_std::vec::Vec;
+
+pub struct Upgrade;
+
+impl OnRuntimeUpgrade for Upgrade {
+	fn on_runtime_upgrade() -> Weight {
+		let current = Sft::current_storage_version();
+		let onchain = Sft::on_chain_storage_version();
+		log::info!(target: "Migration", "Sft: Running migration with current storage version {current:?} / on-chain {onchain:?}");
+
+		let mut weight = ::DbWeight::get().reads(2);
+
+		if onchain != 0 {
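+			// Defensive guard: the storage removal below is destructive, so only run it
+			// against the exact version it was written for (v0). Any other on-chain
+			// version means it has either already run or does not apply.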
+			log::info!(
+				target: "Migration",
+				"Sft: no migration was performed; this migration expects on-chain storage version 0, so the migration code should be removed."
+			);
+			return weight;
+		}
+
+		log::info!(target: "Migration", "Sft: Migrating from on-chain version {onchain:?} to on-chain version {current:?}.");
+		weight += v1::migrate::();
+		StorageVersion::new(1).put::();
+		log::info!(target: "Migration", "Sft: Migration successfully completed.");
+		weight
+	}
+
+	#[cfg(feature = "try-runtime")]
+	fn pre_upgrade() -> Result, DispatchError> {
+		log::info!(target: "Migration", "Sft: Upgrade to v1 Pre Upgrade.");
+		let onchain = Sft::on_chain_storage_version();
+		// Return early if the upgrade has already been done
+		if onchain == 1 {
+			return Ok(Vec::new());
+		}
+		assert_eq!(onchain, 0);
+
+		Ok(Vec::new())
+	}
+
+	#[cfg(feature = "try-runtime")]
+	fn post_upgrade(_state: Vec) -> Result<(), DispatchError> {
+		log::info!(target: "Migration", "Sft: Upgrade to v1 Post Upgrade.");
+		let current = Sft::current_storage_version();
+		let onchain = Sft::on_chain_storage_version();
+		assert_eq!(current, 1);
+		assert_eq!(onchain, 1);
+		Ok(())
+	}
+}
+
+#[allow(dead_code)]
+#[allow(unused_imports)]
+pub mod v1 {
+	use super::*;
+	use crate::migrations::Value;
+	use codec::{Decode, Encode, MaxEncodedLen};
+	use core::fmt::Debug;
+	use frame_support::pallet_prelude::ValueQuery;
+	use frame_support::weights::Weight;
+	use frame_support::{storage_alias, CloneNoBound, PartialEqNoBound, RuntimeDebugNoBound};
+	use frame_support::{BoundedVec, Twox64Concat};
+	use scale_info::TypeInfo;
+	use seed_primitives::{Balance, CollectionUuid, SerialNumber, TokenId};
+	use sp_core::{Get, H160};
+
+	#[frame_support::storage_alias]
+	pub type PendingIssuances = StorageMap<
+		pallet_sft::Pallet,
+		Twox64Concat,
+		CollectionUuid,
+		SftCollectionPendingIssuances<
+			::AccountId,
+			::MaxSerialsPerMint,
+			::MaxSftPendingIssuances,
+		>,
+		ValueQuery,
+	>;
+
+	type IssuanceId = u32;
+
+	#[derive(
+		PartialEqNoBound, RuntimeDebugNoBound, CloneNoBound, Encode, Decode, TypeInfo, MaxEncodedLen,
+	)]
+	#[scale_info(skip_type_params(MaxSerialsPerMint))]
+	pub struct SftPendingIssuance> {
+		pub issuance_id: IssuanceId,
+		pub serial_numbers: BoundedVec<(SerialNumber, Balance), MaxSerialsPerMint>,
+	}
+
+	/// The state of an SFT collection's pending issuances
+	#[derive(
+		PartialEqNoBound, RuntimeDebugNoBound, CloneNoBound, Encode, Decode, TypeInfo, MaxEncodedLen,
+	)]
+	#[codec(mel_bound(AccountId: MaxEncodedLen))]
+	#[scale_info(skip_type_params(MaxSerialsPerMint, MaxPendingIssuances))]
+	pub struct SftCollectionPendingIssuances
+	where
+		AccountId: Debug + PartialEq + Clone,
+		MaxSerialsPerMint: Get,
+		MaxPendingIssuances: Get,
+	{
+		pub next_issuance_id: IssuanceId,
+		pub pending_issuances: BoundedVec<
+			(AccountId, BoundedVec, MaxPendingIssuances>),
+			MaxPendingIssuances,
+		>,
+	}
+
+	impl Default
+		for SftCollectionPendingIssuances
+	where
+		AccountId: Debug + PartialEq + Clone,
+		MaxSerialsPerMint: Get,
+		MaxPendingIssuances: Get,
+	{
+		fn default() -> Self {
+			SftCollectionPendingIssuances {
+				next_issuance_id: 0,
+				pending_issuances: BoundedVec::new(),
+			}
+		}
+	}
+
+	pub fn migrate() -> Weight {
+		log::info!(target: "Migration", "Sft: removing PendingIssuances");
+
+		let results = PendingIssuances::::clear(u32::MAX, None);
+		let weight: Weight =
+			::DbWeight::get().reads(results.loops as u64);
+
+		log::info!(target: "Migration", "Sft: removal of PendingIssuances successful");
+		weight
+	}
+
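+	// `clear(u32::MAX, None)` above drops every entry in a single block, which is fine for
+	// the expected size of this map. A hedged sketch of a bounded alternative for very
+	// large maps, carrying the removal cursor across calls (illustrative only, not part of
+	// this migration):
+	//
+	//     pub fn migrate_bounded(limit: u32, cursor: Option<&[u8]>) -> (Weight, Option<Vec<u8>>) {
+	//         let results = PendingIssuances::<Runtime>::clear(limit, cursor);
+	//         let weight = <Runtime as frame_system::Config>::DbWeight::get()
+	//             .reads_writes(results.loops as u64, results.loops as u64);
+	//         (weight, results.maybe_cursor)
+	//     }
+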
frame_support::{BoundedVec, StorageHasher}; + use scale_info::TypeInfo; + use sp_core::H256; + + #[test] + fn migrate_with_data() { + new_test_ext().execute_with(|| { + // Setup storage + StorageVersion::new(0).put::(); + let item_count = 10_u32; + for i in 0..item_count { + let pending_issuance = SftCollectionPendingIssuances { + next_issuance_id: 0, + pending_issuances: BoundedVec::truncate_from(vec![( + create_account(i as u64), + BoundedVec::truncate_from(vec![SftPendingIssuance { + issuance_id: 0, + serial_numbers: BoundedVec::truncate_from(vec![(i, 100)]), + }]), + )]), + }; + PendingIssuances::::insert(i, pending_issuance); + } + + // Sanity check + for i in 0..item_count { + assert!(PendingIssuances::::contains_key(i)); + } + + // Do runtime upgrade + let used_weight = Upgrade::on_runtime_upgrade(); + assert_eq!(Sft::on_chain_storage_version(), 1); + + // Check storage is removed + for i in 0..item_count { + assert!(!PendingIssuances::::contains_key(i)); + } + }); + } + } +} diff --git a/runtime/src/weights/pallet_migration.rs b/runtime/src/weights/pallet_migration.rs index e56ffad4b..93707663e 100644 --- a/runtime/src/weights/pallet_migration.rs +++ b/runtime/src/weights/pallet_migration.rs @@ -2,7 +2,7 @@ //! Autogenerated weights for `pallet_migration` //! //! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 4.0.0-dev -//! DATE: 2024-10-29, STEPS: `50`, REPEAT: `20`, LOW RANGE: `[]`, HIGH RANGE: `[]` +//! DATE: 2025-03-05, STEPS: `50`, REPEAT: `20`, LOW RANGE: `[]`, HIGH RANGE: `[]` //! WORST CASE MAP SIZE: `1000000` //! HOSTNAME: `ip-172-31-102-147`, CPU: `Intel(R) Xeon(R) CPU E5-2686 v4 @ 2.30GHz` //! EXECUTION: ``, WASM-EXECUTION: `Compiled`, CHAIN: `Some("dev")`, DB CACHE: 1024 @@ -34,7 +34,7 @@ pub struct WeightInfo(PhantomData); impl pallet_migration::WeightInfo for WeightInfo { /// Storage: `Migration::Status` (r:1 w:1) /// Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) - /// Storage: `Migration::MigrationEnabled` (r:1 w:0) + /// Storage: `Migration::MigrationEnabled` (r:1 w:1) /// Proof: `Migration::MigrationEnabled` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) /// Storage: `Migration::BlockDelay` (r:1 w:0) /// Proof: `Migration::BlockDelay` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) @@ -42,31 +42,45 @@ impl pallet_migration::WeightInfo for WeightInfo { /// Proof: `Migration::BlockLimit` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) /// Storage: `Migration::LastKey` (r:1 w:1) /// Proof: `Migration::LastKey` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) - /// Storage: `Xls20::Xls20TokenMap` (r:1 w:0) - /// Proof: `Xls20::Xls20TokenMap` (`max_values`: None, `max_size`: Some(56), added: 2531, mode: `MaxEncodedLen`) - /// Storage: UNKNOWN KEY `0x28fc2cbf777640e8e3e472d285713c8d4e7b9012096b41c4eb3aaf947f6ea429` (r:0 w:1) - /// Proof: UNKNOWN KEY `0x28fc2cbf777640e8e3e472d285713c8d4e7b9012096b41c4eb3aaf947f6ea429` (r:0 w:1) + /// Storage: `Nft::CollectionInfo` (r:1 w:0) + /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) + /// Storage: UNKNOWN KEY `0xf43ffbe61ef468749d3617ac1a63c4b74e7b9012096b41c4eb3aaf947f6ea429` (r:0 w:1) + /// Proof: UNKNOWN KEY `0xf43ffbe61ef468749d3617ac1a63c4b74e7b9012096b41c4eb3aaf947f6ea429` (r:0 w:1) fn migrate() -> Weight { // Proof Size summary in bytes: - // Measured: `98` - // Estimated: `3521` - // Minimum execution time: 43_662_000 picoseconds. 
- Weight::from_parts(44_406_000, 0) - .saturating_add(Weight::from_parts(0, 3521)) + // Measured: `258` + // Estimated: `3959` + // Minimum execution time: 46_647_000 picoseconds. + Weight::from_parts(47_399_000, 0) + .saturating_add(Weight::from_parts(0, 3959)) .saturating_add(T::DbWeight::get().reads(6)) - .saturating_add(T::DbWeight::get().writes(3)) + .saturating_add(T::DbWeight::get().writes(4)) } - /// Storage: `Xls20::Xls20TokenMap` (r:2 w:1) - /// Proof: `Xls20::Xls20TokenMap` (`max_values`: None, `max_size`: Some(56), added: 2531, mode: `MaxEncodedLen`) - fn current_migration_step() -> Weight { + /// Storage: `Nft::CollectionInfo` (r:2 w:1) + /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) + /// Storage: UNKNOWN KEY `0xf43ffbe61ef468749d3617ac1a63c4b70d96583a751bd644fd42252931d83e5f` (r:50 w:50) + /// Proof: UNKNOWN KEY `0xf43ffbe61ef468749d3617ac1a63c4b70d96583a751bd644fd42252931d83e5f` (r:50 w:50) + /// Storage: UNKNOWN KEY `0xf43ffbe61ef468749d3617ac1a63c4b726db73e92fe0bb513cac8d5ccc97e899` (r:50 w:50) + /// Proof: UNKNOWN KEY `0xf43ffbe61ef468749d3617ac1a63c4b726db73e92fe0bb513cac8d5ccc97e899` (r:50 w:50) + /// Storage: `Nft::OwnedTokens` (r:1 w:1) + /// Proof: `Nft::OwnedTokens` (`max_values`: None, `max_size`: Some(4000052), added: 4002527, mode: `MaxEncodedLen`) + /// Storage: `Nft::TokenInfo` (r:0 w:50) + /// Proof: `Nft::TokenInfo` (`max_values`: None, `max_size`: Some(65), added: 2540, mode: `MaxEncodedLen`) + /// The range of component `p` is `[1, 50]`. + fn current_migration_step(p: u32, ) -> Weight { // Proof Size summary in bytes: - // Measured: `120` - // Estimated: `6052` - // Minimum execution time: 31_035_000 picoseconds. - Weight::from_parts(31_589_000, 0) - .saturating_add(Weight::from_parts(0, 6052)) - .saturating_add(T::DbWeight::get().reads(2)) - .saturating_add(T::DbWeight::get().writes(1)) + // Measured: `340 + p * (65 ±0)` + // Estimated: `4003517 + p * (2540 ±0)` + // Minimum execution time: 64_580_000 picoseconds. + Weight::from_parts(46_443_717, 0) + .saturating_add(Weight::from_parts(0, 4003517)) + // Standard Error: 24_383 + .saturating_add(Weight::from_parts(17_054_066, 0).saturating_mul(p.into())) + .saturating_add(T::DbWeight::get().reads(3)) + .saturating_add(T::DbWeight::get().reads((2_u64).saturating_mul(p.into()))) + .saturating_add(T::DbWeight::get().writes(2)) + .saturating_add(T::DbWeight::get().writes((3_u64).saturating_mul(p.into()))) + .saturating_add(Weight::from_parts(0, 2540).saturating_mul(p.into())) } /// Storage: `Migration::MigrationEnabled` (r:0 w:1) /// Proof: `Migration::MigrationEnabled` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) @@ -74,8 +88,8 @@ impl pallet_migration::WeightInfo for WeightInfo { // Proof Size summary in bytes: // Measured: `0` // Estimated: `0` - // Minimum execution time: 21_241_000 picoseconds. - Weight::from_parts(22_112_000, 0) + // Minimum execution time: 22_982_000 picoseconds. + Weight::from_parts(23_673_000, 0) .saturating_add(Weight::from_parts(0, 0)) .saturating_add(T::DbWeight::get().writes(1)) } @@ -85,8 +99,8 @@ impl pallet_migration::WeightInfo for WeightInfo { // Proof Size summary in bytes: // Measured: `0` // Estimated: `0` - // Minimum execution time: 22_220_000 picoseconds. - Weight::from_parts(22_742_000, 0) + // Minimum execution time: 23_204_000 picoseconds. 
+ Weight::from_parts(23_813_000, 0) .saturating_add(Weight::from_parts(0, 0)) .saturating_add(T::DbWeight::get().writes(1)) } @@ -96,8 +110,8 @@ impl pallet_migration::WeightInfo for WeightInfo { // Proof Size summary in bytes: // Measured: `0` // Estimated: `0` - // Minimum execution time: 21_630_000 picoseconds. - Weight::from_parts(22_504_000, 0) + // Minimum execution time: 22_963_000 picoseconds. + Weight::from_parts(23_368_000, 0) .saturating_add(Weight::from_parts(0, 0)) .saturating_add(T::DbWeight::get().writes(1)) } diff --git a/runtime/src/weights/pallet_nft.rs b/runtime/src/weights/pallet_nft.rs index 0cdc789dd..40202dc75 100644 --- a/runtime/src/weights/pallet_nft.rs +++ b/runtime/src/weights/pallet_nft.rs @@ -2,7 +2,7 @@ //! Autogenerated weights for `pallet_nft` //! //! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 4.0.0-dev -//! DATE: 2025-02-26, STEPS: `50`, REPEAT: `20`, LOW RANGE: `[]`, HIGH RANGE: `[]` +//! DATE: 2025-03-06, STEPS: `50`, REPEAT: `20`, LOW RANGE: `[]`, HIGH RANGE: `[]` //! WORST CASE MAP SIZE: `1000000` //! HOSTNAME: `ip-172-31-102-147`, CPU: `Intel(R) Xeon(R) CPU E5-2686 v4 @ 2.30GHz` //! EXECUTION: ``, WASM-EXECUTION: `Compiled`, CHAIN: `Some("dev")`, DB CACHE: 1024 @@ -32,80 +32,96 @@ use core::marker::PhantomData; /// Weight functions for `pallet_nft`. pub struct WeightInfo(PhantomData); impl pallet_nft::WeightInfo for WeightInfo { + /// Storage: `Migration::Status` (r:1 w:0) + /// Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) /// Storage: `Nft::CollectionInfo` (r:1 w:1) - /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) fn claim_unowned_collection() -> Weight { // Proof Size summary in bytes: - // Measured: `331` - // Estimated: `3464` - // Minimum execution time: 43_799_000 picoseconds. - Weight::from_parts(44_656_000, 0) - .saturating_add(Weight::from_parts(0, 3464)) - .saturating_add(T::DbWeight::get().reads(1)) + // Measured: `375` + // Estimated: `3959` + // Minimum execution time: 45_110_000 picoseconds. + Weight::from_parts(46_034_000, 0) + .saturating_add(Weight::from_parts(0, 3959)) + .saturating_add(T::DbWeight::get().reads(2)) .saturating_add(T::DbWeight::get().writes(1)) } + /// Storage: `Migration::Status` (r:1 w:0) + /// Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) /// Storage: `Nft::CollectionInfo` (r:1 w:1) - /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) fn set_owner() -> Weight { // Proof Size summary in bytes: - // Measured: `331` - // Estimated: `3464` - // Minimum execution time: 46_158_000 picoseconds. - Weight::from_parts(47_066_000, 0) - .saturating_add(Weight::from_parts(0, 3464)) - .saturating_add(T::DbWeight::get().reads(1)) + // Measured: `375` + // Estimated: `3959` + // Minimum execution time: 49_803_000 picoseconds. 
+ Weight::from_parts(50_337_000, 0) + .saturating_add(Weight::from_parts(0, 3959)) + .saturating_add(T::DbWeight::get().reads(2)) .saturating_add(T::DbWeight::get().writes(1)) } + /// Storage: `Migration::Status` (r:1 w:0) + /// Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) /// Storage: `Nft::CollectionInfo` (r:1 w:1) - /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) fn set_max_issuance() -> Weight { // Proof Size summary in bytes: - // Measured: `331` - // Estimated: `3464` - // Minimum execution time: 46_631_000 picoseconds. - Weight::from_parts(47_328_000, 0) - .saturating_add(Weight::from_parts(0, 3464)) - .saturating_add(T::DbWeight::get().reads(1)) + // Measured: `375` + // Estimated: `3959` + // Minimum execution time: 48_681_000 picoseconds. + Weight::from_parts(49_241_000, 0) + .saturating_add(Weight::from_parts(0, 3959)) + .saturating_add(T::DbWeight::get().reads(2)) .saturating_add(T::DbWeight::get().writes(1)) } + /// Storage: `Migration::Status` (r:1 w:0) + /// Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) /// Storage: `Nft::CollectionInfo` (r:1 w:1) - /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) fn set_base_uri() -> Weight { // Proof Size summary in bytes: - // Measured: `331` - // Estimated: `3464` - // Minimum execution time: 48_404_000 picoseconds. - Weight::from_parts(49_080_000, 0) - .saturating_add(Weight::from_parts(0, 3464)) - .saturating_add(T::DbWeight::get().reads(1)) + // Measured: `375` + // Estimated: `3959` + // Minimum execution time: 50_129_000 picoseconds. + Weight::from_parts(50_695_000, 0) + .saturating_add(Weight::from_parts(0, 3959)) + .saturating_add(T::DbWeight::get().reads(2)) .saturating_add(T::DbWeight::get().writes(1)) } + /// Storage: `Migration::Status` (r:1 w:0) + /// Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) /// Storage: `Nft::CollectionInfo` (r:1 w:1) - /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) fn set_name() -> Weight { // Proof Size summary in bytes: - // Measured: `331` - // Estimated: `3464` - // Minimum execution time: 47_630_000 picoseconds. - Weight::from_parts(48_746_000, 0) - .saturating_add(Weight::from_parts(0, 3464)) - .saturating_add(T::DbWeight::get().reads(1)) + // Measured: `375` + // Estimated: `3959` + // Minimum execution time: 49_179_000 picoseconds. 
+ Weight::from_parts(49_799_000, 0) + .saturating_add(Weight::from_parts(0, 3959)) + .saturating_add(T::DbWeight::get().reads(2)) .saturating_add(T::DbWeight::get().writes(1)) } + /// Storage: `Migration::Status` (r:1 w:0) + /// Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) /// Storage: `Nft::CollectionInfo` (r:1 w:1) - /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) fn set_royalties_schedule() -> Weight { // Proof Size summary in bytes: - // Measured: `331` - // Estimated: `3464` - // Minimum execution time: 47_935_000 picoseconds. - Weight::from_parts(48_653_000, 0) - .saturating_add(Weight::from_parts(0, 3464)) - .saturating_add(T::DbWeight::get().reads(1)) + // Measured: `375` + // Estimated: `3959` + // Minimum execution time: 49_166_000 picoseconds. + Weight::from_parts(50_477_000, 0) + .saturating_add(Weight::from_parts(0, 3959)) + .saturating_add(T::DbWeight::get().reads(2)) .saturating_add(T::DbWeight::get().writes(1)) } + /// Storage: `Migration::Status` (r:1 w:0) + /// Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) /// Storage: `Nft::NextCollectionId` (r:1 w:1) /// Proof: `Nft::NextCollectionId` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`) + /// Storage: `Nft::OwnedTokens` (r:1 w:1) + /// Proof: `Nft::OwnedTokens` (`max_values`: None, `max_size`: Some(4000052), added: 4002527, mode: `MaxEncodedLen`) /// Storage: `EVM::AccountCodes` (r:1 w:1) /// Proof: `EVM::AccountCodes` (`max_values`: None, `max_size`: None, mode: `Measured`) /// Storage: `Futurepass::DefaultProxy` (r:1 w:0) @@ -114,178 +130,218 @@ impl pallet_nft::WeightInfo for WeightInfo { /// Proof: `System::Account` (`max_values`: None, `max_size`: Some(116), added: 2591, mode: `MaxEncodedLen`) /// Storage: `EVM::AccountCodesMetadata` (r:0 w:1) /// Proof: `EVM::AccountCodesMetadata` (`max_values`: None, `max_size`: None, mode: `Measured`) + /// Storage: `Nft::TokenInfo` (r:0 w:500) + /// Proof: `Nft::TokenInfo` (`max_values`: None, `max_size`: Some(65), added: 2540, mode: `MaxEncodedLen`) /// Storage: `Nft::CollectionInfo` (r:0 w:1) - /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) - fn create_collection() -> Weight { + /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) + /// The range of component `p` is `[1, 500]`. + fn create_collection(p: u32, ) -> Weight { // Proof Size summary in bytes: - // Measured: `282` - // Estimated: `3747` - // Minimum execution time: 94_058_000 picoseconds. - Weight::from_parts(95_103_000, 0) - .saturating_add(Weight::from_parts(0, 3747)) - .saturating_add(T::DbWeight::get().reads(4)) - .saturating_add(T::DbWeight::get().writes(5)) + // Measured: `285` + // Estimated: `4003517` + // Minimum execution time: 113_002_000 picoseconds. 
+ Weight::from_parts(102_684_578, 0) + .saturating_add(Weight::from_parts(0, 4003517)) + // Standard Error: 6_016 + .saturating_add(Weight::from_parts(3_942_376, 0).saturating_mul(p.into())) + .saturating_add(T::DbWeight::get().reads(6)) + .saturating_add(T::DbWeight::get().writes(6)) + .saturating_add(T::DbWeight::get().writes((1_u64).saturating_mul(p.into()))) } + /// Storage: `Migration::Status` (r:1 w:0) + /// Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) /// Storage: `Nft::CollectionInfo` (r:1 w:0) - /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) /// Storage: `Nft::PublicMintInfo` (r:1 w:1) /// Proof: `Nft::PublicMintInfo` (`max_values`: None, `max_size`: Some(34), added: 2509, mode: `MaxEncodedLen`) fn toggle_public_mint() -> Weight { // Proof Size summary in bytes: - // Measured: `331` - // Estimated: `3499` - // Minimum execution time: 49_463_000 picoseconds. - Weight::from_parts(50_499_000, 0) - .saturating_add(Weight::from_parts(0, 3499)) - .saturating_add(T::DbWeight::get().reads(2)) + // Measured: `375` + // Estimated: `3959` + // Minimum execution time: 52_004_000 picoseconds. + Weight::from_parts(52_860_000, 0) + .saturating_add(Weight::from_parts(0, 3959)) + .saturating_add(T::DbWeight::get().reads(3)) .saturating_add(T::DbWeight::get().writes(1)) } + /// Storage: `Migration::Status` (r:1 w:0) + /// Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) /// Storage: `Nft::CollectionInfo` (r:1 w:0) - /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) /// Storage: `Nft::PublicMintInfo` (r:1 w:1) /// Proof: `Nft::PublicMintInfo` (`max_values`: None, `max_size`: Some(34), added: 2509, mode: `MaxEncodedLen`) fn set_mint_fee() -> Weight { // Proof Size summary in bytes: - // Measured: `331` - // Estimated: `3499` - // Minimum execution time: 50_344_000 picoseconds. - Weight::from_parts(51_022_000, 0) - .saturating_add(Weight::from_parts(0, 3499)) - .saturating_add(T::DbWeight::get().reads(2)) + // Measured: `375` + // Estimated: `3959` + // Minimum execution time: 52_674_000 picoseconds. 
+ Weight::from_parts(54_029_000, 0) + .saturating_add(Weight::from_parts(0, 3959)) + .saturating_add(T::DbWeight::get().reads(3)) .saturating_add(T::DbWeight::get().writes(1)) } + /// Storage: `Migration::Status` (r:1 w:0) + /// Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) /// Storage: `Nft::CollectionInfo` (r:1 w:1) - /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) /// Storage: `Nft::PublicMintInfo` (r:1 w:0) /// Proof: `Nft::PublicMintInfo` (`max_values`: None, `max_size`: Some(34), added: 2509, mode: `MaxEncodedLen`) /// Storage: `Nft::UtilityFlags` (r:1 w:0) /// Proof: `Nft::UtilityFlags` (`max_values`: None, `max_size`: Some(15), added: 2490, mode: `MaxEncodedLen`) + /// Storage: `System::Account` (r:2 w:2) + /// Proof: `System::Account` (`max_values`: None, `max_size`: Some(116), added: 2591, mode: `MaxEncodedLen`) + /// Storage: `Nft::OwnedTokens` (r:1 w:1) + /// Proof: `Nft::OwnedTokens` (`max_values`: None, `max_size`: Some(4000052), added: 4002527, mode: `MaxEncodedLen`) /// Storage: `EVMChainId::ChainId` (r:1 w:0) /// Proof: `EVMChainId::ChainId` (`max_values`: Some(1), `max_size`: Some(8), added: 503, mode: `MaxEncodedLen`) /// Storage: `Nfi::NfiEnabled` (r:1 w:0) /// Proof: `Nfi::NfiEnabled` (`max_values`: None, `max_size`: Some(529), added: 3004, mode: `MaxEncodedLen`) - fn mint() -> Weight { + /// Storage: `Nft::TokenInfo` (r:0 w:500) + /// Proof: `Nft::TokenInfo` (`max_values`: None, `max_size`: Some(65), added: 2540, mode: `MaxEncodedLen`) + /// The range of component `p` is `[1, 500]`. + fn mint(p: u32, ) -> Weight { // Proof Size summary in bytes: - // Measured: `377` - // Estimated: `3994` - // Minimum execution time: 73_930_000 picoseconds. - Weight::from_parts(74_994_000, 0) - .saturating_add(Weight::from_parts(0, 3994)) - .saturating_add(T::DbWeight::get().reads(5)) - .saturating_add(T::DbWeight::get().writes(1)) + // Measured: `684` + // Estimated: `4003517` + // Minimum execution time: 204_783_000 picoseconds. 
+ Weight::from_parts(186_891_339, 0) + .saturating_add(Weight::from_parts(0, 4003517)) + // Standard Error: 5_959 + .saturating_add(Weight::from_parts(3_906_145, 0).saturating_mul(p.into())) + .saturating_add(T::DbWeight::get().reads(9)) + .saturating_add(T::DbWeight::get().writes(4)) + .saturating_add(T::DbWeight::get().writes((1_u64).saturating_mul(p.into()))) } + /// Storage: `Migration::Status` (r:1 w:0) + /// Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) /// Storage: `Nft::UtilityFlags` (r:1 w:0) /// Proof: `Nft::UtilityFlags` (`max_values`: None, `max_size`: Some(15), added: 2490, mode: `MaxEncodedLen`) - /// Storage: `Nft::CollectionInfo` (r:1 w:1) - /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) - /// Storage: `Nft::TokenLocks` (r:500 w:0) - /// Proof: `Nft::TokenLocks` (`max_values`: None, `max_size`: Some(33), added: 2508, mode: `MaxEncodedLen`) - /// Storage: `Nft::TokenUtilityFlags` (r:500 w:0) - /// Proof: `Nft::TokenUtilityFlags` (`max_values`: None, `max_size`: Some(19), added: 2494, mode: `MaxEncodedLen`) + /// Storage: `Nft::CollectionInfo` (r:1 w:0) + /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) + /// Storage: `Nft::TokenInfo` (r:500 w:500) + /// Proof: `Nft::TokenInfo` (`max_values`: None, `max_size`: Some(65), added: 2540, mode: `MaxEncodedLen`) + /// Storage: `Nft::OwnedTokens` (r:2 w:2) + /// Proof: `Nft::OwnedTokens` (`max_values`: None, `max_size`: Some(4000052), added: 4002527, mode: `MaxEncodedLen`) /// Storage: `TokenApprovals::ERC721Approvals` (r:0 w:500) /// Proof: `TokenApprovals::ERC721Approvals` (`max_values`: None, `max_size`: Some(36), added: 2511, mode: `MaxEncodedLen`) /// The range of component `p` is `[1, 500]`. fn transfer(p: u32, ) -> Weight { // Proof Size summary in bytes: - // Measured: `332 + p * (4 ±0)` - // Estimated: `3480 + p * (2508 ±0)` - // Minimum execution time: 68_300_000 picoseconds. - Weight::from_parts(24_948_196, 0) - .saturating_add(Weight::from_parts(0, 3480)) - // Standard Error: 17_815 - .saturating_add(Weight::from_parts(9_257_771, 0).saturating_mul(p.into())) - .saturating_add(T::DbWeight::get().reads(2)) - .saturating_add(T::DbWeight::get().reads((2_u64).saturating_mul(p.into()))) - .saturating_add(T::DbWeight::get().writes(1)) - .saturating_add(T::DbWeight::get().writes((1_u64).saturating_mul(p.into()))) - .saturating_add(Weight::from_parts(0, 2508).saturating_mul(p.into())) + // Measured: `394 + p * (43 ±0)` + // Estimated: `8006044 + p * (2540 ±0)` + // Minimum execution time: 88_330_000 picoseconds. 
+ Weight::from_parts(90_568_000, 0) + .saturating_add(Weight::from_parts(0, 8006044)) + // Standard Error: 17_879 + .saturating_add(Weight::from_parts(12_373_415, 0).saturating_mul(p.into())) + .saturating_add(T::DbWeight::get().reads(5)) + .saturating_add(T::DbWeight::get().reads((1_u64).saturating_mul(p.into()))) + .saturating_add(T::DbWeight::get().writes(2)) + .saturating_add(T::DbWeight::get().writes((2_u64).saturating_mul(p.into()))) + .saturating_add(Weight::from_parts(0, 2540).saturating_mul(p.into())) } - /// Storage: `Nft::TokenLocks` (r:1 w:0) - /// Proof: `Nft::TokenLocks` (`max_values`: None, `max_size`: Some(33), added: 2508, mode: `MaxEncodedLen`) + /// Storage: `Migration::Status` (r:1 w:0) + /// Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) /// Storage: `Nft::UtilityFlags` (r:1 w:0) /// Proof: `Nft::UtilityFlags` (`max_values`: None, `max_size`: Some(15), added: 2490, mode: `MaxEncodedLen`) + /// Storage: `Nft::CollectionInfo` (r:1 w:1) + /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) + /// Storage: `Nft::TokenInfo` (r:1 w:1) + /// Proof: `Nft::TokenInfo` (`max_values`: None, `max_size`: Some(65), added: 2540, mode: `MaxEncodedLen`) + /// Storage: `Nft::OwnedTokens` (r:1 w:1) + /// Proof: `Nft::OwnedTokens` (`max_values`: None, `max_size`: Some(4000052), added: 4002527, mode: `MaxEncodedLen`) /// Storage: `EVMChainId::ChainId` (r:1 w:0) /// Proof: `EVMChainId::ChainId` (`max_values`: Some(1), `max_size`: Some(8), added: 503, mode: `MaxEncodedLen`) /// Storage: `Nfi::NfiData` (r:1 w:0) /// Proof: `Nfi::NfiData` (`max_values`: None, `max_size`: Some(1166), added: 3641, mode: `MaxEncodedLen`) - /// Storage: `Nft::CollectionInfo` (r:1 w:1) - /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) - /// Storage: `Nft::TokenUtilityFlags` (r:1 w:0) - /// Proof: `Nft::TokenUtilityFlags` (`max_values`: None, `max_size`: Some(19), added: 2494, mode: `MaxEncodedLen`) /// Storage: `TokenApprovals::ERC721Approvals` (r:0 w:1) /// Proof: `TokenApprovals::ERC721Approvals` (`max_values`: None, `max_size`: Some(36), added: 2511, mode: `MaxEncodedLen`) fn burn() -> Weight { // Proof Size summary in bytes: - // Measured: `377` - // Estimated: `4631` - // Minimum execution time: 78_614_000 picoseconds. - Weight::from_parts(79_500_000, 0) - .saturating_add(Weight::from_parts(0, 4631)) - .saturating_add(T::DbWeight::get().reads(6)) - .saturating_add(T::DbWeight::get().writes(2)) + // Measured: `499` + // Estimated: `4003517` + // Minimum execution time: 95_021_000 picoseconds. 
+ Weight::from_parts(96_689_000, 0) + .saturating_add(Weight::from_parts(0, 4003517)) + .saturating_add(T::DbWeight::get().reads(7)) + .saturating_add(T::DbWeight::get().writes(4)) } + /// Storage: `Migration::Status` (r:1 w:0) + /// Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) /// Storage: `Nft::CollectionInfo` (r:1 w:0) - /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) /// Storage: `Nft::UtilityFlags` (r:0 w:1) /// Proof: `Nft::UtilityFlags` (`max_values`: None, `max_size`: Some(15), added: 2490, mode: `MaxEncodedLen`) fn set_utility_flags() -> Weight { // Proof Size summary in bytes: - // Measured: `331` - // Estimated: `3464` - // Minimum execution time: 50_628_000 picoseconds. - Weight::from_parts(51_420_000, 0) - .saturating_add(Weight::from_parts(0, 3464)) - .saturating_add(T::DbWeight::get().reads(1)) + // Measured: `375` + // Estimated: `3959` + // Minimum execution time: 53_880_000 picoseconds. + Weight::from_parts(55_239_000, 0) + .saturating_add(Weight::from_parts(0, 3959)) + .saturating_add(T::DbWeight::get().reads(2)) .saturating_add(T::DbWeight::get().writes(1)) } + /// Storage: `Migration::Status` (r:1 w:0) + /// Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) /// Storage: `Nft::CollectionInfo` (r:1 w:0) - /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) - /// Storage: `Nft::TokenUtilityFlags` (r:1 w:1) - /// Proof: `Nft::TokenUtilityFlags` (`max_values`: None, `max_size`: Some(19), added: 2494, mode: `MaxEncodedLen`) + /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) + /// Storage: `Nft::TokenInfo` (r:1 w:1) + /// Proof: `Nft::TokenInfo` (`max_values`: None, `max_size`: Some(65), added: 2540, mode: `MaxEncodedLen`) fn set_token_transferable_flag() -> Weight { // Proof Size summary in bytes: - // Measured: `331` - // Estimated: `3484` - // Minimum execution time: 55_378_000 picoseconds. - Weight::from_parts(56_210_000, 0) - .saturating_add(Weight::from_parts(0, 3484)) - .saturating_add(T::DbWeight::get().reads(2)) + // Measured: `411` + // Estimated: `3959` + // Minimum execution time: 58_003_000 picoseconds. 
+ Weight::from_parts(59_378_000, 0) + .saturating_add(Weight::from_parts(0, 3959)) + .saturating_add(T::DbWeight::get().reads(3)) .saturating_add(T::DbWeight::get().writes(1)) } + /// Storage: `Migration::Status` (r:1 w:0) + /// Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) /// Storage: `Nft::CollectionInfo` (r:1 w:0) - /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) - /// Storage: `Nft::PendingIssuances` (r:1 w:1) - /// Proof: `Nft::PendingIssuances` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) + /// Storage: `Nft::UtilityFlags` (r:1 w:0) + /// Proof: `Nft::UtilityFlags` (`max_values`: None, `max_size`: Some(15), added: 2490, mode: `MaxEncodedLen`) + /// Storage: `Nft::NextIssuanceId` (r:1 w:0) + /// Proof: `Nft::NextIssuanceId` (`max_values`: Some(1), `max_size`: Some(8), added: 503, mode: `MaxEncodedLen`) + /// Storage: `Nft::PendingIssuances` (r:0 w:1) + /// Proof: `Nft::PendingIssuances` (`max_values`: None, `max_size`: Some(61), added: 2536, mode: `MaxEncodedLen`) fn issue_soulbound() -> Weight { // Proof Size summary in bytes: - // Measured: `331` - // Estimated: `3464` - // Minimum execution time: 53_087_000 picoseconds. - Weight::from_parts(54_109_000, 0) - .saturating_add(Weight::from_parts(0, 3464)) - .saturating_add(T::DbWeight::get().reads(2)) + // Measured: `375` + // Estimated: `3959` + // Minimum execution time: 66_579_000 picoseconds. + Weight::from_parts(67_398_000, 0) + .saturating_add(Weight::from_parts(0, 3959)) + .saturating_add(T::DbWeight::get().reads(4)) .saturating_add(T::DbWeight::get().writes(1)) } + /// Storage: `Migration::Status` (r:1 w:0) + /// Proof: `Migration::Status` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) /// Storage: `Nft::PendingIssuances` (r:1 w:1) - /// Proof: `Nft::PendingIssuances` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + /// Proof: `Nft::PendingIssuances` (`max_values`: None, `max_size`: Some(61), added: 2536, mode: `MaxEncodedLen`) /// Storage: `Nft::CollectionInfo` (r:1 w:1) - /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(4294967295), added: 2474, mode: `MaxEncodedLen`) + /// Proof: `Nft::CollectionInfo` (`max_values`: None, `max_size`: Some(494), added: 2969, mode: `MaxEncodedLen`) /// Storage: `Nft::UtilityFlags` (r:1 w:0) /// Proof: `Nft::UtilityFlags` (`max_values`: None, `max_size`: Some(15), added: 2490, mode: `MaxEncodedLen`) + /// Storage: `Nft::OwnedTokens` (r:1 w:1) + /// Proof: `Nft::OwnedTokens` (`max_values`: None, `max_size`: Some(4000052), added: 4002527, mode: `MaxEncodedLen`) /// Storage: `EVMChainId::ChainId` (r:1 w:0) /// Proof: `EVMChainId::ChainId` (`max_values`: Some(1), `max_size`: Some(8), added: 503, mode: `MaxEncodedLen`) /// Storage: `Nfi::NfiEnabled` (r:1 w:0) /// Proof: `Nfi::NfiEnabled` (`max_values`: None, `max_size`: Some(529), added: 3004, mode: `MaxEncodedLen`) - /// Storage: `Nft::TokenUtilityFlags` (r:1 w:1) - /// Proof: `Nft::TokenUtilityFlags` (`max_values`: None, `max_size`: Some(19), added: 2494, mode: `MaxEncodedLen`) + /// Storage: `Nft::TokenInfo` (r:0 w:1) + /// Proof: `Nft::TokenInfo` (`max_values`: None, `max_size`: Some(65), added: 2540, mode: `MaxEncodedLen`) fn accept_soulbound_issuance() -> Weight { // Proof Size summary in bytes: - // 
Measured: `447` - // Estimated: `3994` - // Minimum execution time: 88_964_000 picoseconds. - Weight::from_parts(90_696_000, 0) - .saturating_add(Weight::from_parts(0, 3994)) - .saturating_add(T::DbWeight::get().reads(6)) - .saturating_add(T::DbWeight::get().writes(3)) + // Measured: `546` + // Estimated: `4003517` + // Minimum execution time: 110_764_000 picoseconds. + Weight::from_parts(112_107_000, 0) + .saturating_add(Weight::from_parts(0, 4003517)) + .saturating_add(T::DbWeight::get().reads(7)) + .saturating_add(T::DbWeight::get().writes(4)) } }