diff --git a/Cargo.lock b/Cargo.lock
index d0c7480d2..68cc86c8b 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -299,7 +299,7 @@ version = "0.3.0"
 dependencies = [
  "cosmwasm-schema",
  "cosmwasm-std",
- "cosmwasm-storage",
+ "cw-storage-plus",
  "cw0",
  "cw2",
  "cw721",
diff --git a/contracts/cw721-base/Cargo.toml b/contracts/cw721-base/Cargo.toml
index f8cda8012..e9390c035 100644
--- a/contracts/cw721-base/Cargo.toml
+++ b/contracts/cw721-base/Cargo.toml
@@ -28,8 +28,8 @@ library = []
 cw0 = { path = "../../packages/cw0", version = "0.3.0" }
 cw2 = { path = "../../packages/cw2", version = "0.3.0" }
 cw721 = { path = "../../packages/cw721", version = "0.3.0" }
+cw-storage-plus = { path = "../../packages/storage-plus", version = "0.3.0" , features = ["iterator"]}
 cosmwasm-std = { version = "0.11.0" }
-cosmwasm-storage = { version = "0.11.0", features = ["iterator"] }
 schemars = "0.7"
 serde = { version = "1.0.103", default-features = false, features = ["derive"] }
 thiserror = { version = "1.0.20" }
diff --git a/contracts/cw721-base/schema/query_msg.json b/contracts/cw721-base/schema/query_msg.json
index 743214587..06f5c55a3 100644
--- a/contracts/cw721-base/schema/query_msg.json
+++ b/contracts/cw721-base/schema/query_msg.json
@@ -145,6 +145,40 @@
         }
       }
     },
+    {
+      "description": "With Enumerable extension. Returns all tokens owned by the given address, [] if unset. Return type: TokensResponse.",
+      "type": "object",
+      "required": [
+        "tokens"
+      ],
+      "properties": {
+        "tokens": {
+          "type": "object",
+          "required": [
+            "owner"
+          ],
+          "properties": {
+            "limit": {
+              "type": [
+                "integer",
+                "null"
+              ],
+              "format": "uint32",
+              "minimum": 0.0
+            },
+            "owner": {
+              "$ref": "#/definitions/HumanAddr"
+            },
+            "start_after": {
+              "type": [
+                "string",
+                "null"
+              ]
+            }
+          }
+        }
+      }
+    },
     {
       "description": "With Enumerable extension. Requires pagination. Lists all token_ids controlled by the contract. Return type: TokensResponse.",
       "type": "object",
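For orientation, the new `tokens` schema entry above corresponds to the `QueryMsg::Tokens` variant added in `contracts/cw721-base/src/msg.rs` further down. A minimal client-side sketch, not part of this patch; the owner address and limit are invented:

```rust
use cosmwasm_std::{to_binary, Binary, HumanAddr, StdResult};
use cw721_base::msg::QueryMsg;

// Build the raw query binary a client would submit for the new Tokens query.
fn build_tokens_query() -> StdResult<Binary> {
    let msg = QueryMsg::Tokens {
        owner: HumanAddr::from("demeter"), // example owner, not from this diff
        start_after: None,                 // start from the first token id
        limit: Some(10),                   // capped by MAX_LIMIT in contract.rs
    };
    // serializes to roughly: {"tokens":{"owner":"demeter","start_after":null,"limit":10}}
    to_binary(&msg)
}
```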
diff --git a/contracts/cw721-base/src/contract.rs b/contracts/cw721-base/src/contract.rs
index 91c010183..858b00135 100644
--- a/contracts/cw721-base/src/contract.rs
+++ b/contracts/cw721-base/src/contract.rs
@@ -1,9 +1,9 @@
 use cosmwasm_std::{
     attr, from_binary, to_binary, Api, Binary, BlockInfo, CosmosMsg, Env, Extern, HandleResponse,
-    HumanAddr, InitResponse, MessageInfo, Order, Querier, StdResult, Storage, KV,
+    HumanAddr, InitResponse, MessageInfo, Order, Querier, StdError, StdResult, Storage, KV,
 };
-use cw0::{calc_range_start_human, calc_range_start_string};
+use cw0::maybe_canonical;
 use cw2::set_contract_version;
 use cw721::{
     AllNftInfoResponse, ApprovedForAllResponse, ContractInfoResponse, Expiration, NftInfoResponse,
@@ -13,9 +13,9 @@ use cw721::{
 use crate::error::ContractError;
 use crate::msg::{HandleMsg, InitMsg, MintMsg, MinterResponse, QueryMsg};
 use crate::state::{
-    contract_info, contract_info_read, increment_tokens, mint, mint_read, num_tokens, operators,
-    operators_read, tokens, tokens_read, Approval, TokenInfo,
+    increment_tokens, num_tokens, tokens, Approval, TokenInfo, CONTRACT_INFO, MINTER, OPERATORS,
 };
+use cw_storage_plus::Bound;

 // version info for migration info
 const CONTRACT_NAME: &str = "crates.io:cw721-base";
@@ -33,9 +33,9 @@ pub fn init<S: Storage, A: Api, Q: Querier>(
         name: msg.name,
         symbol: msg.symbol,
     };
-    contract_info(&mut deps.storage).save(&info)?;
+    CONTRACT_INFO.save(&mut deps.storage, &info)?;
     let minter = deps.api.canonical_address(&msg.minter)?;
-    mint(&mut deps.storage).save(&minter)?;
+    MINTER.save(&mut deps.storage, &minter)?;

     Ok(InitResponse::default())
 }
@@ -77,7 +77,7 @@ pub fn handle_mint<S: Storage, A: Api, Q: Querier>(
     deps: &mut Extern<S, A, Q>,
     env: Env,
     info: MessageInfo,
     msg: MintMsg,
 ) -> Result<HandleResponse, ContractError> {
-    let minter = mint(&mut deps.storage).load()?;
+    let minter = MINTER.load(&deps.storage)?;

     let sender_raw = deps.api.canonical_address(&info.sender)?;
     if sender_raw != minter {
@@ -92,7 +92,7 @@ pub fn handle_mint<S: Storage, A: Api, Q: Querier>(
         description: msg.description.unwrap_or_default(),
         image: msg.image,
     };
-    tokens(&mut deps.storage).update(msg.token_id.as_bytes(), |old| match old {
+    tokens().update(&mut deps.storage, &msg.token_id, |old| match old {
         Some(_) => Err(ContractError::Claimed {}),
         None => Ok(token),
     })?;
@@ -168,13 +168,13 @@ pub fn _transfer_nft<S: Storage, A: Api, Q: Querier>(
     recipient: &HumanAddr,
     token_id: &str,
 ) -> Result<TokenInfo, ContractError> {
-    let mut token = tokens(&mut deps.storage).load(token_id.as_bytes())?;
+    let mut token = tokens().load(&deps.storage, &token_id)?;
     // ensure we have permissions
     check_can_send(&deps, env, info, &token)?;
     // set owner and remove existing approvals
     token.owner = deps.api.canonical_address(recipient)?;
     token.approvals = vec![];
-    tokens(&mut deps.storage).save(token_id.as_bytes(), &token)?;
+    tokens().save(&mut deps.storage, &token_id, &token)?;
     Ok(token)
 }
@@ -231,7 +231,7 @@ pub fn _update_approvals<S: Storage, A: Api, Q: Querier>(
     add: bool,
     expires: Option<Expiration>,
 ) -> Result<TokenInfo, ContractError> {
-    let mut token = tokens(&mut deps.storage).load(token_id.as_bytes())?;
+    let mut token = tokens().load(&deps.storage, &token_id)?;
     // ensure we have permissions
     check_can_approve(&deps, env, info, &token)?;

@@ -257,7 +257,7 @@ pub fn _update_approvals<S: Storage, A: Api, Q: Querier>(
         token.approvals.push(approval);
     }

-    tokens(&mut deps.storage).save(token_id.as_bytes(), &token)?;
+    tokens().save(&mut deps.storage, &token_id, &token)?;

     Ok(token)
 }
@@ -278,7 +278,7 @@ pub fn handle_approve_all<S: Storage, A: Api, Q: Querier>(
     // set the operator for us
     let sender_raw = deps.api.canonical_address(&info.sender)?;
     let operator_raw = deps.api.canonical_address(&operator)?;
-    operators(&mut deps.storage, &sender_raw).save(operator_raw.as_slice(), &expires)?;
+    OPERATORS.save(&mut deps.storage, (&sender_raw, &operator_raw), &expires)?;

     Ok(HandleResponse {
         messages: vec![],
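The per-owner `operators` buckets removed above are replaced by the single `OPERATORS` map keyed on a `(granter, operator)` tuple (defined in `state.rs` below). A standalone sketch of how such a composite-key `Map` behaves; illustrative only, and it spells out `.as_slice()` for the key parts:

```rust
use cosmwasm_std::{CanonicalAddr, StdResult, Storage};
use cw721::Expiration;
use cw_storage_plus::Map;

// same shape as the OPERATORS map introduced in state.rs below
const OPERATORS: Map<(&[u8], &[u8]), Expiration> = Map::new(b"operators");

// grant, look up and revoke an operator for one owner
fn operator_roundtrip<S: Storage>(
    store: &mut S,
    owner: &CanonicalAddr,
    operator: &CanonicalAddr,
) -> StdResult<()> {
    let key = (owner.as_slice(), operator.as_slice());
    OPERATORS.save(store, key, &Expiration::Never {})?;
    // point lookup uses the same tuple
    assert!(OPERATORS.may_load(store, key)?.is_some());
    // remove is infallible in this API
    OPERATORS.remove(store, key);
    Ok(())
}
```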
@@ -299,7 +299,7 @@ pub fn handle_revoke_all<S: Storage, A: Api, Q: Querier>(
 ) -> Result<HandleResponse, ContractError> {
     let sender_raw = deps.api.canonical_address(&info.sender)?;
     let operator_raw = deps.api.canonical_address(&operator)?;
-    operators(&mut deps.storage, &sender_raw).remove(operator_raw.as_slice());
+    OPERATORS.remove(&mut deps.storage, (&sender_raw, &operator_raw));

     Ok(HandleResponse {
         messages: vec![],
@@ -325,7 +325,7 @@ fn check_can_approve<S: Storage, A: Api, Q: Querier>(
         return Ok(());
     }
     // operator can approve
-    let op = operators_read(&deps.storage, &token.owner).may_load(sender_raw.as_slice())?;
+    let op = OPERATORS.may_load(&deps.storage, (&token.owner, &sender_raw))?;
     match op {
         Some(ex) => {
             if ex.is_expired(&env.block) {
@@ -361,7 +361,7 @@ fn check_can_send<S: Storage, A: Api, Q: Querier>(
     }

     // operator can send
-    let op = operators_read(&deps.storage, &token.owner).may_load(sender_raw.as_slice())?;
+    let op = OPERATORS.may_load(&deps.storage, (&token.owner, &sender_raw))?;
     match op {
         Some(ex) => {
             if ex.is_expired(&env.block) {
@@ -415,6 +415,11 @@ pub fn query<S: Storage, A: Api, Q: Querier>(
             limit,
         )?),
         QueryMsg::NumTokens {} => to_binary(&query_num_tokens(deps)?),
+        QueryMsg::Tokens {
+            owner,
+            start_after,
+            limit,
+        } => to_binary(&query_tokens(deps, owner, start_after, limit)?),
         QueryMsg::AllTokens { start_after, limit } => {
             to_binary(&query_all_tokens(deps, start_after, limit)?)
         }
@@ -424,7 +429,7 @@ pub fn query<S: Storage, A: Api, Q: Querier>(
 fn query_minter<S: Storage, A: Api, Q: Querier>(
     deps: &Extern<S, A, Q>,
 ) -> StdResult<MinterResponse> {
-    let minter_raw = mint_read(&deps.storage).load()?;
+    let minter_raw = MINTER.load(&deps.storage)?;
     let minter = deps.api.human_address(&minter_raw)?;
     Ok(MinterResponse { minter })
 }
@@ -432,7 +437,7 @@ fn query_minter<S: Storage, A: Api, Q: Querier>(
 fn query_contract_info<S: Storage, A: Api, Q: Querier>(
     deps: &Extern<S, A, Q>,
 ) -> StdResult<ContractInfoResponse> {
-    contract_info_read(&deps.storage).load()
+    CONTRACT_INFO.load(&deps.storage)
 }

 fn query_num_tokens<S: Storage, A: Api, Q: Querier>(
@@ -446,7 +451,7 @@ fn query_nft_info<S: Storage, A: Api, Q: Querier>(
     deps: &Extern<S, A, Q>,
     token_id: String,
 ) -> StdResult<NftInfoResponse> {
-    let info = tokens_read(&deps.storage).load(token_id.as_bytes())?;
+    let info = tokens().load(&deps.storage, &token_id)?;
     Ok(NftInfoResponse {
         name: info.name,
         description: info.description,
@@ -460,7 +465,7 @@ fn query_owner_of<S: Storage, A: Api, Q: Querier>(
     token_id: String,
     include_expired: bool,
 ) -> StdResult<OwnerOfResponse> {
-    let info = tokens_read(&deps.storage).load(token_id.as_bytes())?;
+    let info = tokens().load(&deps.storage, &token_id)?;
     Ok(OwnerOfResponse {
         owner: deps.api.human_address(&info.owner)?,
         approvals: humanize_approvals(deps.api, &env.block, &info, include_expired)?,
@@ -478,12 +483,14 @@ fn query_all_approvals<S: Storage, A: Api, Q: Querier>(
     start_after: Option<HumanAddr>,
     limit: Option<u32>,
 ) -> StdResult<ApprovedForAllResponse> {
-    let owner_raw = deps.api.canonical_address(&owner)?;
     let limit = limit.unwrap_or(DEFAULT_LIMIT).min(MAX_LIMIT) as usize;
-    let start = calc_range_start_human(deps.api, start_after)?;
+    let start_canon = maybe_canonical(deps.api, start_after)?;
+    let start = start_canon.map(Bound::exclusive);

-    let res: StdResult<Vec<_>> = operators_read(&deps.storage, &owner_raw)
-        .range(start.as_deref(), None, Order::Ascending)
+    let owner_raw = deps.api.canonical_address(&owner)?;
+    let res: StdResult<Vec<_>> = OPERATORS
+        .prefix(&owner_raw)
+        .range(&deps.storage, start, None, Order::Ascending)
         .filter(|r| include_expired || r.is_err() || !r.as_ref().unwrap().1.is_expired(&env.block))
         .take(limit)
         .map(|item| parse_approval(deps.api, item))
@@ -498,16 +505,37 @@ fn parse_approval<A: Api>(api: A, item: StdResult<KV<Expiration>>) -> StdResult<
     })
 }

+fn query_tokens<S: Storage, A: Api, Q: Querier>(
+    deps: &Extern<S, A, Q>,
+    owner: HumanAddr,
+    start_after: Option<String>,
+    limit: Option<u32>,
+) -> StdResult<TokensResponse> {
+    let limit = limit.unwrap_or(DEFAULT_LIMIT).min(MAX_LIMIT) as usize;
+    let start = start_after.map(Bound::exclusive);
+
+    let owner_raw = deps.api.canonical_address(&owner)?;
+    let tokens: Result<Vec<String>, _> = tokens::<S>()
+        .idx
+        .owner
+        .pks(&deps.storage, &owner_raw, start, None, Order::Ascending)
+        .take(limit)
+        .map(String::from_utf8)
+        .collect();
+    let tokens = tokens.map_err(StdError::invalid_utf8)?;
+    Ok(TokensResponse { tokens })
+}
+
 fn query_all_tokens<S: Storage, A: Api, Q: Querier>(
     deps: &Extern<S, A, Q>,
     start_after: Option<String>,
     limit: Option<u32>,
 ) -> StdResult<TokensResponse> {
     let limit = limit.unwrap_or(DEFAULT_LIMIT).min(MAX_LIMIT) as usize;
-    let start = calc_range_start_string(start_after);
+    let start = start_after.map(Bound::exclusive);

-    let tokens: StdResult<Vec<String>> = tokens_read(&deps.storage)
-        .range(start.as_deref(), None, Order::Ascending)
+    let tokens: StdResult<Vec<String>> = tokens::<S>()
+        .range(&deps.storage, start, None, Order::Ascending)
         .take(limit)
         .map(|item| item.map(|(k, _)| String::from_utf8_lossy(&k).to_string()))
         .collect();
@@ -520,7 +548,7 @@ fn query_all_nft_info<S: Storage, A: Api, Q: Querier>(
     token_id: String,
     include_expired: bool,
 ) -> StdResult<AllNftInfoResponse> {
-    let info = tokens_read(&deps.storage).load(token_id.as_bytes())?;
+    let info = tokens().load(&deps.storage, &token_id)?;
     Ok(AllNftInfoResponse {
         access: OwnerOfResponse {
             owner: deps.api.human_address(&info.owner)?,
@@ -1072,4 +1100,73 @@ mod tests {
         let res = query_all_approvals(&deps, late_env, "person".into(), false, None, None).unwrap();
         assert_eq!(0, res.operators.len());
     }
+
+    #[test]
+    fn query_tokens_by_owner() {
+        let mut deps = mock_dependencies(&[]);
+        setup_contract(&mut deps);
+        let minter = mock_info(MINTER, &[]);
+
+        // Mint a couple tokens (from the same owner)
+        let token_id1 = "grow1".to_string();
+        let demeter = HumanAddr::from("Demeter");
+        let token_id2 = "grow2".to_string();
+        let ceres = HumanAddr::from("Ceres");
+        let token_id3 = "sing".to_string();
+
+        let mint_msg = HandleMsg::Mint(MintMsg {
+            token_id: token_id1.clone(),
+            owner: demeter.clone(),
+            name: "Growing power".to_string(),
+            description: Some("Allows the owner the power to grow anything".to_string()),
+            image: None,
+        });
+        handle(&mut deps, mock_env(), minter.clone(), mint_msg).unwrap();
+
+        let mint_msg = HandleMsg::Mint(MintMsg {
+            token_id: token_id2.clone(),
+            owner: ceres.clone(),
+            name: "More growing power".to_string(),
+            description: Some(
+                "Allows the owner the power to grow anything even faster".to_string(),
+            ),
+            image: None,
+        });
+        handle(&mut deps, mock_env(), minter.clone(), mint_msg).unwrap();
+
+        let mint_msg = HandleMsg::Mint(MintMsg {
+            token_id: token_id3.clone(),
+            owner: demeter.clone(),
+            name: "Sing a lullaby".to_string(),
+            description: Some("Calm even the most excited children".to_string()),
+            image: None,
+        });
+        handle(&mut deps, mock_env(), minter.clone(), mint_msg).unwrap();
+
+        // get all tokens in order:
+        let expected = vec![token_id1.clone(), token_id2.clone(), token_id3.clone()];
+        let tokens = query_all_tokens(&deps, None, None).unwrap();
+        assert_eq!(&expected, &tokens.tokens);
+        // paginate
+        let tokens = query_all_tokens(&deps, None, Some(2)).unwrap();
+        assert_eq!(&expected[..2], &tokens.tokens[..]);
+        let tokens = query_all_tokens(&deps, Some(expected[1].clone()), None).unwrap();
+        assert_eq!(&expected[2..], &tokens.tokens[..]);
+
+        // get by owner
+        let by_ceres = vec![token_id2.clone()];
+        let by_demeter = vec![token_id1.clone(), token_id3.clone()];
+        // all tokens by owner
+        let tokens = query_tokens(&deps, demeter.clone(), None, None).unwrap();
+        assert_eq!(&by_demeter, &tokens.tokens);
+        let tokens = query_tokens(&deps, ceres.clone(), None, None).unwrap();
+        assert_eq!(&by_ceres, &tokens.tokens);
+
+        // paginate for demeter
+        let tokens = query_tokens(&deps, demeter.clone(), None, Some(1)).unwrap();
+        assert_eq!(&by_demeter[..1], &tokens.tokens[..]);
+        let tokens =
+            query_tokens(&deps, demeter.clone(), Some(by_demeter[0].clone()), Some(3)).unwrap();
+        assert_eq!(&by_demeter[1..], &tokens.tokens[..]);
+    }
 }
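Because `query_tokens` maps `start_after` to `Bound::exclusive`, the id passed in is itself skipped, so a caller can page through an owner's tokens by feeding the last id of each page into the next call. A sketch built only on the function above (not part of the patch; it assumes the same scope and imports as contract.rs):

```rust
// Page through all token_ids of one owner, two at a time.
// Assumes the same imports as contract.rs above (Extern, HumanAddr, StdResult, ...).
fn all_tokens_paged<S: Storage, A: Api, Q: Querier>(
    deps: &Extern<S, A, Q>,
    owner: HumanAddr,
) -> StdResult<Vec<String>> {
    let mut all = vec![];
    let mut start_after: Option<String> = None;
    loop {
        let page = query_tokens(deps, owner.clone(), start_after.clone(), Some(2))?;
        if page.tokens.is_empty() {
            return Ok(all);
        }
        // the last id returned becomes the exclusive lower bound of the next page
        start_after = page.tokens.last().cloned();
        all.extend(page.tokens);
    }
}
```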
diff --git a/contracts/cw721-base/src/msg.rs b/contracts/cw721-base/src/msg.rs
index 023771c5c..92359c517 100644
--- a/contracts/cw721-base/src/msg.rs
+++ b/contracts/cw721-base/src/msg.rs
@@ -113,6 +113,14 @@ pub enum QueryMsg {
         include_expired: Option<bool>,
     },

+    /// With Enumerable extension.
+    /// Returns all tokens owned by the given address, [] if unset.
+    /// Return type: TokensResponse.
+    Tokens {
+        owner: HumanAddr,
+        start_after: Option<String>,
+        limit: Option<u32>,
+    },
     /// With Enumerable extension.
     /// Requires pagination. Lists all token_ids controlled by the contract.
     /// Return type: TokensResponse.
diff --git a/contracts/cw721-base/src/state.rs b/contracts/cw721-base/src/state.rs
index 67947d1da..1180f8de9 100644
--- a/contracts/cw721-base/src/state.rs
+++ b/contracts/cw721-base/src/state.rs
@@ -1,20 +1,9 @@
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
-use cosmwasm_std::{CanonicalAddr, ReadonlyStorage, StdResult, Storage};
-use cosmwasm_storage::{
-    bucket, bucket_read, singleton, singleton_read, Bucket, ReadonlyBucket, ReadonlySingleton,
-    Singleton,
-};
+use cosmwasm_std::{CanonicalAddr, StdResult, Storage};

 use cw721::{ContractInfoResponse, Expiration};
-
-pub const CONFIG_KEY: &[u8] = b"config";
-pub const MINTER_KEY: &[u8] = b"minter";
-pub const CONTRACT_INFO_KEY: &[u8] = b"nft_info";
-pub const NUM_TOKENS_KEY: &[u8] = b"num_tokens";
-
-pub const TOKEN_PREFIX: &[u8] = b"tokens";
-pub const OPERATOR_PREFIX: &[u8] = b"operators";
+use cw_storage_plus::{Index, IndexList, IndexedMap, Item, Map, MultiIndex};

 #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
 pub struct TokenInfo {
@@ -39,60 +28,37 @@ pub struct Approval {
     pub expires: Expiration,
 }

-pub fn contract_info<S: Storage>(storage: &mut S) -> Singleton<S, ContractInfoResponse> {
-    singleton(storage, CONTRACT_INFO_KEY)
-}
-
-pub fn contract_info_read<S: ReadonlyStorage>(
-    storage: &S,
-) -> ReadonlySingleton<S, ContractInfoResponse> {
-    singleton_read(storage, CONTRACT_INFO_KEY)
-}
-
-pub fn mint<S: Storage>(storage: &mut S) -> Singleton<S, CanonicalAddr> {
-    singleton(storage, MINTER_KEY)
-}
-
-pub fn mint_read<S: ReadonlyStorage>(storage: &S) -> ReadonlySingleton<S, CanonicalAddr> {
-    singleton_read(storage, MINTER_KEY)
-}
+pub const CONTRACT_INFO: Item<ContractInfoResponse> = Item::new(b"nft_info");
+pub const MINTER: Item<CanonicalAddr> = Item::new(b"minter");
+pub const TOKEN_COUNT: Item<u64> = Item::new(b"num_tokens");

-fn token_count<S: Storage>(storage: &mut S) -> Singleton<S, u64> {
-    singleton(storage, NUM_TOKENS_KEY)
-}
+// pub const TOKENS: Map<&str, TokenInfo> = Map::new(b"tokens");
+pub const OPERATORS: Map<(&[u8], &[u8]), Expiration> = Map::new(b"operators");

-fn token_count_read<S: ReadonlyStorage>(storage: &S) -> ReadonlySingleton<S, u64> {
-    singleton_read(storage, NUM_TOKENS_KEY)
-}
-
-pub fn num_tokens<S: ReadonlyStorage>(storage: &S) -> StdResult<u64> {
-    Ok(token_count_read(storage).may_load()?.unwrap_or_default())
+pub fn num_tokens<S: Storage>(storage: &S) -> StdResult<u64> {
+    Ok(TOKEN_COUNT.may_load(storage)?.unwrap_or_default())
 }

 pub fn increment_tokens<S: Storage>(storage: &mut S) -> StdResult<u64> {
     let val = num_tokens(storage)? + 1;
-    token_count(storage).save(&val)?;
+    TOKEN_COUNT.save(storage, &val)?;
     Ok(val)
 }

-pub fn tokens<S: Storage>(storage: &mut S) -> Bucket<S, TokenInfo> {
-    bucket(storage, TOKEN_PREFIX)
-}
-
-pub fn tokens_read<S: ReadonlyStorage>(storage: &S) -> ReadonlyBucket<S, TokenInfo> {
-    bucket_read(storage, TOKEN_PREFIX)
+pub struct TokenIndexes<'a, S: Storage> {
+    pub owner: MultiIndex<'a, S, TokenInfo>,
 }

-pub fn operators<'a, S: Storage>(
-    storage: &'a mut S,
-    owner: &CanonicalAddr,
-) -> Bucket<'a, S, Expiration> {
-    Bucket::multilevel(storage, &[OPERATOR_PREFIX, owner.as_slice()])
+impl<'a, S: Storage> IndexList<S, TokenInfo> for TokenIndexes<'a, S> {
+    fn get_indexes(&'_ self) -> Box<dyn Iterator<Item = &'_ dyn Index<S, TokenInfo>> + '_> {
+        let v: Vec<&dyn Index<S, TokenInfo>> = vec![&self.owner];
+        Box::new(v.into_iter())
+    }
 }

-pub fn operators_read<'a, S: ReadonlyStorage>(
-    storage: &'a S,
-    owner: &CanonicalAddr,
-) -> ReadonlyBucket<'a, S, Expiration> {
-    ReadonlyBucket::multilevel(storage, &[OPERATOR_PREFIX, owner.as_slice()])
+pub fn tokens<'a, S: Storage>() -> IndexedMap<'a, &'a str, TokenInfo, S, TokenIndexes<'a, S>> {
+    let indexes = TokenIndexes {
+        owner: MultiIndex::new(|d| d.owner.to_vec(), b"tokens", b"tokens__owner"),
+    };
+    IndexedMap::new(b"tokens", indexes)
 }
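The `IndexedMap` above writes the `tokens__owner` index entry as part of every `save`, `update` or `remove`, which is what lets `query_tokens` list ids per owner without scanning the whole map. A self-contained sketch against `MockStorage`; the `TokenInfo` field list mirrors cw721-base (not fully shown in this hunk) and the canonical bytes are made up:

```rust
use cosmwasm_std::testing::MockStorage;
use cosmwasm_std::{CanonicalAddr, Order};

fn owner_index_sketch() {
    let mut store = MockStorage::default();
    // made-up canonical bytes; fine for MockStorage
    let alice = CanonicalAddr(b"alice".to_vec().into());

    let token = TokenInfo {
        owner: alice.clone(),
        approvals: vec![],
        name: "petunia".to_string(),
        description: "".to_string(),
        image: None,
    };
    // save() writes the primary record *and* the owner index entry
    tokens().save(&mut store, "petunia", &token).unwrap();

    // list primary keys for this owner straight from the index
    let ids: Vec<Vec<u8>> = tokens()
        .idx
        .owner
        .pks(&store, &alice, None, None, Order::Ascending)
        .collect();
    assert_eq!(ids, vec![b"petunia".to_vec()]);
}
```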
diff --git a/packages/storage-plus/src/indexed_map.rs b/packages/storage-plus/src/indexed_map.rs
index 9aa7af0cc..a24ac62c1 100644
--- a/packages/storage-plus/src/indexed_map.rs
+++ b/packages/storage-plus/src/indexed_map.rs
@@ -7,9 +7,9 @@ use serde::Serialize;
 use std::marker::PhantomData;

 use crate::indexes::Index;
-use crate::keys::{Prefixer, PrimaryKey};
+use crate::keys::{EmptyPrefix, Prefixer, PrimaryKey};
 use crate::map::Map;
-use crate::prefix::Prefix;
+use crate::prefix::{Bound, Prefix};

 pub trait IndexList<S: Storage, T: Serialize + DeserializeOwned + Clone> {
     fn get_indexes(&'_ self) -> Box<dyn Iterator<Item = &'_ dyn Index<S, T>> + '_>;
@@ -134,6 +134,31 @@ where
     }
 }

+// short-cut for simple keys, rather than .prefix(()).range(...)
+impl<'a, K, T, S, I> IndexedMap<'a, K, T, S, I>
+where
+    K: PrimaryKey<'a>,
+    T: Serialize + DeserializeOwned + Clone,
+    S: Storage,
+    I: IndexList<S, T>,
+    K::Prefix: EmptyPrefix,
+{
+    // I would prefer not to copy code from Prefix, but no other way
+    // with lifetimes (create Prefix inside function and return ref = no no)
+    pub fn range<'c>(
+        &self,
+        store: &'c S,
+        min: Option<Bound>,
+        max: Option<Bound>,
+        order: cosmwasm_std::Order,
+    ) -> Box<dyn Iterator<Item = StdResult<KV<T>>> + 'c>
+    where
+        T: 'c,
+    {
+        self.prefix(K::Prefix::new()).range(store, min, max, order)
+    }
+}
+
 #[cfg(test)]
 mod test {
     use super::*;
diff --git a/packages/storage-plus/src/indexes.rs b/packages/storage-plus/src/indexes.rs
index 244b346d7..899ee3e88 100644
--- a/packages/storage-plus/src/indexes.rs
+++ b/packages/storage-plus/src/indexes.rs
@@ -37,7 +37,6 @@ where
     S: Storage,
     T: Serialize + DeserializeOwned + Clone,
 {
-    // TODO: pk: PrimaryKey not just &[u8] ???
     fn save(&self, store: &mut S, pk: &[u8], data: &T) -> StdResult<()>;
     fn remove(&self, store: &mut S, pk: &[u8], old_data: &T) -> StdResult<()>;
 }
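The `K::Prefix: EmptyPrefix` bound above limits the new `range` shortcut to maps whose key has a trivial `()` prefix, for example the single-component `&str` token ids used by cw721-base. A sketch of the shortcut versus what it expands to, reusing the `tokens()` factory from `state.rs`; this assumes `IndexedMap::prefix` is callable by users, as the code comment implies:

```rust
use cosmwasm_std::testing::MockStorage;
use cosmwasm_std::{Order, StdResult};

fn range_shortcut_sketch(store: &MockStorage) -> StdResult<()> {
    // new shortcut: iterate the whole indexed map directly...
    let all: StdResult<Vec<_>> = tokens()
        .range(store, None, None, Order::Ascending)
        .collect();

    // ...which is sugar for an empty () prefix followed by Prefix::range
    let same: StdResult<Vec<_>> = tokens()
        .prefix(())
        .range(store, None, None, Order::Ascending)
        .collect();

    assert_eq!(all?, same?);
    Ok(())
}
```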
diff --git a/packages/storage-plus/src/keys.rs b/packages/storage-plus/src/keys.rs
index 1a07ff0ee..ecc68a680 100644
--- a/packages/storage-plus/src/keys.rs
+++ b/packages/storage-plus/src/keys.rs
@@ -1,6 +1,8 @@
+use std::marker::PhantomData;
+use std::str::from_utf8;
+
 use crate::helpers::{decode_length, namespaces_with_key};
 use crate::Endian;
-use std::marker::PhantomData;

 // pub trait PrimaryKey<'a>: Copy {
 pub trait PrimaryKey<'a>: Clone {
@@ -25,6 +27,8 @@ type Pk0 = ();
 type Pk1<'a> = &'a [u8];
 type Pk2<'a, T = &'a [u8], U = &'a [u8]> = (T, U);

+type PkStr<'a> = &'a str;
+
 impl<'a> PrimaryKey<'a> for Pk1<'a> {
     type Prefix = Pk0;

@@ -38,6 +42,20 @@ impl<'a> PrimaryKey<'a> for Pk1<'a> {
     }
 }

+// Provide a string version of this to raw encode strings
+impl<'a> PrimaryKey<'a> for PkStr<'a> {
+    type Prefix = Pk0;
+
+    fn key<'b>(&'b self) -> Vec<&'b [u8]> {
+        // this is simple, we don't add more prefixes
+        vec![self.as_bytes()]
+    }
+
+    fn parse_key(serialized: &'a [u8]) -> Self {
+        from_utf8(serialized).unwrap()
+    }
+}
+
 // use generics for combining there - so we can use &[u8], PkOwned, or IntKey
 impl<'a, T: PrimaryKey<'a> + Prefixer<'a>, U: PrimaryKey<'a>> PrimaryKey<'a> for (T, U) {
     type Prefix = T;
@@ -80,6 +98,13 @@ impl<'a> Prefixer<'a> for Pk2<'a> {
     }
 }

+// Provide a string version of this to raw encode strings
+impl<'a> Prefixer<'a> for PkStr<'a> {
+    fn prefix<'b>(&'b self) -> Vec<&'b [u8]> {
+        vec![self.as_bytes()]
+    }
+}
+
 // this is a marker for the Map.range() helper, so we can detect () in Generic bounds
 pub trait EmptyPrefix {
     fn new() -> Self;
@@ -90,7 +115,7 @@ impl EmptyPrefix for () {
 }

 // Add support for an dynamic keys - constructor functions below
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, PartialEq, Eq)]
 pub struct PkOwned(pub Vec<u8>);

 impl<'a> PrimaryKey<'a> for PkOwned {
@@ -142,7 +167,7 @@ pub type U128Key = IntKey<u128>;
 /// let k = U64Key::new(12345);
 /// let k = U32Key::from(12345);
 /// let k: U16Key = 12345.into();
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, PartialEq, Eq)]
 pub struct IntKey<T: Endian> {
     pub wrapped: PkOwned,
     pub data: PhantomData<T>,
 }
@@ -198,6 +223,27 @@ mod test {
         assert_eq!(4242u32.to_be_bytes().to_vec(), path[0].to_vec());
     }

+    #[test]
+    fn str_key_works() {
+        let k: &str = "hello";
+        let path = k.key();
+        assert_eq!(1, path.len());
+        assert_eq!("hello".as_bytes(), path[0]);
+
+        let joined = k.joined_key();
+        let parsed = PkStr::parse_key(&joined);
+        assert_eq!(parsed, "hello");
+    }
+
+    #[test]
+    fn nested_str_key_works() {
+        let k: (&str, &[u8]) = ("hello", b"world");
+        let path = k.key();
+        assert_eq!(2, path.len());
+        assert_eq!("hello".as_bytes(), path[0]);
+        assert_eq!("world".as_bytes(), path[1]);
+    }
+
     #[test]
     fn composite_byte_key() {
         let k: (&[u8], &[u8]) = (b"foo", b"bar");
@@ -253,5 +299,22 @@ mod test {
         assert_eq!(key, parsed);
     }

-    // TODO: parse joined with int/owned keys
+    #[test]
+    fn parse_joined_keys_int() {
+        let key: U64Key = 12345678.into();
+        let joined = key.joined_key();
+        assert_eq!(8, joined.len());
+        let parsed = U64Key::parse_key(&joined);
+        assert_eq!(key, parsed);
+    }
+
+    #[test]
+    fn parse_joined_keys_string_int() {
+        let key: (U32Key, &str) = (54321.into(), "random");
+        let joined = key.joined_key();
+        assert_eq!(2 + 4 + 6, joined.len());
+        let parsed = <(U32Key, &str)>::parse_key(&joined);
+        assert_eq!(key, parsed);
+        assert_eq!("random", parsed.1);
+    }
 }
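With the `PkStr` impls above, `&str` now works both as a standalone primary key and as one component of a composite key. An invented usage sketch; the map names and value types are not from this patch, and it assumes the `U32Key` alias is re-exported at the crate root:

```rust
use cosmwasm_std::testing::MockStorage;
use cosmwasm_std::StdResult;
use cw_storage_plus::{Map, U32Key};

const NAMES: Map<&str, u64> = Map::new(b"names");
const SCORES: Map<(U32Key, &str), u64> = Map::new(b"scores");

fn str_keys_sketch() -> StdResult<()> {
    let mut store = MockStorage::default();

    // a plain &str primary key, no manual .as_bytes() needed
    NAMES.save(&mut store, "alice", &1)?;
    assert_eq!(NAMES.load(&store, "alice")?, 1);

    // &str as the second half of a composite (int, str) key,
    // mirroring the parse_joined_keys_string_int test above
    SCORES.save(&mut store, (42u32.into(), "alice"), &500)?;
    assert_eq!(SCORES.load(&store, (42u32.into(), "alice"))?, 500);
    Ok(())
}
```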
diff --git a/packages/storage-plus/src/lib.rs b/packages/storage-plus/src/lib.rs
index d8bd42744..4e5066175 100644
--- a/packages/storage-plus/src/lib.rs
+++ b/packages/storage-plus/src/lib.rs
@@ -11,7 +11,7 @@ mod prefix;
 pub use endian::Endian;

 #[cfg(feature = "iterator")]
-pub use indexed_map::IndexedMap;
+pub use indexed_map::{IndexList, IndexedMap};
 #[cfg(feature = "iterator")]
 pub use indexes::{index_int, index_string, Index, MultiIndex, UniqueIndex};
 pub use item::Item;