feat: lun3x#21
Support composite keys: a struct-level extra_field attribute declares an additional index whose key is computed by a method on the element.
huang12zheng committed Jun 9, 2023
1 parent a086ff8 commit 324baa1
Showing 3 changed files with 165 additions and 21 deletions.
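
For orientation, the surface area this commit adds (exercised by tests/extra_field.rs below) is a struct-level #[multi_index(extra_field(<index_kind>, <method>, <key_type>))] attribute: it declares an additional index whose key is produced by calling <method> on the element, which is how a key composed of several stored fields can be indexed. The sketch below is illustrative only — the Order struct and its composite_key method are hypothetical, and using a tuple as the key type is an assumption based on the commit title rather than something this commit's tests cover.

use multi_index_map::MultiIndexMap;

#[derive(MultiIndexMap, Clone)]
// Arguments are: index kind, name of the key method, key type.
#[multi_index(extra_field(hashed_unique, composite_key, (u64, String)))]
struct Order {
    #[multi_index(hashed_unique)]
    id: u64,
    symbol: String,
}

impl Order {
    // The method named in extra_field supplies the key for the extra index.
    pub fn composite_key(&self) -> (u64, String) {
        (self.id, self.symbol.clone())
    }
}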
79 changes: 79 additions & 0 deletions src/extra_fields.rs
@@ -0,0 +1,79 @@
use proc_macro2::TokenStream;

use syn::{
parenthesized,
parse::{Parse, ParseStream},
parse_quote, token, Attribute, Field, Ident, Path, Token, Type, Visibility,
};

struct AttrInner {
_paren_token: token::Paren,
pub path: Path,
pub tokens: TokenStream,
}
impl Parse for AttrInner {
fn parse(input: ParseStream) -> Result<Self, syn::Error> {
let content;
Ok(Self {
_paren_token: parenthesized!(content in input),
path: content.call(Path::parse_mod_style)?,
tokens: content.parse()?,
})
}
}
struct ExtraFiledAst {
_paren_token: token::Paren,
pub directly: Ident,
_s1: Token![,],
pub func: Ident,
_s2: Token![,],
pub ty: Type,
}
impl Parse for ExtraFiledAst {
fn parse(input: ParseStream) -> Result<Self, syn::Error> {
let content;
Ok(Self {
_paren_token: parenthesized!(content in input),
directly: content.parse()?,
_s1: content.parse()?,
func: content.parse()?,
_s2: content.parse()?,
ty: content.parse()?,
})
}
}

pub trait IntoExtraFiled {
type Error;
fn into_extra_filed(&self, vis: Visibility) -> Result<Option<Field>, Self::Error>;
}
// impl TryFrom<Attribute> for Option<ExtraField> {
impl IntoExtraFiled for Attribute {
fn into_extra_filed(&self, vis: Visibility) -> Result<Option<Field>, Self::Error> {
if self.path.is_ident("multi_index") {
let tokens = &self.tokens;
let inner: AttrInner = parse_quote!(#tokens);
if inner.path.is_ident("extra_field") {
let tokens = &inner.tokens;
let ExtraFiledAst {
directly,
func: ident,
ty,
..
} = parse_quote!(#tokens);
let attr: Attribute = parse_quote!(#[multi_index(#directly)] );
let ident = Some(ident);
return Ok(Some(Field {
attrs: vec![attr],
vis: vis,
ident,
colon_token: None,
ty,
}));
}
}
Ok(None)
}

type Error = syn::Error;
}
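
A hedged sketch of a unit test that could sit alongside this new module (not part of the commit; it assumes the syn 1.x API this file is written against): it checks that the struct-level attribute is decomposed into a synthetic Field equivalent to `#[multi_index(hashed_unique)] field2_extra: String`, which the rest of the derive then treats like any other indexed field.

#[cfg(test)]
mod tests {
    use super::IntoExtraFiled;
    use syn::{parse_quote, Attribute, Visibility};

    #[test]
    fn extra_field_attr_becomes_a_synthetic_field() {
        let attr: Attribute =
            parse_quote!(#[multi_index(extra_field(hashed_unique, field2_extra, String))]);
        let vis: Visibility = parse_quote!(pub);

        // into_extra_filed returns Ok(Some(field)) for a recognised attribute.
        let field = attr
            .into_extra_filed(vis)
            .expect("attribute should parse")
            .expect("extra_field attribute should yield a field");

        assert_eq!(field.ident.unwrap().to_string(), "field2_extra");
        assert_eq!(field.attrs.len(), 1);
        assert!(field.attrs[0].path.is_ident("multi_index"));
    }
}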
73 changes: 52 additions & 21 deletions src/lib.rs
@@ -1,7 +1,10 @@
mod extra_fields;

use convert_case::Casing;
use extra_fields::IntoExtraFiled;
use proc_macro_error::{abort_call_site, proc_macro_error};
use quote::{format_ident, quote};
use syn::{parse_macro_input, DeriveInput};
use quote::{format_ident, quote, ToTokens};
use syn::{parse_macro_input, DeriveInput, Field};

#[proc_macro_derive(MultiIndexMap, attributes(multi_index))]
#[proc_macro_error]
@@ -15,6 +18,13 @@ pub fn multi_index_map(input: proc_macro::TokenStream) -> proc_macro::TokenStream
_ => abort_call_site!("MultiIndexMap only supports structs as elements"),
};

    // Extract the struct-level attrs that declare extra fields backed by a func on the element
let extra_fields_ivs = input.vis.clone();
let extra_fileds: Vec<Field> = (&input.attrs)
.iter()
.map(|e| IntoExtraFiled::into_extra_filed(e, extra_fields_ivs.clone()))
.filter_map(|e| e.unwrap())
.collect();
    // Verify the struct fields are named fields, otherwise throw an error as we do not support Unnamed or Unit structs.
let named_fields = match fields {
syn::Fields::Named(f) => f,
@@ -25,16 +35,23 @@ pub fn multi_index_map(input: proc_macro::TokenStream) -> proc_macro::TokenStream

// Filter out all the fields that do not have a multi_index attribute, so we can ignore the non-indexed fields.
let fields_to_index = || {
named_fields.named.iter().filter(|f| {
f.attrs.first().is_some() && f.attrs.first().unwrap().path.is_ident("multi_index")
})
named_fields
.named
.iter()
.filter(|f| {
f.attrs.first().is_some() && f.attrs.first().unwrap().path.is_ident("multi_index")
})
.map(|e| (false, e))
.chain(extra_fileds.iter().map(|e| (true, e)))
};

// For each indexed field generate a TokenStream representing the lookup table for that field
    // Each lookup table maps its index to a position in the backing storage,
// or multiple positions in the backing storage in the non-unique indexes.
let lookup_table_fields = fields_to_index().map(|f| {
let index_name = format_ident!("_{}_index", f.ident.as_ref().unwrap());
let (_, f) = f;
eprintln!("{}", f.to_token_stream());
let index_name: proc_macro2::Ident = format_ident!("_{}_index", f.ident.as_ref().unwrap());
let ty = &f.ty;

let (ordering, uniqueness) = get_index_kind(f).unwrap_or_else(|| {
@@ -48,16 +65,16 @@ pub fn multi_index_map(input: proc_macro::TokenStream) -> proc_macro::TokenStream
},
Ordering::Ordered => quote! {
#index_name: std::collections::BTreeMap<#ty, usize>,
}
}
},
},
Uniqueness::NonUnique => match ordering {
Ordering::Hashed => quote! {
#index_name: rustc_hash::FxHashMap<#ty, Vec<usize>>,
},
Ordering::Ordered => quote! {
#index_name: std::collections::BTreeMap<#ty, Vec<usize>>,
}
}
},
},
}
});

@@ -66,22 +83,26 @@ pub fn multi_index_map(input: proc_macro::TokenStream) -> proc_macro::TokenStream
// creating a new Vec if necessary.
let inserts: Vec<proc_macro2::TokenStream> = fields_to_index()
.map(|f| {
let (is_extra_field,f) = f;
let field_name = f.ident.as_ref().unwrap();
let field_name_string = field_name.to_string();
let index_name = format_ident!("_{}_index", field_name);
let (_ordering, uniqueness) = get_index_kind(f).unwrap_or_else(|| {
abort_call_site!("Attributes must be in the style #[multi_index(hashed_unique)]")
});
let field_name = if is_extra_field {
quote!(#field_name())
} else {quote!(#field_name)};

match uniqueness {
Uniqueness::Unique => quote! {
Uniqueness::Unique => quote! {
let orig_elem_idx = self.#index_name.insert(elem.#field_name.clone(), idx);
if orig_elem_idx.is_some() {
panic!("Unable to insert element, uniqueness constraint violated on field '{}'", #field_name_string);
}
},
Uniqueness::NonUnique => quote! {
self.#index_name.entry(elem.#field_name.clone()).or_insert(Vec::with_capacity(1)).push(idx);
self.#index_name.entry(elem.#field_name.clone()).or_insert(Vec::with_capacity(1)).push(idx);
},
}
})
@@ -90,13 +111,17 @@ pub fn multi_index_map(input: proc_macro::TokenStream) -> proc_macro::TokenStream
// For each indexed field generate a TokenStream representing the remove from that field's lookup table.
let removes: Vec<proc_macro2::TokenStream> = fields_to_index()
.map(|f| {
let (is_extra_field,f) = f;
let field_name = f.ident.as_ref().unwrap();
let index_name = format_ident!("_{}_index", field_name);
let field_name_string = field_name.to_string();
let error_msg = format!("Internal invariants broken, unable to find element in index '{field_name_string}' despite being present in another");
let (_ordering, uniqueness) = get_index_kind(f).unwrap_or_else(|| {
abort_call_site!("Attributes must be in the style #[multi_index(hashed_unique)]")
});
let field_name = if is_extra_field {
quote!(#field_name())
} else {quote!(#field_name)};

match uniqueness {
Uniqueness::Unique => quote! {
@@ -119,16 +144,19 @@ pub fn multi_index_map(input: proc_macro::TokenStream) -> proc_macro::TokenStream
})
.collect();


// For each indexed field generate a TokenStream representing the combined remove and insert from that field's lookup table.
let modifies: Vec<proc_macro2::TokenStream> = fields_to_index().map(|f| {
let (is_extra_field,f) = f;
let field_name = f.ident.as_ref().unwrap();
let field_name_string = field_name.to_string();
let index_name = format_ident!("_{}_index", field_name);
let error_msg = format!("Internal invariants broken, unable to find element in index '{field_name_string}' despite being present in another");
let (_ordering, uniqueness) = get_index_kind(f).unwrap_or_else(|| {
abort_call_site!("Attributes must be in the style #[multi_index(hashed_unique)]")
});
let field_name = if is_extra_field {
quote!(#field_name())
} else {quote!(#field_name)};

match uniqueness {
Uniqueness::Unique => quote! {
@@ -142,17 +170,18 @@ pub fn multi_index_map(input: proc_macro::TokenStream) -> proc_macro::TokenStream
let idxs = self.#index_name.get_mut(&elem_orig.#field_name).expect(#error_msg);
let pos = idxs.iter().position(|x| *x == idx).expect(#error_msg);
idxs.remove(pos);
self.#index_name.entry(elem.#field_name.clone()).or_insert(Vec::with_capacity(1)).push(idx);
self.#index_name.entry(elem.#field_name.clone()).or_insert(Vec::with_capacity(1)).push(idx);
},
}
}).collect();

let clears: Vec<proc_macro2::TokenStream> = fields_to_index()
.map(|f| {
let (_, f) = f;
let field_name = f.ident.as_ref().unwrap();
let index_name = format_ident!("_{}_index", field_name);
quote!{

quote! {
self.#index_name.clear();
}
})
@@ -165,6 +194,7 @@ pub fn multi_index_map(input: proc_macro::TokenStream) -> proc_macro::TokenStream

// For each indexed field generate a TokenStream representing all the accessors for the underlying storage via that field's lookup table.
let accessors = fields_to_index().map(|f| {
let (_,f) = f;
let field_name = f.ident.as_ref().unwrap();
let field_vis = &f.vis;
let index_name = format_ident!("_{}_index", field_name);
@@ -243,7 +273,7 @@ pub fn multi_index_map(input: proc_macro::TokenStream) -> proc_macro::TokenStream
} else {
Vec::new()
}
}
}
},
};

@@ -256,9 +286,9 @@ pub fn multi_index_map(input: proc_macro::TokenStream) -> proc_macro::TokenStream
let elem = &mut self._store[idx];
let elem_orig = elem.clone();
f(elem);

#(#modifies)*

Some(elem)
}
},
@@ -288,6 +318,7 @@ pub fn multi_index_map(input: proc_macro::TokenStream) -> proc_macro::TokenStream
// For each indexed field generate a TokenStream representing the Iterator over the backing storage via that field,
// such that the elements are accessed in an order defined by the index rather than the backing storage.
let iterators = fields_to_index().map(|f| {
let (_,f) = f;
let field_name = f.ident.as_ref().unwrap();
let field_vis = &f.vis;
let field_name_string = field_name.to_string();
@@ -407,7 +438,7 @@ pub fn multi_index_map(input: proc_macro::TokenStream) -> proc_macro::TokenStream
}

#(#iterators)*

};

// Hand the output tokens back to the compiler.
@@ -453,4 +484,4 @@ fn get_index_kind(f: &syn::Field) -> Option<(Ordering, Uniqueness)> {
} else {
None
}
}
}
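
The mechanism the lib.rs changes rely on is small enough to show in isolation: when is_extra_field is true, the field identifier is re-wrapped as #field_name(), so every quote! arm that reads elem.#field_name switches from a field access to a method call. Below is a standalone sketch using quote/proc-macro2 directly (the crate's own dependencies), not this crate's internals.

use quote::{format_ident, quote};

fn main() {
    let field_name = format_ident!("field2_extra");

    // Stored field: interpolating the bare identifier yields a field access.
    let stored = quote!(elem.#field_name.clone());

    // Extra field: wrapping the identifier as a call first, as lib.rs now does,
    // yields a method call instead.
    let field_name = quote!(#field_name());
    let extra = quote!(elem.#field_name.clone());

    println!("{stored}"); // prints something like: elem . field2_extra . clone ()
    println!("{extra}");  // prints something like: elem . field2_extra () . clone ()
}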
34 changes: 34 additions & 0 deletions tests/extra_field.rs
@@ -0,0 +1,34 @@
use multi_index_map::MultiIndexMap;

#[derive(Hash, PartialEq, Eq, Clone)]
struct TestNonPrimitiveType(u64);

#[derive(MultiIndexMap, Clone)]
#[multi_index(extra_field(hashed_unique, field2_extra, String))]
struct TestElement {
#[multi_index(hashed_unique)]
field1: TestNonPrimitiveType,
field2: String,
}
impl TestElement {
pub fn field2_extra(&self) -> String {
format!("{}{}", self.field2, "111")
}
}
#[test]
fn test_insert_and_get() {
let mut map = MultiIndexTestElementMap::default();
let elem1 = TestElement {
field1: TestNonPrimitiveType(42),
field2: "ElementOne".to_string(),
};
map.insert(elem1);

assert_eq!(map.len(), 1);
assert_eq!(
map.get_by_field2_extra(&"ElementOne111".to_owned())
.unwrap()
.field2,
"ElementOne"
);
}
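
Note what the test asserts: lookups on the extra index use the method's return value ("ElementOne111") as the key, while the stored field2 is unchanged ("ElementOne"). The element itself never gains a field2_extra field; only the generated _field2_extra_index lookup table holds the computed key.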
