Sorting out FxBuildHasher issue
robinbernon committed Mar 30, 2021
1 parent 75c4a17 commit b5a3ab0
Showing 11 changed files with 15 additions and 16 deletions.
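In short, this commit pins hashlink to the robinbernon/hashlink fork's generic_hashmap_deserialization branch, switches imports to the crate-root hashlink::LinkedHashMap re-export, and threads FxBuildHasher through as the map's hasher type parameter. A minimal sketch of the resulting map type, assuming only fxhash 0.2 and hashlink's with_hasher constructor (the key and value types here are illustrative, not taken from the crate):

```rust
use fxhash::FxBuildHasher;
use hashlink::LinkedHashMap;

fn main() {
    // FxBuildHasher is the fxhash crate's fast, non-cryptographic BuildHasher;
    // it goes in LinkedHashMap's third type parameter in place of the default hasher.
    let mut fields: LinkedHashMap<String, u32, FxBuildHasher> =
        LinkedHashMap::with_hasher(FxBuildHasher::default());
    fields.insert("id".to_owned(), 0);
    fields.insert("name".to_owned(), 1);
    assert_eq!(fields.get("id"), Some(&0));
}
```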
2 changes: 1 addition & 1 deletion amadeus-parquet/Cargo.toml
@@ -29,7 +29,7 @@ educe = "0.4"
flate2 = { version = "1.0.2", features = ["rust_backend"], default-features = false }
futures = "0.3"
fxhash = "0.2"
-hashlink = { version = "0.6", features = ["serde_impl"] }
+hashlink = { version = "0.6.1-alpha.0", features = ["serde_impl"], git = "https://github.com/robinbernon/hashlink", branch = "generic_hashmap_deserialization" }
lz-fear = "0.1"
num-bigint = "0.3"
quick-error = "1.2.2"
2 changes: 1 addition & 1 deletion amadeus-parquet/src/internal/record/impls.rs
@@ -1,4 +1,4 @@
-use hashlink::linked_hash_map::LinkedHashMap;
+use hashlink::LinkedHashMap;
use std::{
any::type_name, collections::HashMap, convert::{TryFrom, TryInto}, fmt, hash::{BuildHasher, Hash}, marker::PhantomData, string::FromUtf8Error, sync::Arc
};
5 changes: 3 additions & 2 deletions amadeus-parquet/src/internal/record/predicates.rs
@@ -1,10 +1,11 @@
-use hashlink::linked_hash_map::LinkedHashMap;
+use hashlink::LinkedHashMap;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

use amadeus_types::{Bson, Date, DateTime, Decimal, Enum, Group, Json, List, Time, Value};

use crate::internal::record::ParquetData;
+use fxhash::FxBuildHasher;

#[derive(Clone, Debug, Serialize, Deserialize)]
/// Predicate for [`Group`]s
@@ -22,7 +23,7 @@ impl<K, V> MapPredicate<K, V> {
/// Predicate for [`Group`]s
pub struct GroupPredicate(
/// Map of field names to predicates for the fields in the group
-pub(super) LinkedHashMap<String, Option<<Value as ParquetData>::Predicate>>,
+pub(super) LinkedHashMap<String, Option<<Value as ParquetData>::Predicate>, FxBuildHasher>,
);
impl GroupPredicate {
pub fn new<I>(fields: I) -> Self
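Since GroupPredicate now stores its field map as LinkedHashMap<_, _, FxBuildHasher>, any constructor has to build the map with that hasher rather than the default one. A hedged sketch of the pattern (collect_fields and its signature are hypothetical helpers for illustration, not the crate's GroupPredicate::new):

```rust
use fxhash::FxBuildHasher;
use hashlink::LinkedHashMap;

// Hypothetical helper showing how (field name, predicate) pairs end up in a
// LinkedHashMap keyed by FxBuildHasher: start from with_hasher and insert in
// iteration order, which the linked map preserves.
fn collect_fields<P>(
    fields: impl IntoIterator<Item = (String, Option<P>)>,
) -> LinkedHashMap<String, Option<P>, FxBuildHasher> {
    let mut map = LinkedHashMap::with_hasher(FxBuildHasher::default());
    for (name, predicate) in fields {
        map.insert(name, predicate);
    }
    map
}
```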
4 changes: 2 additions & 2 deletions amadeus-parquet/src/internal/record/reader.rs
@@ -24,7 +24,7 @@
//! that are optional or repeated.
use fxhash::FxBuildHasher;
-use hashlink::linked_hash_map::LinkedHashMap;
+use hashlink::LinkedHashMap;
use std::{
collections::HashMap, convert::TryInto, error::Error, marker::PhantomData, mem, sync::Arc
};
@@ -948,7 +948,7 @@ where
mod tests {
use super::*;

-use hashlink::linked_hash_map::LinkedHashMap;
+use hashlink::LinkedHashMap;
use std::{collections::HashMap, sync::Arc};

use crate::internal::{
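For the test module touched above, a small sketch of the property these maps keep after the change: a LinkedHashMap keyed by FxBuildHasher still iterates in insertion order (presumably why a linked map is used in the record reader in the first place), so swapping the hasher does not perturb field ordering. The test below is illustrative only and not part of the crate's test suite:

```rust
use fxhash::FxBuildHasher;
use hashlink::LinkedHashMap;

#[test]
fn fx_linked_hash_map_preserves_insertion_order() {
    let mut map: LinkedHashMap<String, usize, FxBuildHasher> =
        LinkedHashMap::with_hasher(FxBuildHasher::default());
    for (i, name) in ["bp1", "bp2", "bp3"].iter().enumerate() {
        map.insert((*name).to_owned(), i);
    }
    // Iteration order follows insertion order, independent of the hasher in use.
    let keys: Vec<String> = map.keys().cloned().collect();
    assert_eq!(keys, vec!["bp1", "bp2", "bp3"]);
}
```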
2 changes: 1 addition & 1 deletion amadeus-parquet/src/internal/record/schemas.rs
@@ -31,7 +31,7 @@
//! ```
use fxhash::FxBuildHasher;
-use hashlink::linked_hash_map::LinkedHashMap;
+use hashlink::LinkedHashMap;
use std::{
fmt::{self, Debug, Display}, marker::PhantomData, mem, str::FromStr
};
2 changes: 1 addition & 1 deletion amadeus-serde/Cargo.toml
@@ -25,7 +25,7 @@ chrono = { version = "0.4", default-features = false, features = ["serde"] }
csv = "1.0"
educe = "0.4"
futures = "0.3"
hashlink = "0.6"
hashlink = { version = "0.6.1-alpha.0", git = "https://github.com/robinbernon/hashlink", branch = "generic_hashmap_deserialization" }
serde = { version = "1.0", features = ["derive"] }
serde_bytes = "0.11"
serde_closure = "0.3"
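Why the fork? The branch name suggests that stock hashlink 0.6 only deserializes LinkedHashMap with its default hasher, while the generic_hashmap_deserialization branch generalizes the serde support over any BuildHasher + Default, which is what the FxBuildHasher-keyed maps above need. A sketch of the round trip this would enable, assuming that generalized impl plus serde_json as a stand-in format (both are assumptions, not confirmed by the diff):

```rust
use fxhash::FxBuildHasher;
use hashlink::LinkedHashMap;

// Assumes the fork's serde support covers LinkedHashMap<K, V, S> for any
// S: BuildHasher + Default, and that serde_json is available; both are
// assumptions for illustration, not facts established by this commit.
fn roundtrip() -> Result<(), serde_json::Error> {
    let mut map: LinkedHashMap<String, u32, FxBuildHasher> =
        LinkedHashMap::with_hasher(FxBuildHasher::default());
    map.insert("a".to_owned(), 1);

    let json = serde_json::to_string(&map)?;
    let back: LinkedHashMap<String, u32, FxBuildHasher> = serde_json::from_str(&json)?;
    assert_eq!(back.get("a"), Some(&1));
    Ok(())
}
```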
2 changes: 1 addition & 1 deletion amadeus-serde/src/impls.rs
@@ -1,6 +1,6 @@
#![allow(clippy::too_many_lines)]

-use hashlink::linked_hash_map::LinkedHashMap;
+use hashlink::LinkedHashMap;
use recycle::VecExt;
use serde::{
de::{self, MapAccess, SeqAccess, Visitor}, ser::{SerializeSeq, SerializeStruct, SerializeTupleStruct}, Deserializer, Serializer
2 changes: 1 addition & 1 deletion amadeus-types/Cargo.toml
@@ -23,7 +23,7 @@ amadeus-core = { version = "=0.4.2", path = "../amadeus-core" }
chrono = { version = "0.4", default-features = false, features = ["std", "serde"] }
chrono-tz = { version = "0.5", features = ["serde"] }
fxhash = "0.2"
hashlink = "0.6"
hashlink = { version = "0.6.1-alpha.0", git = "https://github.com/robinbernon/hashlink", branch = "generic_hashmap_deserialization" }
once_cell = "1.0"
ordered-float = "2.0"
serde = { version = "1.0", features = ["derive"] }
2 changes: 1 addition & 1 deletion amadeus-types/src/group.rs
@@ -1,7 +1,7 @@
//! Implement [`Record`] for [`Group`] aka [`Row`].
use fxhash::FxBuildHasher;
-use hashlink::linked_hash_map::LinkedHashMap;
+use hashlink::LinkedHashMap;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::{
cmp::Ordering, fmt::{self, Debug}, ops::Index, slice::SliceIndex, str, sync::Arc
2 changes: 1 addition & 1 deletion amadeus-types/src/value.rs
@@ -3,7 +3,7 @@
#![allow(clippy::type_complexity)]

use fxhash::FxBuildHasher;
-use hashlink::linked_hash_map::LinkedHashMap;
+use hashlink::LinkedHashMap;
use recycle::VecExt;
use serde::{de::Deserializer, ser::Serializer, Deserialize, Serialize};
use std::{
6 changes: 2 additions & 4 deletions src/helpers/mod.rs
@@ -1,7 +1,5 @@
mod filter_nulls_and_unwrap;
mod get_field_from_value;

-pub use filter_nulls_and_unwrap::{
-DoubleOptionFilterNullHandler, FilterNullsAndDoubleUnwrap, FilterNullsAndUnwrap, OptionFilterNullHandler
-};
-pub use get_field_from_value::{GetFieldFromValue, UnwrapFieldHandler};
+pub use filter_nulls_and_unwrap::{FilterNullsAndDoubleUnwrap, FilterNullsAndUnwrap};
+pub use get_field_from_value::GetFieldFromValue;
