@@ -34,7 +34,7 @@ struct DefaultComparer<double>
3434 inline static hash_t GetHashCode (double d)
3535 {
3636 __int64 i64 = *(__int64*)&d;
37-        return (uint)((i64 >> 32) ^ (uint)i64);
37+        return (hash_t)((i64 >> 32) ^ (uint)i64);
3838 }
3939};
4040
@@ -51,8 +51,7 @@ struct DefaultComparer<T *>
5151 // Shifting helps us eliminate any sameness due to our alignment strategy.
5252 // TODO: This works for Arena memory only. Recycler memory is 16 byte aligned.
5353 // Find a good universal hash for pointers.
54- uint hash = (uint)(((size_t )i) >> ArenaAllocator::ObjectAlignmentBitShift);
55- return hash;
54+ return (hash_t )(((size_t )i) >> ArenaAllocator::ObjectAlignmentBitShift);
5655 }
5756};
5857
@@ -64,15 +63,15 @@ struct DefaultComparer<size_t>
6463 return x == y;
6564 }
6665
67- inline static uint GetHashCode (size_t i)
66+ inline static hash_t GetHashCode (size_t i)
6867 {
6968#if _WIN64
7069 // For 64 bits we want all 64 bits of the pointer to be represented in the hash code.
7170 uint32 hi = ((UINT_PTR) i >> 32 );
7271 uint32 lo = (uint32) (i & 0xFFFFFFFF );
73- uint hash = hi ^ lo;
72+ hash_t hash = hi ^ lo;
7473#else
75- uint hash = i;
74+ hash_t hash = i;
7675#endif
7776 return hash;
7877 }
@@ -109,8 +108,7 @@ struct RecyclerPointerComparer
109108 // Shifting helps us eliminate any sameness due to our alignment strategy.
110109 // TODO: This works for Recycler memory only. Arena memory is 8 byte aligned.
111110 // Find a good universal hash for pointers.
112- uint hash = (uint)(((size_t )i) >> HeapConstants::ObjectAllocationShift);
113- return hash;
111+ return (hash_t )(((size_t )i) >> HeapConstants::ObjectAllocationShift);
114112 }
115113};
116114
@@ -125,7 +123,7 @@ struct RecyclerPointerComparer
125123// FNV-1a above results better but expensive for lookups in small data sets.
126124#define CC_HASH_OFFSET_VALUE 0
127125#define CC_HASH_LOGIC(hash, byte) \
128-    hash ^= _rotl(hash, 7); \
126+    hash = _rotl(hash, 7); \
129127    hash ^= byte;
130128
131129template <>
0 commit comments