@@ -134,15 +134,22 @@ fn h1(hash: u64) -> usize {
     hash as usize
 }
 
+// Constant for the h2 function that grabs the top 7 bits of the hash.
+const MIN_HASH_LEN: usize = if mem::size_of::<usize>() < mem::size_of::<u64>() {
+    mem::size_of::<usize>()
+} else {
+    mem::size_of::<u64>()
+};
+
 /// Secondary hash function, saved in the low 7 bits of the control byte.
 #[inline]
 #[allow(clippy::cast_possible_truncation)]
 fn h2(hash: u64) -> u8 {
     // Grab the top 7 bits of the hash. While the hash is normally a full 64-bit
     // value, some hash functions (such as FxHash) produce a usize result
     // instead, which means that the top 32 bits are 0 on 32-bit platforms.
-    let hash_len = usize::min(mem::size_of::<usize>(), mem::size_of::<u64>());
-    let top7 = hash >> (hash_len * 8 - 7);
+    // So we use the MIN_HASH_LEN constant to handle this.
+    let top7 = hash >> (MIN_HASH_LEN * 8 - 7);
     (top7 & 0x7f) as u8 // truncation
 }
 
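For context, here is a minimal standalone sketch (an editor's illustration, not part of the patch) of what the constant-based h2 computes; the names mirror the diff, the rest of hashbrown is omitted:

use std::mem;

// Width in bytes of the narrower of `usize` and `u64`: on 32-bit targets
// some hashers (e.g. FxHash) only fill the low 32 bits of the 64-bit hash.
const MIN_HASH_LEN: usize = if mem::size_of::<usize>() < mem::size_of::<u64>() {
    mem::size_of::<usize>()
} else {
    mem::size_of::<u64>()
};

/// Secondary hash: the top 7 bits of the meaningful part of the hash.
fn h2(hash: u64) -> u8 {
    let top7 = hash >> (MIN_HASH_LEN * 8 - 7);
    (top7 & 0x7f) as u8
}

fn main() {
    // On a 64-bit target an all-ones hash yields 0x7f.
    assert_eq!(h2(u64::MAX), 0x7f);
}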
@@ -230,11 +237,15 @@ struct TableLayout {
 
 impl TableLayout {
     #[inline]
-    fn new<T>() -> Self {
+    const fn new<T>() -> Self {
         let layout = Layout::new::<T>();
         Self {
             size: layout.size(),
-            ctrl_align: usize::max(layout.align(), Group::WIDTH),
+            ctrl_align: if layout.align() > Group::WIDTH {
+                layout.align()
+            } else {
+                Group::WIDTH
+            },
         }
     }
 
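The explicit if/else above replaces `usize::max` because, to the best of my knowledge, `Ord::max` is not a `const fn`, so it cannot be called once `new` becomes `const`. A small self-contained sketch of the same pattern (the names here are illustrative, not from the patch):

use std::alloc::Layout;

// Stand-in for `Group::WIDTH`; the real value depends on the SIMD backend.
const GROUP_WIDTH: usize = 16;

// `Layout::new`, `size`, and `align` are `const fn`, and the explicit branch
// replaces the non-const `usize::max`, so this evaluates at compile time.
const fn ctrl_align<T>() -> usize {
    let align = Layout::new::<T>().align();
    if align > GROUP_WIDTH {
        align
    } else {
        GROUP_WIDTH
    }
}

const U64_CTRL_ALIGN: usize = ctrl_align::<u64>();

fn main() {
    // `u64` has alignment 8 on common targets, so the group width wins here.
    println!("{}", U64_CTRL_ALIGN);
}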
@@ -261,16 +272,6 @@ impl TableLayout {
     }
 }
 
-/// Returns a Layout which describes the allocation required for a hash table,
-/// and the offset of the control bytes in the allocation.
-/// (the offset is also one past last element of buckets)
-///
-/// Returns `None` if an overflow occurs.
-#[cfg_attr(feature = "inline-more", inline)]
-fn calculate_layout<T>(buckets: usize) -> Option<(Layout, usize)> {
-    TableLayout::new::<T>().calculate_layout_for(buckets)
-}
-
 /// A reference to a hash table bucket containing a `T`.
 ///
 /// This is usually just a pointer to the element itself. However if the element
@@ -296,9 +297,11 @@ impl<T> Clone for Bucket<T> {
 }
 
 impl<T> Bucket<T> {
+    const IS_ZERO_SIZED_TYPE: bool = mem::size_of::<T>() == 0;
+
     #[inline]
     unsafe fn from_base_index(base: NonNull<T>, index: usize) -> Self {
-        let ptr = if mem::size_of::<T>() == 0 {
+        let ptr = if Self::IS_ZERO_SIZED_TYPE {
             // won't overflow because index must be less than length
             (index + 1) as *mut T
         } else {
@@ -310,15 +313,15 @@ impl<T> Bucket<T> {
     }
     #[inline]
     unsafe fn to_base_index(&self, base: NonNull<T>) -> usize {
-        if mem::size_of::<T>() == 0 {
+        if Self::IS_ZERO_SIZED_TYPE {
             self.ptr.as_ptr() as usize - 1
         } else {
             offset_from(base.as_ptr(), self.ptr.as_ptr())
         }
     }
     #[inline]
     pub fn as_ptr(&self) -> *mut T {
-        if mem::size_of::<T>() == 0 {
+        if Self::IS_ZERO_SIZED_TYPE {
             // Just return an arbitrary ZST pointer which is properly aligned
             mem::align_of::<T>() as *mut T
         } else {
@@ -327,7 +330,7 @@ impl<T> Bucket<T> {
     }
     #[inline]
     unsafe fn next_n(&self, offset: usize) -> Self {
-        let ptr = if mem::size_of::<T>() == 0 {
+        let ptr = if Self::IS_ZERO_SIZED_TYPE {
             (self.ptr.as_ptr() as usize + offset) as *mut T
         } else {
             self.ptr.as_ptr().sub(offset)
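Throughout `Bucket<T>`, the repeated `mem::size_of::<T>() == 0` checks are hoisted into an associated constant. A minimal sketch of that pattern, assuming an illustrative `Wrapper` type rather than the real `Bucket`:

use std::marker::PhantomData;
use std::mem;

struct Wrapper<T>(PhantomData<T>);

impl<T> Wrapper<T> {
    // Evaluated once per concrete `T`; call sites read the constant instead
    // of repeating the `size_of` comparison.
    const IS_ZERO_SIZED_TYPE: bool = mem::size_of::<T>() == 0;

    fn describe() -> &'static str {
        if Self::IS_ZERO_SIZED_TYPE {
            "zero-sized"
        } else {
            "sized"
        }
    }
}

fn main() {
    assert_eq!(Wrapper::<()>::describe(), "zero-sized");
    assert_eq!(Wrapper::<u32>::describe(), "sized");
}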
@@ -419,6 +422,9 @@ impl<T> RawTable<T, Global> {
 }
 
 impl<T, A: Allocator + Clone> RawTable<T, A> {
+    const TABLE_LAYOUT: TableLayout = TableLayout::new::<T>();
+    const DATA_NEEDS_DROP: bool = mem::needs_drop::<T>();
+
     /// Creates a new empty hash table without allocating any memory, using the
     /// given allocator.
     ///
@@ -447,7 +453,7 @@ impl<T, A: Allocator + Clone> RawTable<T, A> {
         Ok(Self {
             table: RawTableInner::new_uninitialized(
                 alloc,
-                TableLayout::new::<T>(),
+                Self::TABLE_LAYOUT,
                 buckets,
                 fallibility,
             )?,
@@ -465,7 +471,7 @@ impl<T, A: Allocator + Clone> RawTable<T, A> {
         Ok(Self {
             table: RawTableInner::fallible_with_capacity(
                 alloc,
-                TableLayout::new::<T>(),
+                Self::TABLE_LAYOUT,
                 capacity,
                 fallibility,
             )?,
@@ -499,7 +505,7 @@ impl<T, A: Allocator + Clone> RawTable<T, A> {
     /// Deallocates the table without dropping any entries.
     #[cfg_attr(feature = "inline-more", inline)]
     unsafe fn free_buckets(&mut self) {
-        self.table.free_buckets(TableLayout::new::<T>());
+        self.table.free_buckets(Self::TABLE_LAYOUT);
     }
 
     /// Returns pointer to one past last element of data table.
@@ -599,7 +605,7 @@ impl<T, A: Allocator + Clone> RawTable<T, A> {
     }
 
     unsafe fn drop_elements(&mut self) {
-        if mem::needs_drop::<T>() && !self.is_empty() {
+        if Self::DATA_NEEDS_DROP && !self.is_empty() {
             for item in self.iter() {
                 item.drop();
             }
@@ -687,8 +693,8 @@ impl<T, A: Allocator + Clone> RawTable<T, A> {
                 additional,
                 &|table, index| hasher(table.bucket::<T>(index).as_ref()),
                 fallibility,
-                TableLayout::new::<T>(),
-                if mem::needs_drop::<T>() {
+                Self::TABLE_LAYOUT,
+                if Self::DATA_NEEDS_DROP {
                     Some(mem::transmute(ptr::drop_in_place::<T> as unsafe fn(*mut T)))
                 } else {
                     None
@@ -710,7 +716,7 @@ impl<T, A: Allocator + Clone> RawTable<T, A> {
                 capacity,
                 &|table, index| hasher(table.bucket::<T>(index).as_ref()),
                 fallibility,
-                TableLayout::new::<T>(),
+                Self::TABLE_LAYOUT,
             )
         }
     }
@@ -1027,10 +1033,11 @@ impl<T, A: Allocator + Clone> RawTable<T, A> {
             None
         } else {
             // Avoid `Option::unwrap_or_else` because it bloats LLVM IR.
-            let (layout, ctrl_offset) = match calculate_layout::<T>(self.table.buckets()) {
-                Some(lco) => lco,
-                None => unsafe { hint::unreachable_unchecked() },
-            };
+            let (layout, ctrl_offset) =
+                match Self::TABLE_LAYOUT.calculate_layout_for(self.table.buckets()) {
+                    Some(lco) => lco,
+                    None => unsafe { hint::unreachable_unchecked() },
+                };
             Some((
                 unsafe { NonNull::new_unchecked(self.table.ctrl.as_ptr().sub(ctrl_offset)) },
                 layout,
@@ -1739,7 +1746,7 @@ impl<T: Clone, A: Allocator + Clone> RawTable<T, A> {
         // to make sure we drop only the elements that have been
         // cloned so far.
         let mut guard = guard((0, &mut *self), |(index, self_)| {
-            if mem::needs_drop::<T>() && !self_.is_empty() {
+            if Self::DATA_NEEDS_DROP && !self_.is_empty() {
                 for i in 0..=*index {
                     if self_.is_bucket_full(i) {
                         self_.bucket(i).drop();
@@ -2027,6 +2034,8 @@ pub struct RawIter<T> {
 }
 
 impl<T> RawIter<T> {
+    const DATA_NEEDS_DROP: bool = mem::needs_drop::<T>();
+
     /// Refresh the iterator so that it reflects a removal from the given bucket.
     ///
     /// For the iterator to remain valid, this method must be called once
@@ -2144,7 +2153,7 @@ impl<T> RawIter<T> {
     }
 
     unsafe fn drop_elements(&mut self) {
-        if mem::needs_drop::<T>() && self.len() != 0 {
+        if Self::DATA_NEEDS_DROP && self.len() != 0 {
             for item in self {
                 item.drop();
             }