#![allow(unsafe_code)]
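//! This module encapsulates the `unsafe` access to `hashbrown::raw::RawTable`,
//! mostly in dealing with its bucket "pointers".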
use super::{equivalent, get_hash, Bucket, HashValue, IndexMapCore};
use hashbrown::raw::RawTable;

type RawBucket = hashbrown::raw::Bucket<usize>;

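/// Inserts many index entries into a raw table without reallocating.
///
/// ***Panics*** if there is not sufficient capacity already.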
pub(super) fn insert_bulk_no_grow<K, V>(indices: &mut RawTable<usize>, entries: &[Bucket<K, V>]) {
    assert!(indices.capacity() - indices.len() >= entries.len());
    for entry in entries {
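        // SAFETY: the capacity assertion above guarantees this insert will not grow the table.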
        unsafe {
            indices.insert_no_grow(entry.hash.get(), indices.len());
        }
    }
}

#[cfg(feature = "test_debug")]
pub(super) struct DebugIndices<'a>(pub &'a RawTable<usize>);

#[cfg(feature = "test_debug")]
impl core::fmt::Debug for DebugIndices<'_> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
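        // SAFETY: we only read the indices, and no bucket pointer escapes this function.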
        let indices = unsafe { self.0.iter().map(|raw_bucket| *raw_bucket.as_ref()) };
        f.debug_list().entries(indices).finish()
    }
}

impl<K, V> IndexMapCore<K, V> {
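    /// Sweep the whole table of bucket indices, erasing those in `start..end`
    /// and shifting the ones at or above `end` down by the removed amount.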
    pub(super) fn erase_indices_sweep(&mut self, start: usize, end: usize) {
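        // SAFETY: no bucket pointer escapes this function; we only adjust or erase indices in place.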
        unsafe {
            let offset = end - start;
            for bucket in self.indices.iter() {
                let i = bucket.as_mut();
                if *i >= end {
                    *i -= offset;
                } else if *i >= start {
                    self.indices.erase(bucket);
                }
            }
        }
    }

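    /// Search for a key in the table, or insert it if not present.
    ///
    /// Returns `Ok(index)` of an existing entry, or `Err(index)` with the new
    /// index where the caller should push the corresponding entry.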
    pub(crate) fn find_or_insert(&mut self, hash: HashValue, key: &K) -> Result<usize, usize>
    where
        K: Eq,
    {
        let hash = hash.get();
        let eq = equivalent(key, &self.entries);
        let hasher = get_hash(&self.entries);
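        // SAFETY: only the item index escapes this function, never the raw bucket itself.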
        unsafe {
            match self.indices.find_or_find_insert_slot(hash, eq, hasher) {
                Ok(raw_bucket) => Ok(*raw_bucket.as_ref()),
                Err(slot) => {
                    let index = self.indices.len();
                    self.indices.insert_in_slot(hash, slot, index);
                    Err(index)
                }
            }
        }
    }

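    /// Find the entry for `hash` that satisfies `is_match`, returning a raw
    /// entry view, or `Err(self)` to give back the borrow when not found.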
    pub(super) fn raw_entry(
        &mut self,
        hash: HashValue,
        mut is_match: impl FnMut(&K) -> bool,
    ) -> Result<RawTableEntry<'_, K, V>, &mut Self> {
        let entries = &*self.entries;
        let eq = move |&i: &usize| is_match(&entries[i].key);
        match self.indices.find(hash.get(), eq) {
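            // SAFETY: the bucket was just found in this map's `indices` table.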
            Some(raw_bucket) => Ok(unsafe { RawTableEntry::new(self, raw_bucket) }),
            None => Err(self),
        }
    }

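    /// Return a raw entry view for the entry already stored at `index`, if any.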
    pub(super) fn index_raw_entry(&mut self, index: usize) -> Option<RawTableEntry<'_, K, V>> {
        let hash = self.entries.get(index)?.hash;
        let raw_bucket = self.indices.find(hash.get(), move |&i| i == index)?;
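        // SAFETY: the bucket was just found in this map's `indices` table.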
        Some(unsafe { RawTableEntry::new(self, raw_bucket) })
    }

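    /// Mutably iterate over all the stored indices.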
    pub(super) fn indices_mut(&mut self) -> impl Iterator<Item = &mut usize> {
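        // SAFETY: only the `&mut usize` indices escape here, never the buckets themselves.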
        unsafe { self.indices.iter().map(|bucket| bucket.as_mut()) }
    }
}

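/// A view into an occupied raw entry in the table.
// SAFETY: `raw_bucket` is effectively a raw pointer into `map.indices`, so the
// exclusive `map` borrow is what keeps it valid for the lifetime `'a`.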
pub(super) struct RawTableEntry<'a, K, V> {
    map: &'a mut IndexMapCore<K, V>,
    raw_bucket: RawBucket,
}

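// `hashbrown::raw::Bucket` is only `Send`, not `Sync`.
// SAFETY: a shared `&RawTableEntry` only ever reads through the bucket.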
unsafe impl<K: Sync, V: Sync> Sync for RawTableEntry<'_, K, V> {}

impl<'a, K, V> RawTableEntry<'a, K, V> {
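    /// The caller must ensure that `raw_bucket` is valid in the given `map`;
    /// the exclusive `map` borrow we hold then keeps it stable.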
    #[inline]
    unsafe fn new(map: &'a mut IndexMapCore<K, V>, raw_bucket: RawBucket) -> Self {
        Self { map, raw_bucket }
    }

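    /// Return the index of the key-value pair.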
    #[inline]
    pub(super) fn index(&self) -> usize {
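        // SAFETY: we hold the exclusive map borrow, so the bucket has not been invalidated.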
        unsafe { *self.raw_bucket.as_ref() }
    }

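    /// Borrow the entry's key-value bucket.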
    #[inline]
    pub(super) fn bucket(&self) -> &Bucket<K, V> {
        &self.map.entries[self.index()]
    }

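    /// Mutably borrow the entry's key-value bucket.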
    #[inline]
    pub(super) fn bucket_mut(&mut self) -> &mut Bucket<K, V> {
        let index = self.index();
        &mut self.map.entries[index]
    }

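    /// Convert into a mutable borrow of the entry's key-value bucket for the full lifetime `'a`.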
    #[inline]
    pub(super) fn into_bucket(self) -> &'a mut Bucket<K, V> {
        let index = self.index();
        &mut self.map.entries[index]
    }

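    /// Remove the index from the hash table, leaving the fix-up of `entries` to the caller.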
    pub(super) fn remove_index(self) -> (&'a mut IndexMapCore<K, V>, usize) {
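        // SAFETY: consuming `self` means this can only happen once, and the
        // indices have not been modified since the bucket was found.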
        let (index, _slot) = unsafe { self.map.indices.remove(self.raw_bucket) };
        (self.map, index)
    }

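    /// Take no action, just return the index and the original map reference.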
    #[inline]
    pub(super) fn into_inner(self) -> (&'a mut IndexMapCore<K, V>, usize) {
        let index = self.index();
        (self.map, index)
    }
}