1
2
3
4
5 package maps
6
7 import (
8 "internal/abi"
9 "internal/goexperiment"
10 "internal/race"
11 "internal/runtime/sys"
12 "unsafe"
13 )
14
15
16 func runtime_mapaccess1_fast32(typ *abi.MapType, m *Map, key uint32) unsafe.Pointer {
17 p, _ := runtime_mapaccess2_fast32(typ, m, key)
18 return p
19 }
20
21
// runtime_mapaccess2_fast32 is the uint32-key fast path for two-result
// map lookups (v, ok := m[k]). It returns a pointer to the element for
// key and true, or a pointer to the shared zero value and false when the
// key is absent. The returned pointer is never nil on the miss path.
func runtime_mapaccess2_fast32(typ *abi.MapType, m *Map, key uint32) (unsafe.Pointer, bool) {
	// Race annotation comes first so even lookups on empty maps are
	// reported against concurrent writers.
	if race.Enabled && m != nil {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapaccess2_fast32)
		race.ReadPC(unsafe.Pointer(m), callerpc, pc)
	}

	// Lookups on a nil or empty map always miss.
	if m == nil || m.Used() == 0 {
		return unsafe.Pointer(&zeroVal[0]), false
	}

	if m.writing != 0 {
		fatal("concurrent map read and map write")
		return nil, false // presumably unreachable: fatal should not return — placeholder for the compiler
	}

	// Small map: a single group and no directory. Scan the group's full
	// slots linearly instead of probing by hash.
	if m.dirLen == 0 {
		g := groupReference{
			data: m.dirPtr,
		}
		full := g.ctrls().matchFull()
		slotKey := g.key(typ, 0)
		// Under the MapSplitGroup experiment keys are packed 4 bytes
		// apart; otherwise consecutive keys are typ.KeyStride apart.
		var keyStride uintptr
		if goexperiment.MapSplitGroup {
			keyStride = 4
		} else {
			keyStride = typ.KeyStride
		}
		var i uintptr
		for full != 0 {
			// Compare the key first, then confirm this slot is actually
			// full (lowest bit of the match mask). A matching key in a
			// non-full slot is simply skipped. NOTE(review): presumably
			// ordered this way so the key load/compare can start before
			// the bit test — confirm against the canonical source.
			if key == *(*uint32)(slotKey) && full.lowestSet() {
				if goexperiment.MapSplitGroup {
					return g.elem(typ, i), true
				} else {
					// Element lives typ.ElemOff bytes past its key slot.
					return unsafe.Pointer(uintptr(slotKey) + typ.ElemOff), true
				}
			}
			slotKey = unsafe.Pointer(uintptr(slotKey) + keyStride)
			full = full.shiftOutLowest()
			i++
		}
		return unsafe.Pointer(&zeroVal[0]), false
	}

	// Full map: hash a stack copy of the key, select the table through
	// the directory, then walk the probe sequence of groups.
	k := key
	hash := memhash32(unsafe.Pointer(&k), m.seed)

	idx := m.directoryIndex(hash)
	t := m.directoryAt(idx)

	seq := makeProbeSeq(h1(hash), t.groups.lengthMask)
	h2Hash := h2(hash)
	for ; ; seq = seq.next() {
		g := t.groups.group(typ, seq.offset)

		// Candidate slots whose control byte matches the hash's h2.
		match := g.ctrls().matchH2(h2Hash)

		for match != 0 {
			i := match.first()

			slotKey := g.key(typ, i)
			if key == *(*uint32)(slotKey) {
				if goexperiment.MapSplitGroup {
					return g.elem(typ, i), true
				} else {
					// Element lives typ.ElemOff bytes past its key slot.
					return unsafe.Pointer(uintptr(slotKey) + typ.ElemOff), true
				}
			}
			match = match.removeFirst()
		}

		// An empty slot in this group means the key cannot appear in any
		// later group of the probe sequence: definitive miss.
		match = g.ctrls().matchEmpty()
		if match != 0 {
			return unsafe.Pointer(&zeroVal[0]), false
		}
	}
}
114
// putSlotSmallFast32 inserts key into a small (single-group, no
// directory) map and returns a pointer to its element slot, which the
// caller fills in. If key is already present the existing element slot
// is returned unchanged. The caller has already verified there is room
// (m.used < abi.MapGroupSlots, see runtime_mapassign_fast32) and set
// the writing flag.
func (m *Map) putSlotSmallFast32(typ *abi.MapType, hash uintptr, key uint32) unsafe.Pointer {
	g := groupReference{
		data: m.dirPtr,
	}

	// First look for an existing slot already holding this key.
	match := g.ctrls().matchH2(h2(hash))

	for match != 0 {
		i := match.first()

		slotKey := g.key(typ, i)
		if key == *(*uint32)(slotKey) {
			slotElem := g.elem(typ, i)
			return slotElem
		}
		match = match.removeFirst()
	}

	// Not present: claim a free slot. A small map with spare capacity
	// must have one; if not, the control word was corrupted, most likely
	// by concurrent writers.
	match = g.ctrls().matchEmptyOrDeleted()
	if match == 0 {
		fatal("small map with no empty slot (concurrent map writes?)")
	}

	i := match.first()

	// Store the key, then publish the slot by setting its control byte.
	slotKey := g.key(typ, i)
	*(*uint32)(slotKey) = key

	slotElem := g.elem(typ, i)

	g.ctrls().set(i, ctrl(h2(hash)))
	m.used++

	return slotElem
}
154
155
// runtime_mapassign_fast32 is the uint32-key fast path for map
// assignment (m[k] = v). It returns a pointer to the element slot for
// key, inserting the key if absent; the caller stores the value through
// the returned pointer. Panics on assignment to a nil map.
func runtime_mapassign_fast32(typ *abi.MapType, m *Map, key uint32) unsafe.Pointer {
	if m == nil {
		panic(errNilAssign)
	}
	if race.Enabled {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapassign_fast32)
		race.WritePC(unsafe.Pointer(m), callerpc, pc)
	}
	if m.writing != 0 {
		fatal("concurrent map writes")
	}

	// Hash a stack copy of the key.
	k := key
	hash := memhash32(unsafe.Pointer(&k), m.seed)

	// Set the writing flag; it is re-checked and cleared before every
	// return below. NOTE(review): XOR rather than plain assignment
	// presumably makes a racing second toggle detectable — confirm.
	m.writing ^= 1

	if m.dirPtr == nil {
		// First write to an empty map: allocate the small-map group.
		m.growToSmall(typ)
	}

	if m.dirLen == 0 {
		if m.used < abi.MapGroupSlots {
			// Small map with room: single-group insert.
			elem := m.putSlotSmallFast32(typ, hash, key)

			if m.writing == 0 {
				fatal("concurrent map writes")
			}
			m.writing ^= 1

			return elem
		}

		// Small map is full: promote to a real table and fall through to
		// the general insert path below.
		m.growToTable(typ)
	}

	var slotElem unsafe.Pointer
outer:
	for {
		// Select the table for this hash. Rehashing may move the key to
		// a different table, hence the retry loop.
		idx := m.directoryIndex(hash)
		t := m.directoryAt(idx)

		seq := makeProbeSeq(h1(hash), t.groups.lengthMask)

		// Remember the first deleted slot (tombstone) seen along the
		// probe sequence: if the key turns out to be absent, inserting
		// there rather than in a later empty slot keeps probe chains
		// short.
		var firstDeletedGroup groupReference
		var firstDeletedSlot uintptr

		h2Hash := h2(hash)
		for ; ; seq = seq.next() {
			g := t.groups.group(typ, seq.offset)
			match := g.ctrls().matchH2(h2Hash)

			// Key already present? Reuse its element slot.
			for match != 0 {
				i := match.first()

				slotKey := g.key(typ, i)
				if key == *(*uint32)(slotKey) {
					slotElem = g.elem(typ, i)

					t.checkInvariants(typ, m)
					break outer
				}
				match = match.removeFirst()
			}

			// No empty-or-deleted slot in this group: it is full of
			// other keys, keep probing.
			match = g.ctrls().matchEmptyOrDeleted()
			if match == 0 {
				continue
			}
			i := match.first()
			if g.ctrls().get(i) == ctrlDeleted {
				// Tombstone: record it as an insertion candidate but
				// keep probing — the key may still exist in a later
				// group.
				if firstDeletedGroup.data == nil {
					firstDeletedGroup = g
					firstDeletedSlot = i
				}
				continue
			}

			// Reached a truly empty slot, so the key is definitely
			// absent. Prefer the earlier tombstone, if any, as the
			// insertion point; reusing it credits growthLeft back
			// (the tombstone presumably was already charged against it).
			if firstDeletedGroup.data != nil {
				g = firstDeletedGroup
				i = firstDeletedSlot
				t.growthLeft++
			}

			// Out of growth budget: try reclaiming tombstones before
			// resorting to a full rehash.
			if t.growthLeft == 0 {
				t.pruneTombstones(typ, m)
			}

			if t.growthLeft > 0 {
				// Store the key, then publish the slot via its control
				// byte, and update all occupancy counters.
				slotKey := g.key(typ, i)
				*(*uint32)(slotKey) = key

				slotElem = g.elem(typ, i)

				g.ctrls().set(i, ctrl(h2Hash))
				t.growthLeft--
				t.used++
				m.used++

				t.checkInvariants(typ, m)
				break outer
			}

			// Still no room: grow/split this table and retry the insert
			// from the directory lookup.
			t.rehash(typ, m)
			continue outer
		}
	}

	// Re-verify and clear the writing flag before returning.
	if m.writing == 0 {
		fatal("concurrent map writes")
	}
	m.writing ^= 1

	return slotElem
}
292
293
294
295
296
297
// runtime_mapassign_fast32ptr is the pointer-key twin of
// runtime_mapassign_fast32: the key is an unsafe.Pointer stored and
// compared as a single word. It returns a pointer to the element slot
// for key, inserting if absent. NOTE(review): memhash32 hashes 4 bytes
// of the key copy, so this entry point is presumably only used where
// pointers are 4 bytes (32-bit platforms) — confirm with the callers.
func runtime_mapassign_fast32ptr(typ *abi.MapType, m *Map, key unsafe.Pointer) unsafe.Pointer {
	if m == nil {
		panic(errNilAssign)
	}
	if race.Enabled {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapassign_fast32ptr)
		race.WritePC(unsafe.Pointer(m), callerpc, pc)
	}
	if m.writing != 0 {
		fatal("concurrent map writes")
	}

	// Hash a stack copy of the key.
	k := key
	hash := memhash32(unsafe.Pointer(&k), m.seed)

	// Set the writing flag; it is re-checked and cleared before every
	// return below.
	m.writing ^= 1

	if m.dirPtr == nil {
		// First write to an empty map: allocate the small-map group.
		m.growToSmall(typ)
	}

	if m.dirLen == 0 {
		if m.used < abi.MapGroupSlots {
			// Small map with room: single-group insert.
			elem := m.putSlotSmallFastPtr(typ, hash, key)

			if m.writing == 0 {
				fatal("concurrent map writes")
			}
			m.writing ^= 1

			return elem
		}

		// Small map is full: promote to a real table and fall through to
		// the general insert path below.
		m.growToTable(typ)
	}

	var slotElem unsafe.Pointer
outer:
	for {
		// Select the table for this hash. Rehashing may move the key to
		// a different table, hence the retry loop.
		idx := m.directoryIndex(hash)
		t := m.directoryAt(idx)

		seq := makeProbeSeq(h1(hash), t.groups.lengthMask)

		// Remember the first tombstone seen along the probe sequence so
		// an absent key can be inserted there instead of further along.
		var firstDeletedGroup groupReference
		var firstDeletedSlot uintptr

		h2Hash := h2(hash)
		for ; ; seq = seq.next() {
			g := t.groups.group(typ, seq.offset)
			match := g.ctrls().matchH2(h2Hash)

			// Key already present? Reuse its element slot.
			for match != 0 {
				i := match.first()

				slotKey := g.key(typ, i)
				if key == *(*unsafe.Pointer)(slotKey) {
					slotElem = g.elem(typ, i)

					t.checkInvariants(typ, m)
					break outer
				}
				match = match.removeFirst()
			}

			// No empty-or-deleted slot in this group: it is full of
			// other keys, keep probing.
			match = g.ctrls().matchEmptyOrDeleted()
			if match == 0 {
				continue
			}
			i := match.first()
			if g.ctrls().get(i) == ctrlDeleted {
				// Tombstone: record it as an insertion candidate but
				// keep probing — the key may still exist further on.
				if firstDeletedGroup.data == nil {
					firstDeletedGroup = g
					firstDeletedSlot = i
				}
				continue
			}

			// Reached a truly empty slot, so the key is definitely
			// absent. Prefer the earlier tombstone, if any; reusing it
			// credits growthLeft back.
			if firstDeletedGroup.data != nil {
				g = firstDeletedGroup
				i = firstDeletedSlot
				t.growthLeft++
			}

			if t.growthLeft > 0 {
				// Store the key, then publish the slot via its control
				// byte, and update all occupancy counters.
				slotKey := g.key(typ, i)
				*(*unsafe.Pointer)(slotKey) = key

				slotElem = g.elem(typ, i)

				g.ctrls().set(i, ctrl(h2Hash))
				t.growthLeft--
				t.used++
				m.used++

				t.checkInvariants(typ, m)
				break outer
			}

			// No room: grow/split this table and retry the insert from
			// the directory lookup. (Unlike the uint32 variant, this
			// path does not call pruneTombstones first.)
			t.rehash(typ, m)
			continue outer
		}
	}

	// Re-verify and clear the writing flag before returning.
	if m.writing == 0 {
		fatal("concurrent map writes")
	}
	m.writing ^= 1

	return slotElem
}
428
429
430 func runtime_mapdelete_fast32(typ *abi.MapType, m *Map, key uint32) {
431 if race.Enabled {
432 callerpc := sys.GetCallerPC()
433 pc := abi.FuncPCABIInternal(runtime_mapdelete_fast32)
434 race.WritePC(unsafe.Pointer(m), callerpc, pc)
435 }
436
437 if m == nil || m.Used() == 0 {
438 return
439 }
440
441 m.Delete(typ, abi.NoEscape(unsafe.Pointer(&key)))
442 }
443