1
2
3
4
5 package maps
6
7 import (
8 "internal/abi"
9 "internal/goexperiment"
10 "internal/race"
11 "internal/runtime/sys"
12 "unsafe"
13 )
14
15
16 func runtime_mapaccess1_fast64(typ *abi.MapType, m *Map, key uint64) unsafe.Pointer {
17 p, _ := runtime_mapaccess2_fast64(typ, m, key)
18 return p
19 }
20
21
// runtime_mapaccess2_fast64 is the uint64-key specialization of map lookup.
// It returns a pointer to the element slot and true when key is present, or
// a pointer to the shared zero value (&zeroVal[0]) and false when it is not.
// The returned pointer aliases map storage, so it is only safe to read until
// the next map write.
func runtime_mapaccess2_fast64(typ *abi.MapType, m *Map, key uint64) (unsafe.Pointer, bool) {
	if race.Enabled && m != nil {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapaccess2_fast64)
		race.ReadPC(unsafe.Pointer(m), callerpc, pc)
	}

	// Nil or empty map: every lookup misses.
	if m == nil || m.Used() == 0 {
		return unsafe.Pointer(&zeroVal[0]), false
	}

	// A read must not interleave with an in-progress write.
	if m.writing != 0 {
		fatal("concurrent map read and map write")
		return nil, false // unreachable after fatal; satisfies the compiler
	}

	if m.dirLen == 0 {
		// Small map: a single group with no directory. Scan the group's
		// full slots linearly; no hashing needed.
		g := groupReference{
			data: m.dirPtr,
		}
		full := g.ctrls().matchFull()
		slotKey := g.key(typ, 0)
		// Distance between consecutive key slots. With MapSplitGroup the
		// keys are packed contiguously, so the stride is the 8-byte key
		// size; otherwise it is the type's declared key stride.
		var keyStride uintptr
		if goexperiment.MapSplitGroup {
			keyStride = 8
		} else {
			keyStride = typ.KeyStride
		}
		var i uintptr
		for full != 0 {
			// The key compare may read a slot that is not in use;
			// full.lowestSet() gates the match to full slots only.
			if key == *(*uint64)(slotKey) && full.lowestSet() {
				if goexperiment.MapSplitGroup {
					return g.elem(typ, i), true
				} else {
					// Element is stored immediately after the 8-byte key.
					return unsafe.Pointer(uintptr(slotKey) + 8), true
				}
			}
			slotKey = unsafe.Pointer(uintptr(slotKey) + keyStride)
			full = full.shiftOutLowest()
			i++
		}
		return unsafe.Pointer(&zeroVal[0]), false
	}

	// Large map: hash the key and pick the owning table via the directory.
	k := key
	hash := memhash64(unsafe.Pointer(&k), m.seed)

	idx := m.directoryIndex(hash)
	t := m.directoryAt(idx)

	// Probe the table group by group along the sequence derived from h1(hash).
	seq := makeProbeSeq(h1(hash), t.groups.lengthMask)

	h2Hash := h2(hash)
	for ; ; seq = seq.next() {
		g := t.groups.group(typ, seq.offset)

		// Candidate slots: control bytes matching h2(hash).
		match := g.ctrls().matchH2(h2Hash)

		for match != 0 {
			i := match.first()

			slotKey := g.key(typ, i)
			if key == *(*uint64)(slotKey) {
				if goexperiment.MapSplitGroup {
					return g.elem(typ, i), true
				} else {
					// Element is stored immediately after the 8-byte key.
					return unsafe.Pointer(uintptr(slotKey) + 8), true
				}
			}
			match = match.removeFirst()
		}

		match = g.ctrls().matchEmpty()
		if match != 0 {
			// An empty slot in this group means the key cannot be in any
			// later group of the probe sequence: definite miss.
			return unsafe.Pointer(&zeroVal[0]), false
		}
	}
}
106
107 func (m *Map) putSlotSmallFast64(typ *abi.MapType, hash uintptr, key uint64) unsafe.Pointer {
108 g := groupReference{
109 data: m.dirPtr,
110 }
111
112 match := g.ctrls().matchH2(h2(hash))
113
114
115 for match != 0 {
116 i := match.first()
117
118 slotKey := g.key(typ, i)
119 if key == *(*uint64)(slotKey) {
120 slotElem := g.elem(typ, i)
121 return slotElem
122 }
123 match = match.removeFirst()
124 }
125
126
127
128
129 match = g.ctrls().matchEmptyOrDeleted()
130 if match == 0 {
131 fatal("small map with no empty slot (concurrent map writes?)")
132 }
133
134 i := match.first()
135
136 slotKey := g.key(typ, i)
137 *(*uint64)(slotKey) = key
138
139 slotElem := g.elem(typ, i)
140
141 g.ctrls().set(i, ctrl(h2(hash)))
142 m.used++
143
144 return slotElem
145 }
146
147
// runtime_mapassign_fast64 is the uint64-key specialization of map assign.
// It returns a pointer to the element slot for key, inserting the key first
// when absent; the caller stores the value through the returned pointer.
// Panics on a nil map and aborts on detected concurrent writes.
func runtime_mapassign_fast64(typ *abi.MapType, m *Map, key uint64) unsafe.Pointer {
	if m == nil {
		panic(errNilAssign)
	}
	if race.Enabled {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapassign_fast64)
		race.WritePC(unsafe.Pointer(m), callerpc, pc)
	}
	if m.writing != 0 {
		fatal("concurrent map writes")
	}

	k := key
	hash := memhash64(unsafe.Pointer(&k), m.seed)

	// Set the writing flag; it is flipped back before every return so a
	// concurrent writer trips the checks above/below.
	m.writing ^= 1

	if m.dirPtr == nil {
		// First insert into a fresh map: allocate the small representation.
		m.growToSmall(typ)
	}

	if m.dirLen == 0 {
		if m.used < abi.MapGroupSlots {
			// Small map with room: insert directly into the single group.
			elem := m.putSlotSmallFast64(typ, hash, key)

			if m.writing == 0 {
				fatal("concurrent map writes")
			}
			m.writing ^= 1

			return elem
		}

		// Small map is full: promote to a full table, then fall through to
		// the table insert path below.
		m.growToTable(typ)
	}

	var slotElem unsafe.Pointer
outer:
	for {
		// Select the table for this hash from the directory. Recomputed on
		// every retry because rehash may have split/replaced the table.
		idx := m.directoryIndex(hash)
		t := m.directoryAt(idx)

		seq := makeProbeSeq(h1(hash), t.groups.lengthMask)

		// Track the first deleted (tombstone) slot seen so it can be
		// reused if the key turns out not to be present.
		var firstDeletedGroup groupReference
		var firstDeletedSlot uintptr

		h2Hash := h2(hash)
		for ; ; seq = seq.next() {
			g := t.groups.group(typ, seq.offset)
			match := g.ctrls().matchH2(h2Hash)

			// Check candidate slots for an existing entry with this key.
			for match != 0 {
				i := match.first()

				slotKey := g.key(typ, i)
				if key == *(*uint64)(slotKey) {
					slotElem = g.elem(typ, i)

					t.checkInvariants(typ, m)
					break outer
				}
				match = match.removeFirst()
			}

			// No match here. If the group has no empty or deleted slot,
			// the key may still live further along; keep probing.
			match = g.ctrls().matchEmptyOrDeleted()
			if match == 0 {
				continue
			}
			i := match.first()
			if g.ctrls().get(i) == ctrlDeleted {
				// Tombstone: remember the first one, but keep probing in
				// case the key exists in a later group.
				if firstDeletedGroup.data == nil {
					firstDeletedGroup = g
					firstDeletedSlot = i
				}
				continue
			}

			// Reached an empty slot, so the key is definitely absent.
			// Prefer reusing an earlier tombstone; doing so gives back a
			// growth credit since the slot was already accounted for.
			if firstDeletedGroup.data != nil {
				g = firstDeletedGroup
				i = firstDeletedSlot
				t.growthLeft++
			}

			// Out of growth budget: try to reclaim tombstones before
			// resorting to a rehash.
			if t.growthLeft == 0 {
				t.pruneTombstones(typ, m)
			}

			// Room available: write the key and publish the control byte.
			if t.growthLeft > 0 {
				slotKey := g.key(typ, i)
				*(*uint64)(slotKey) = key

				slotElem = g.elem(typ, i)

				g.ctrls().set(i, ctrl(h2Hash))
				t.growthLeft--
				t.used++
				m.used++

				t.checkInvariants(typ, m)
				break outer
			}

			// Still no room: grow/split this table and retry the insert.
			t.rehash(typ, m)
			continue outer
		}
	}

	if m.writing == 0 {
		fatal("concurrent map writes")
	}
	m.writing ^= 1

	return slotElem
}
284
285 func (m *Map) putSlotSmallFastPtr(typ *abi.MapType, hash uintptr, key unsafe.Pointer) unsafe.Pointer {
286 g := groupReference{
287 data: m.dirPtr,
288 }
289
290 match := g.ctrls().matchH2(h2(hash))
291
292
293 for match != 0 {
294 i := match.first()
295
296 slotKey := g.key(typ, i)
297 if key == *(*unsafe.Pointer)(slotKey) {
298 slotElem := g.elem(typ, i)
299 return slotElem
300 }
301 match = match.removeFirst()
302 }
303
304
305
306
307 match = g.ctrls().matchEmptyOrDeleted()
308 if match == 0 {
309 fatal("small map with no empty slot (concurrent map writes?)")
310 }
311
312 i := match.first()
313
314 slotKey := g.key(typ, i)
315 *(*unsafe.Pointer)(slotKey) = key
316
317 slotElem := g.elem(typ, i)
318
319 g.ctrls().set(i, ctrl(h2(hash)))
320 m.used++
321
322 return slotElem
323 }
324
325
326
327
328 func runtime_mapassign_fast64ptr(typ *abi.MapType, m *Map, key unsafe.Pointer) unsafe.Pointer {
329 if m == nil {
330 panic(errNilAssign)
331 }
332 if race.Enabled {
333 callerpc := sys.GetCallerPC()
334 pc := abi.FuncPCABIInternal(runtime_mapassign_fast64ptr)
335 race.WritePC(unsafe.Pointer(m), callerpc, pc)
336 }
337 if m.writing != 0 {
338 fatal("concurrent map writes")
339 }
340
341
342
343 k := key
344 hash := memhash64(unsafe.Pointer(&k), m.seed)
345
346
347
348 m.writing ^= 1
349
350 if m.dirPtr == nil {
351 m.growToSmall(typ)
352 }
353
354 if m.dirLen == 0 {
355 if m.used < abi.MapGroupSlots {
356 elem := m.putSlotSmallFastPtr(typ, hash, key)
357
358 if m.writing == 0 {
359 fatal("concurrent map writes")
360 }
361 m.writing ^= 1
362
363 return elem
364 }
365
366
367 m.growToTable(typ)
368 }
369
370 var slotElem unsafe.Pointer
371 outer:
372 for {
373
374 idx := m.directoryIndex(hash)
375 t := m.directoryAt(idx)
376
377 seq := makeProbeSeq(h1(hash), t.groups.lengthMask)
378
379
380
381
382 var firstDeletedGroup groupReference
383 var firstDeletedSlot uintptr
384
385 h2Hash := h2(hash)
386 for ; ; seq = seq.next() {
387 g := t.groups.group(typ, seq.offset)
388 match := g.ctrls().matchH2(h2Hash)
389
390
391 for match != 0 {
392 i := match.first()
393
394 slotKey := g.key(typ, i)
395 if key == *(*unsafe.Pointer)(slotKey) {
396 slotElem = g.elem(typ, i)
397
398 t.checkInvariants(typ, m)
399 break outer
400 }
401 match = match.removeFirst()
402 }
403
404
405
406 match = g.ctrls().matchEmptyOrDeleted()
407 if match == 0 {
408 continue
409 }
410 i := match.first()
411 if g.ctrls().get(i) == ctrlDeleted {
412
413
414 if firstDeletedGroup.data == nil {
415 firstDeletedGroup = g
416 firstDeletedSlot = i
417 }
418 continue
419 }
420
421
422
423
424
425 if firstDeletedGroup.data != nil {
426 g = firstDeletedGroup
427 i = firstDeletedSlot
428 t.growthLeft++
429 }
430
431
432 if t.growthLeft > 0 {
433 slotKey := g.key(typ, i)
434 *(*unsafe.Pointer)(slotKey) = key
435
436 slotElem = g.elem(typ, i)
437
438 g.ctrls().set(i, ctrl(h2Hash))
439 t.growthLeft--
440 t.used++
441 m.used++
442
443 t.checkInvariants(typ, m)
444 break outer
445 }
446
447 t.rehash(typ, m)
448 continue outer
449 }
450 }
451
452 if m.writing == 0 {
453 fatal("concurrent map writes")
454 }
455 m.writing ^= 1
456
457 return slotElem
458 }
459
460
461 func runtime_mapdelete_fast64(typ *abi.MapType, m *Map, key uint64) {
462 if race.Enabled {
463 callerpc := sys.GetCallerPC()
464 pc := abi.FuncPCABIInternal(runtime_mapdelete_fast64)
465 race.WritePC(unsafe.Pointer(m), callerpc, pc)
466 }
467
468 if m == nil || m.Used() == 0 {
469 return
470 }
471
472 m.Delete(typ, abi.NoEscape(unsafe.Pointer(&key)))
473 }
474