// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Garbage collector liveness bitmap generation.

// The command line flag -live causes this code to print debug information.
// The levels are:
//
//	-live (aka -live=1): print liveness lists as code warnings at safe points
//	-live=2: print an assembly listing with liveness annotations
//
// Each level includes the earlier output as well.
package liveness

import (
	"cmp"
	"fmt"
	"math"
	"os"
	"slices"
	"sort"
	"strings"

	"cmd/compile/internal/abi"
	"cmd/compile/internal/base"
	"cmd/compile/internal/bitvec"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/objw"
	"cmd/compile/internal/reflectdata"
	"cmd/compile/internal/ssa"
	"cmd/compile/internal/typebits"
	"cmd/compile/internal/types"
	"cmd/internal/hash"
	"cmd/internal/obj"
	"cmd/internal/src"

	rtabi "internal/abi"
)

// OpVarDef is an annotation for the liveness analysis, marking a place
// where a complete initialization (definition) of a variable begins.
// Since the liveness analysis can see initialization of single-word
// variables quite precisely, OpVarDef is only needed for multi-word
// variables satisfying isfat(n.Type()). For simplicity though, buildssa
// emits OpVarDef regardless of variable width.
//
// An 'OpVarDef x' annotation in the instruction stream tells the liveness
// analysis to behave as though the variable x is being initialized at that
// point in the instruction stream. The OpVarDef must appear before the
// actual (multi-instruction) initialization, and it must also appear after
// any uses of the previous value, if any. For example, if compiling:
//
//	x = x[1:]
//
// it is important to generate code like:
//
//	base, len, cap = pieces of x[1:]
//	OpVarDef x
//	x = {base, len, cap}
//
// If instead the generated code looked like:
//
//	OpVarDef x
//	base, len, cap = pieces of x[1:]
//	x = {base, len, cap}
//
// then the liveness analysis would decide the previous value of x was
// unnecessary even though it is about to be used by the x[1:] computation.
// Similarly, if the generated code looked like:
//
//	base, len, cap = pieces of x[1:]
//	x = {base, len, cap}
//	OpVarDef x
//
// then the liveness analysis will not preserve the new value of x, because
// the OpVarDef appears to have "overwritten" it.
//
// OpVarDef is a bit of a kludge to work around the fact that the instruction
// stream is working on single-word values but the liveness analysis thinks
// in terms of variables, which might be multiple words. This is also the
// reason for the ambiguity of the second example, in which x appears to be
// assigned to twice. (In the second example, the liveness analysis thinks
// the assignment is to x[0:cap(x)], for the new base pointer, and then to
// x, for the multiword value.)
// blockEffects summarizes the liveness effects on an SSA block.
type blockEffects struct {
	// Computed during Liveness.prologue using only the content of
	// individual blocks:
	//
	//	uevar: upward exposed variables (used before set in block)
	//	varkill: killed variables (set in block)
	uevar   bitvec.BitVec
	varkill bitvec.BitVec

	// Computed during Liveness.solve using control flow information:
	//
	//	livein: variables live at block entry
	//	liveout: variables live at block exit
	livein  bitvec.BitVec
	liveout bitvec.BitVec
}

// A collection of global state used by liveness analysis.
type Liveness struct {
	fn         *ir.Func
	f          *ssa.Func
	vars       []*ir.Name
	idx        map[*ir.Name]int32
	stkptrsize int64

	be []blockEffects

	// allUnsafe indicates that all points in this function are
	// unsafe-points.
	allUnsafe bool
	// unsafePoints bit i is set if Value ID i is an unsafe-point
	// (preemption is not allowed). Only valid if !allUnsafe.
	unsafePoints bitvec.BitVec
	// unsafeBlocks bit i is set if Block ID i is an unsafe-point
	// (preemption is not allowed on any end-of-block safe-points).
	unsafeBlocks bitvec.BitVec

	// A slice with a bit vector for each safe point in the
	// current Block during Liveness.epilogue, indexed in Value
	// order for that block. Additionally, for the entry block
	// livevars[0] is the entry bitmap. Liveness.compact moves
	// these to stackMaps.
	livevars []bitvec.BitVec

	// livenessMap maps from safe points (i.e., CALLs) to their
	// liveness map indexes.
	livenessMap Map
	stackMapSet bvecSet
	stackMaps   []bitvec.BitVec

	cache progeffectscache

	// partLiveArgs includes input arguments (PPARAM) that may
	// be partially live. That is, an argument is considered live
	// because a part of it is used, but we may not initialize all
	// parts.
	partLiveArgs map[*ir.Name]bool

	doClobber     bool // Whether to clobber dead stack slots in this function.
	noClobberArgs bool // Do not clobber function arguments.

	// conservativeWrites treats "dead" writes as equivalent to reads
	// during the analysis; used only for stack slot merging (this
	// matters mainly when unsafe code is involved or when certain
	// compiler flags are used).
	conservativeWrites bool
}

// Map maps from *ssa.Value to StackMapIndex.
// It also keeps track of unsafe ssa.Values and ssa.Blocks.
// (unsafe = can't be interrupted during GC.)
type Map struct {
	Vals         map[ssa.ID]objw.StackMapIndex
	UnsafeVals   map[ssa.ID]bool
	UnsafeBlocks map[ssa.ID]bool
	// DeferReturn is the stack map index of the set of live,
	// pointer-containing variables at the deferreturn call (only
	// meaningful when open-coded defers are used).
	DeferReturn objw.StackMapIndex
}

func (m *Map) reset() {
	if m.Vals == nil {
		m.Vals = make(map[ssa.ID]objw.StackMapIndex)
		m.UnsafeVals = make(map[ssa.ID]bool)
		m.UnsafeBlocks = make(map[ssa.ID]bool)
	} else {
		clear(m.Vals)
		clear(m.UnsafeVals)
		clear(m.UnsafeBlocks)
	}
	m.DeferReturn = objw.StackMapDontCare
}

func (m *Map) set(v *ssa.Value, i objw.StackMapIndex) {
	m.Vals[v.ID] = i
}
func (m *Map) setUnsafeVal(v *ssa.Value) {
	m.UnsafeVals[v.ID] = true
}
func (m *Map) setUnsafeBlock(b *ssa.Block) {
	m.UnsafeBlocks[b.ID] = true
}

func (m Map) Get(v *ssa.Value) objw.StackMapIndex {
	// If v isn't in the map, then it's a point that we don't need a
	// stack map for.
	if idx, ok := m.Vals[v.ID]; ok {
		return idx
	}
	return objw.StackMapDontCare
}
func (m Map) GetUnsafe(v *ssa.Value) bool {
	// default is safe
	return m.UnsafeVals[v.ID]
}
func (m Map) GetUnsafeBlock(b *ssa.Block) bool {
	// default is safe
	return m.UnsafeBlocks[b.ID]
}

type progeffectscache struct {
	retuevar    []int32
	tailuevar   []int32
	initialized bool
}

// shouldTrack reports whether the liveness analysis
// should track the variable n.
// We don't care about variables that have no pointers,
// nor about locals that have escaped to the heap: only
// stack-resident, pointer-containing parameters, results,
// and autos are tracked.
func shouldTrack(n *ir.Name) bool {
	return (n.Class == ir.PAUTO && n.Esc() != ir.EscHeap || n.Class == ir.PPARAM || n.Class == ir.PPARAMOUT) && n.Type().HasPointers()
}
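
// For example, given
//
//	func f(p *int) (q *byte) {
//		x := new(int) // assume x does not escape
//		i := 0
//		...
//	}
//
// the analysis tracks p, q, and x (pointer-containing and stack-resident),
// but not i, whose type has no pointers.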

// getvariables returns the list of on-stack variables that we need to track
// and a map for looking up indices by *ir.Name.
func getvariables(fn *ir.Func) ([]*ir.Name, map[*ir.Name]int32) {
	var vars []*ir.Name
	for _, n := range fn.Dcl {
		if shouldTrack(n) {
			vars = append(vars, n)
		}
	}
	idx := make(map[*ir.Name]int32, len(vars))
	for i, n := range vars {
		idx[n] = int32(i)
	}
	return vars, idx
}

func (lv *Liveness) initcache() {
	if lv.cache.initialized {
		base.Fatalf("liveness cache initialized twice")
		return
	}
	lv.cache.initialized = true

	for i, node := range lv.vars {
		switch node.Class {
		case ir.PPARAM:
			// A tail call (BlockRetJmp) brings the stack pointer
			// back up (if it ever went down) and then jumps to a
			// new function entirely. That jump must be able to
			// read all the parameters for correctness, and
			// similarly it must not overwrite any of them.
			lv.cache.tailuevar = append(lv.cache.tailuevar, int32(i))

		case ir.PPARAMOUT:
			// All results are live at every return point.
			// (See the BlockRet case in solve, which ORs
			// retuevar into the block's liveout.)
			lv.cache.retuevar = append(lv.cache.retuevar, int32(i))
		}
	}
}

// A liveEffect is a set of flags that describe an instruction's
// liveness effects on a variable.
//
// The possible flags are:
//
//	uevar - used by the instruction
//	varkill - killed by the instruction (set)
//
// A kill happens after the use (for an instruction that updates a value,
// for example).
type liveEffect int

const (
	uevar liveEffect = 1 << iota
	varkill
)
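
// Because the kill is ordered after the use, a value whose effect is
// uevar|varkill (it reads the old contents of a variable and then
// overwrites them) leaves the variable upward-exposed: the backward scan
// in prologue sets varkill, clears uevar, and then sets uevar again.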

// valueEffects returns the index of a variable in lv.vars and the
// liveness effects v has on that variable.
// If v does not affect any tracked variables, it returns -1, 0.
func (lv *Liveness) valueEffects(v *ssa.Value) (int32, liveEffect) {
	n, e := affectedVar(v)
	if e == 0 || n == nil {
		return -1, 0
	}

	// AllocFrame has dropped unused variables from
	// lv.fn.Dcl, but they might still be referenced by
	// OpVarFoo pseudo-ops. Ignore them to prevent "lost track of
	// variable" ICEs (issue 19632).
	switch v.Op {
	case ssa.OpVarDef, ssa.OpVarLive, ssa.OpKeepAlive:
		if !n.Used() {
			return -1, 0
		}
	}

	if n.Class == ir.PPARAM && !n.Addrtaken() && n.Type().Size() > int64(types.PtrSize) {
		// Only aggregate-typed arguments that are not address-taken
		// can be partially live.
		lv.partLiveArgs[n] = true
	}

	var effect liveEffect
	// Read is a read, obviously.
	//
	// Addr is a read also, as any subsequent holder of the pointer
	// must be able to see all the values (including initialization)
	// stored so far.
	if e&(ssa.SymRead|ssa.SymAddr) != 0 {
		effect |= uevar
	}
	if e&ssa.SymWrite != 0 {
		if !isfat(n.Type()) || v.Op == ssa.OpVarDef {
			effect |= varkill
		} else if lv.conservativeWrites {
			effect |= uevar
		}
	}

	if effect == 0 {
		return -1, 0
	}

	if pos, ok := lv.idx[n]; ok {
		return pos, effect
	}
	return -1, 0
}

// affectedVar returns the *ir.Name node affected by v.
func affectedVar(v *ssa.Value) (*ir.Name, ssa.SymEffect) {
	// Special cases.
	switch v.Op {
	case ssa.OpLoadReg:
		n, _ := ssa.AutoVar(v.Args[0])
		return n, ssa.SymRead
	case ssa.OpStoreReg:
		n, _ := ssa.AutoVar(v)
		return n, ssa.SymWrite

	case ssa.OpArgIntReg:
		// OpArgIntReg reads an argument that arrived in a register.
		// Treat it as a read of the argument's named stack variable,
		// so the corresponding spill slot is considered live here.
		// This matters for arguments that are only partially spilled;
		// see also partLiveArgs.
		n, _ := ssa.AutoVar(v)
		return n, ssa.SymRead

	case ssa.OpVarLive:
		return v.Aux.(*ir.Name), ssa.SymRead
	case ssa.OpVarDef:
		return v.Aux.(*ir.Name), ssa.SymWrite
	case ssa.OpKeepAlive:
		n, _ := ssa.AutoVar(v.Args[0])
		return n, ssa.SymRead
	}

	e := v.Op.SymEffect()
	if e == 0 {
		return nil, 0
	}

	switch a := v.Aux.(type) {
	case nil, *obj.LSym:
		// ok, but no node
		return nil, e
	case *ir.Name:
		return a, e
	default:
		base.Fatalf("weird aux: %s", v.LongString())
		return nil, e
	}
}

type livenessFuncCache struct {
	be          []blockEffects
	livenessMap Map
}

// newliveness constructs the Liveness used to hold the global state of
// the liveness computation for the given function, reusing allocations
// cached on the ssa.Func where possible.
func newliveness(fn *ir.Func, f *ssa.Func, vars []*ir.Name, idx map[*ir.Name]int32, stkptrsize int64) *Liveness {
	lv := &Liveness{
		fn:         fn,
		f:          f,
		vars:       vars,
		idx:        idx,
		stkptrsize: stkptrsize,
	}

	// Significant sources of allocation are kept in the ssa.Cache
	// and reused. Surprisingly, the bit vectors themselves aren't
	// a major source of allocation, but the liveness maps are.
	if lc, _ := f.Cache.Liveness.(*livenessFuncCache); lc == nil {
		// Prep the cache so liveness can fill it later.
		f.Cache.Liveness = new(livenessFuncCache)
	} else {
		if cap(lc.be) >= f.NumBlocks() {
			lv.be = lc.be[:f.NumBlocks()]
		}
		lv.livenessMap = Map{
			Vals:         lc.livenessMap.Vals,
			UnsafeVals:   lc.livenessMap.UnsafeVals,
			UnsafeBlocks: lc.livenessMap.UnsafeBlocks,
			DeferReturn:  objw.StackMapDontCare,
		}
		lc.livenessMap.Vals = nil
		lc.livenessMap.UnsafeVals = nil
		lc.livenessMap.UnsafeBlocks = nil
	}
	if lv.be == nil {
		lv.be = make([]blockEffects, f.NumBlocks())
	}

	nblocks := int32(len(f.Blocks))
	nvars := int32(len(vars))
	bulk := bitvec.NewBulk(nvars, nblocks*4)
	for _, b := range f.Blocks {
		be := lv.blockEffects(b)

		be.uevar = bulk.Next()
		be.varkill = bulk.Next()
		be.livein = bulk.Next()
		be.liveout = bulk.Next()
	}
	lv.livenessMap.reset()

	lv.markUnsafePoints()

	lv.partLiveArgs = make(map[*ir.Name]bool)

	lv.enableClobber()

	return lv
}

func (lv *Liveness) blockEffects(b *ssa.Block) *blockEffects {
	return &lv.be[b.ID]
}

// pointerMap generates the pointer bitmaps for the variables in liveout:
// bits for stack-resident arguments and results go in args, bits for
// local variables go in locals.
func (lv *Liveness) pointerMap(liveout bitvec.BitVec, vars []*ir.Name, args, locals bitvec.BitVec) {
	var slotsSeen map[int64]*ir.Name
	checkForDuplicateSlots := base.Debug.MergeLocals != 0
	if checkForDuplicateSlots {
		slotsSeen = make(map[int64]*ir.Name)
	}
	for i := int32(0); ; i++ {
		i = liveout.Next(i)
		if i < 0 {
			break
		}
		node := vars[i]
		switch node.Class {
		case ir.PPARAM, ir.PPARAMOUT:
			if !node.IsOutputParamInRegisters() {
				if node.FrameOffset() < 0 {
					lv.f.Fatalf("Node %v has frameoffset %d\n", node.Sym().Name, node.FrameOffset())
				}
				typebits.SetNoCheck(node.Type(), node.FrameOffset(), args)
				break
			}
			fallthrough
		case ir.PAUTO:
			if checkForDuplicateSlots {
				if prev, ok := slotsSeen[node.FrameOffset()]; ok {
					base.FatalfAt(node.Pos(), "two vars live at pointerMap generation: %q and %q", prev.Sym().Name, node.Sym().Name)
				}
				slotsSeen[node.FrameOffset()] = node
			}
			typebits.Set(node.Type(), node.FrameOffset()+lv.stkptrsize, locals)
		}
	}
}

// IsUnsafe indicates that all points in this function are
// unsafe-points.
func IsUnsafe(f *ssa.Func) bool {
	// The runtime assumes the only safe-points are function
	// prologues (because that's how it used to be). We could and
	// should improve that, but for now keep considering all points
	// in the runtime unsafe. obj will add prologues and their
	// safe-points.
	//
	// go:nosplit functions are similar. Since safe points used to
	// be coupled with stack checks, go:nosplit often actually
	// means "no safe points in this function".
	return base.Flag.CompilingRuntime || f.NoSplit
}

// markUnsafePoints finds unsafe points and computes lv.unsafePoints.
func (lv *Liveness) markUnsafePoints() {
	if IsUnsafe(lv.f) {
		// No complex analysis necessary.
		lv.allUnsafe = true
		return
	}

	lv.unsafePoints = bitvec.New(int32(lv.f.NumValues()))
	lv.unsafeBlocks = bitvec.New(int32(lv.f.NumBlocks()))

	// Mark architecture-specific unsafe points.
	for _, b := range lv.f.Blocks {
		for _, v := range b.Values {
			if v.Op.UnsafePoint() {
				lv.unsafePoints.Set(int32(v.ID))
			}
		}
	}

	// Mark write barrier unsafe points.
	for _, b := range lv.f.Blocks {
		for _, v := range b.Values {
			if v.Op != ssa.OpWBend {
				continue
			}
			// WBend appears at the start of a block, like this:
			//
			//	...
			//	if wbEnabled: goto C else D
			//	C:
			//	  ... some write barrier enabled code ...
			//	  goto B
			//	D:
			//	  ... some write barrier disabled code ...
			//	  goto B
			//	B:
			//	  m1 = Phi mem_C mem_D
			//	  m2 = store operation ... m1
			//	  m3 = store operation ... m2
			//	  m4 = WBend m3

			// Find the memory phi at the top of the join block.
			m := v
			for {
				m = m.MemoryArg()
				if m.Block != b {
					lv.f.Fatalf("can't find Phi before write barrier end mark %v", v)
				}
				if m.Op == ssa.OpPhi {
					break
				}
			}
			// Find the two predecessor blocks (write barrier on and write barrier off).
			if len(m.Args) != 2 {
				lv.f.Fatalf("phi before write barrier end mark has %d args, want 2", len(m.Args))
			}
			c := b.Preds[0].Block()
			d := b.Preds[1].Block()

			// Find their common predecessor block (the one that branches based
			// on wb on/off). It might be a full diamond pattern, or one of the
			// blocks in the diamond pattern might be missing.
			var decisionBlock *ssa.Block
			if len(c.Preds) == 1 && c.Preds[0].Block() == d {
				decisionBlock = d
			} else if len(d.Preds) == 1 && d.Preds[0].Block() == c {
				decisionBlock = c
			} else if len(c.Preds) == 1 && len(d.Preds) == 1 && c.Preds[0].Block() == d.Preds[0].Block() {
				decisionBlock = c.Preds[0].Block()
			} else {
				lv.f.Fatalf("can't find write barrier pattern %v", v)
			}
			if len(decisionBlock.Succs) != 2 {
				lv.f.Fatalf("common predecessor block the wrong type %s", decisionBlock.Kind)
			}

			// Flow backwards from the control value to find the
			// flag load. We don't know what lowered ops we're
			// looking for, but all current arches produce a
			// single op that does the memory load from the flag
			// address, so we look for that.
			var load *ssa.Value
			v := decisionBlock.Controls[0]
			for {
				if v.MemoryArg() != nil {
					// Single instruction to load (and maybe compare) the write barrier flag.
					if sym, ok := v.Aux.(*obj.LSym); ok && sym == ir.Syms.WriteBarrier {
						load = v
						break
					}
					// Some architectures have to materialize the address
					// separately from the load.
					if sym, ok := v.Args[0].Aux.(*obj.LSym); ok && sym == ir.Syms.WriteBarrier {
						load = v
						break
					}
					v.Fatalf("load of write barrier flag not from correct global: %s", v.LongString())
				}
				// Keep walking backwards through single-argument
				// (or duplicated-argument) ops.
				if len(v.Args) == 1 || len(v.Args) == 2 && v.Args[0] == v.Args[1] {
					v = v.Args[0]
					continue
				}
				v.Fatalf("write barrier control value has more than one argument: %s", v.LongString())
			}

			// Mark everything after the load unsafe.
			found := false
			for _, v := range decisionBlock.Values {
				if found {
					lv.unsafePoints.Set(int32(v.ID))
				}
				found = found || v == load
			}
			lv.unsafeBlocks.Set(int32(decisionBlock.ID))

			// Mark the write barrier on/off blocks as unsafe.
			for _, e := range decisionBlock.Succs {
				x := e.Block()
				if x == b {
					continue
				}
				for _, v := range x.Values {
					lv.unsafePoints.Set(int32(v.ID))
				}
				lv.unsafeBlocks.Set(int32(x.ID))
			}

			// Mark from the join point up to the WBend as unsafe.
			for _, v := range b.Values {
				if v.Op == ssa.OpWBend {
					break
				}
				lv.unsafePoints.Set(int32(v.ID))
			}
		}
	}
}

// hasStackMap reports whether v must have a stack map.
//
// This does not necessarily mean the instruction is a safe-point. In
// particular, call Values can have a stack map in case the callee
// grows the stack, but not themselves be a safe-point.
func (lv *Liveness) hasStackMap(v *ssa.Value) bool {
	if !v.Op.IsCall() {
		return false
	}
	// wbZero and wbMove are write barriers and deeply
	// non-preemptible. They are unsafe points and hence
	// should not have liveness maps.
	if sym, ok := v.Aux.(*ssa.AuxCall); ok && (sym.Fn == ir.Syms.WBZero || sym.Fn == ir.Syms.WBMove) {
		return false
	}
	return true
}

// prologue initializes the sets for solving the live variables. It visits
// all the instructions in each basic block and summarizes their effects
// into the block's uevar and varkill sets.
func (lv *Liveness) prologue() {
	lv.initcache()

	for _, b := range lv.f.Blocks {
		be := lv.blockEffects(b)

		// Walk the block instructions backward and update the block
		// effects with each value's effects.
		for j := len(b.Values) - 1; j >= 0; j-- {
			pos, e := lv.valueEffects(b.Values[j])
			if e&varkill != 0 {
				be.varkill.Set(pos)
				be.uevar.Unset(pos)
			}
			if e&uevar != 0 {
				be.uevar.Set(pos)
			}
		}
	}
}

// solve solves the liveness dataflow equations.
func (lv *Liveness) solve() {
	// These temporary bitvectors exist to avoid successive allocations
	// and frees within the loop.
	nvars := int32(len(lv.vars))
	newlivein := bitvec.New(nvars)
	newliveout := bitvec.New(nvars)

	// Walk blocks in postorder. This improves convergence.
	po := lv.f.Postorder()

	// Iterate through the blocks in reverse round-robin fashion. A work
	// queue might be slightly faster. As is, the number of iterations is
	// so low that it hardly seems to be worth the complexity.
	for change := true; change; {
		change = false
		for _, b := range po {
			be := lv.blockEffects(b)

			newliveout.Clear()
			switch b.Kind {
			case ssa.BlockRet:
				for _, pos := range lv.cache.retuevar {
					newliveout.Set(pos)
				}
			case ssa.BlockRetJmp:
				for _, pos := range lv.cache.tailuevar {
					newliveout.Set(pos)
				}
			case ssa.BlockExit:
				// panic exit - nothing to do
			default:
				// A variable is live on output from this block
				// if it is live on input to some successor.
				//
				// out[b] = \bigcup_{s \in succ[b]} in[s]
				newliveout.Copy(lv.blockEffects(b.Succs[0].Block()).livein)
				for _, succ := range b.Succs[1:] {
					newliveout.Or(newliveout, lv.blockEffects(succ.Block()).livein)
				}
			}

			if !be.liveout.Eq(newliveout) {
				change = true
				be.liveout.Copy(newliveout)
			}

			// A variable is live on input to this block
			// if it is used by this block, or live on output from this block and
			// not set by the code in this block.
			//
			// in[b] = uevar[b] \cup (out[b] \setminus varkill[b])
			newlivein.AndNot(be.liveout, be.varkill)
			be.livein.Or(newlivein, be.uevar)
		}
	}
}
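
// As a small worked example of the equations above: for a block b with a
// single successor s, where b overwrites x and then reads y,
//
//	varkill[b] = {x}, uevar[b] = {y}
//	out[b] = in[s]
//	in[b]  = {y} \cup (in[s] \setminus {x})
//
// so y is always live into b, and anything live into s other than x
// remains live into b.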

// epilogue visits all instructions in each basic block and computes a
// bit vector of live variables at each safe point location.
func (lv *Liveness) epilogue() {
	nvars := int32(len(lv.vars))
	liveout := bitvec.New(nvars)
	livedefer := bitvec.New(nvars) // always-live variables

	// If there is a defer (that could recover), then all output
	// parameters are live all the time. In addition, any locals
	// that are pointers to heap-allocated output parameters are
	// also always live (post-deferreturn code needs these
	// pointers to copy values back to the stack).
	// TODO: if the output parameter is heap-allocated, then we
	// don't need to keep the stack copy live?
	if lv.fn.HasDefer() {
		for i, n := range lv.vars {
			if n.Class == ir.PPARAMOUT {
				if n.IsOutputParamHeapAddr() {
					// Just to be paranoid. Heap addresses are PAUTOs.
					base.Fatalf("variable %v both output param and heap output param", n)
				}
				if n.Heapaddr != nil {
					// If this variable moved to the heap, then
					// its stack copy is not live.
					continue
				}
				// Note: zeroing is handled by zeroResults in walk.go.
				livedefer.Set(int32(i))
			}
			if n.IsOutputParamHeapAddr() {
				// This variable will be overwritten early in the function
				// prologue (from the result of a mallocgc) but we need to
				// zero it in case that malloc causes a stack scan.
				n.SetNeedzero(true)
				livedefer.Set(int32(i))
			}
			if n.OpenDeferSlot() {
				// Open-coded defer args slots must be live
				// everywhere in a function, since a panic can
				// occur (almost) anywhere. Because it is live
				// everywhere, it must be zeroed on entry.
				livedefer.Set(int32(i))
				// It was already marked as Needzero when created.
				if !n.Needzero() {
					base.Fatalf("all pointer-containing defer arg slots should have Needzero set")
				}
			}
		}
	}

	// We must analyze the entry block first. The runtime assumes
	// the function entry map is index 0. Conveniently, layout
	// already ensured that the entry block is first.
	if lv.f.Entry != lv.f.Blocks[0] {
		lv.f.Fatalf("entry block must be first")
	}

	{
		// Reserve an entry for function entry.
		live := bitvec.New(nvars)
		lv.livevars = append(lv.livevars, live)
	}

	for _, b := range lv.f.Blocks {
		be := lv.blockEffects(b)

		// Walk forward through the basic block instructions and
		// allocate liveness maps for those instructions that need them.
		for _, v := range b.Values {
			if !lv.hasStackMap(v) {
				continue
			}

			live := bitvec.New(nvars)
			lv.livevars = append(lv.livevars, live)
		}

		// walk backward, construct maps at each safe point
		index := int32(len(lv.livevars) - 1)

		liveout.Copy(be.liveout)
		for i := len(b.Values) - 1; i >= 0; i-- {
			v := b.Values[i]

			if lv.hasStackMap(v) {
				// Found an interesting instruction, record the
				// corresponding liveness information.
				live := &lv.livevars[index]
				live.Or(*live, liveout)
				live.Or(*live, livedefer)
				index--
			}

			// Update liveness information.
			pos, e := lv.valueEffects(v)
			if e&varkill != 0 {
				liveout.Unset(pos)
			}
			if e&uevar != 0 {
				liveout.Set(pos)
			}
		}

		if b == lv.f.Entry {
			if index != 0 {
				base.Fatalf("bad index for entry point: %v", index)
			}

			// Check to make sure only input variables are live.
			for i, n := range lv.vars {
				if !liveout.Get(int32(i)) {
					continue
				}
				if n.Class == ir.PPARAM {
					continue // ok
				}
				base.FatalfAt(n.Pos(), "bad live variable at entry of %v: %L", lv.fn.Nname, n)
			}

			// Record live variables.
			live := &lv.livevars[index]
			live.Or(*live, liveout)
		}

		if lv.doClobber {
			lv.clobber(b)
		}

		// The liveness maps for this block are now complete. Compact them.
		lv.compact(b)
	}

	// If we have an open-coded deferreturn call, make a liveness map for it.
	if lv.fn.OpenCodedDeferDisallowed() {
		lv.livenessMap.DeferReturn = objw.StackMapDontCare
	} else {
		idx, _ := lv.stackMapSet.add(livedefer)
		lv.livenessMap.DeferReturn = objw.StackMapIndex(idx)
	}

	// Done compacting. Throw out the stack map set.
	lv.stackMaps = lv.stackMapSet.extractUnique()
	lv.stackMapSet = bvecSet{}

	// Useful sanity check: on entry to the function,
	// the only things that can possibly be live are the
	// input parameters.
	for j, n := range lv.vars {
		if n.Class != ir.PPARAM && lv.stackMaps[0].Get(int32(j)) {
			lv.f.Fatalf("%v %L recorded as live on entry", lv.fn.Nname, n)
		}
	}
}

// compact coalesces identical bitmaps from lv.livevars into the set
// lv.stackMapSet and records, for each safe point in b, the index of
// its (deduplicated) stack map.
//
// compact clears lv.livevars.
//
// There are actually two lists of bitmaps, one list for the local
// variables and one list for the function arguments. Both lists are
// indexed by the same PCDATA index, so the corresponding pairs must be
// considered together when merging duplicates. The argument bitmaps
// change much less often during function execution than the local
// variable bitmaps, so it is possible that we could introduce a separate
// PCDATA index for arguments and allow the PC data to change less often,
// reducing space in the executable.
func (lv *Liveness) compact(b *ssa.Block) {
	pos := 0
	if b == lv.f.Entry {
		// Handle entry stack map.
		lv.stackMapSet.add(lv.livevars[0])
		pos++
	}
	for _, v := range b.Values {
		if lv.hasStackMap(v) {
			idx, _ := lv.stackMapSet.add(lv.livevars[pos])
			pos++
			lv.livenessMap.set(v, objw.StackMapIndex(idx))
		}
		if lv.allUnsafe || v.Op != ssa.OpClobber && lv.unsafePoints.Get(int32(v.ID)) {
			lv.livenessMap.setUnsafeVal(v)
		}
	}
	if lv.allUnsafe || lv.unsafeBlocks.Get(int32(b.ID)) {
		lv.livenessMap.setUnsafeBlock(b)
	}

	// Reset livevars.
	lv.livevars = lv.livevars[:0]
}
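
// For example, if several call sites in a function share the live set
// {x, y}, stackMapSet stores that bitmap once and compact records the
// same stack map index for each of those calls.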

func (lv *Liveness) enableClobber() {
	// The clobberdead experiment inserts code to clobber pointer slots in all
	// the dead variables (locals and args) at every synchronous safepoint.
	if !base.Flag.ClobberDead {
		return
	}
	if lv.fn.Pragma&ir.CgoUnsafeArgs != 0 {
		// C or assembly code uses the exact frame layout. Don't clobber.
		return
	}
	if len(lv.vars) > 10000 || len(lv.f.Blocks) > 10000 {
		// Be careful to avoid doing too much work.
		// Bail if >10000 variables or >10000 blocks.
		// Otherwise, giant functions make this experiment generate too much code.
		return
	}
	if lv.f.Name == "forkAndExecInChild" {
		// forkAndExecInChild calls vfork on some platforms.
		// The code we add here clobbers parts of the stack in the child.
		// When the parent resumes, it is using the same stack frame. But the
		// child has clobbered stack variables that the parent needs. Boom!
		// In particular, the sys argument gets clobbered.
		return
	}
	if lv.f.Name == "wbBufFlush" ||
		((lv.f.Name == "callReflect" || lv.f.Name == "callMethod") && lv.fn.ABIWrapper()) {
		// runtime.wbBufFlush must not modify its arguments. See the comments
		// in runtime/mwbbuf.go:wbBufFlush.
		//
		// reflect.callReflect and reflect.callMethod are called from special
		// functions makeFuncStub and methodValueCall. The runtime expects
		// that it can find the first argument (ctxt) at 0(SP) in makeFuncStub
		// and methodValueCall's frame (see runtime/traceback.go:getArgInfo).
		// Normally callReflect and callMethod already do not modify the
		// argument, and keep it alive. But the compiler-generated ABI wrappers
		// don't do that. Special case the wrappers to not clobber their arguments.
		lv.noClobberArgs = true
	}
	if h := os.Getenv("GOCLOBBERDEADHASH"); h != "" {
		// Clobber only functions where the hash of the function name matches a pattern.
		// Useful for binary searching for a miscompiled function.
		hstr := ""
		for _, b := range hash.Sum32([]byte(lv.f.Name)) {
			hstr += fmt.Sprintf("%08b", b)
		}
		if !strings.HasSuffix(hstr, h) {
			return
		}
		fmt.Printf("\t\t\tCLOBBERDEAD %s\n", lv.f.Name)
	}
	lv.doClobber = true
}

// clobber inserts code to clobber pointer slots in all the dead variables
// (locals and args) at every synchronous safepoint in b.
func (lv *Liveness) clobber(b *ssa.Block) {
	// Copy block's values to a temporary.
	oldSched := append([]*ssa.Value{}, b.Values...)
	b.Values = b.Values[:0]
	idx := 0

	// Clobber pointer slots in all dead variables at entry.
	if b == lv.f.Entry {
		for len(oldSched) > 0 && len(oldSched[0].Args) == 0 {
			// Keep leading argless pseudo-ops (InitMem, SP, SB,
			// Args, ...) at the start of the block; the entry
			// clobbers are inserted after them.
			b.Values = append(b.Values, oldSched[0])
			oldSched = oldSched[1:]
		}
		clobber(lv, b, lv.livevars[0])
		idx++
	}

	// Copy values into the schedule, adding clobbering around safe points.
	for _, v := range oldSched {
		if !lv.hasStackMap(v) {
			b.Values = append(b.Values, v)
			continue
		}
		clobber(lv, b, lv.livevars[idx])
		b.Values = append(b.Values, v)
		idx++
	}
}

// clobber generates code to clobber pointer slots in all dead variables
// (those not marked in live). Clobbering instructions are added to the end
// of b.Values.
func clobber(lv *Liveness, b *ssa.Block, live bitvec.BitVec) {
	for i, n := range lv.vars {
		if !live.Get(int32(i)) && !n.Addrtaken() && !n.OpenDeferSlot() && !n.IsOutputParamHeapAddr() {
			// Don't clobber stack objects (address-taken). They are
			// tracked dynamically.
			// Also don't clobber slots that are live for defers (see
			// the code setting livedefer in epilogue).
			if lv.noClobberArgs && n.Class == ir.PPARAM {
				continue
			}
			clobberVar(b, n)
		}
	}
}

// clobberVar generates code to trash the pointers in v.
// Clobbering instructions are added to the end of b.Values.
func clobberVar(b *ssa.Block, v *ir.Name) {
	clobberWalk(b, v, 0, v.Type())
}

// clobberWalk walks the type of a (sub-portion of a) variable and emits
// clobbers for each pointer word it contains.
//
//	b = block to which we append instructions
//	v = variable
//	offset = offset of (sub-portion of) variable to clobber (in bytes)
//	t = type of sub-portion of v
func clobberWalk(b *ssa.Block, v *ir.Name, offset int64, t *types.Type) {
	if !t.HasPointers() {
		return
	}
	switch t.Kind() {
	case types.TPTR,
		types.TUNSAFEPTR,
		types.TFUNC,
		types.TCHAN,
		types.TMAP:
		clobberPtr(b, v, offset)

	case types.TSTRING:
		// struct { byte *str; int len; }
		clobberPtr(b, v, offset)

	case types.TINTER:
		// struct { Itab *tab; void *data; }
		// or, for an empty interface:
		// struct { Type *type; void *data; }
		clobberPtr(b, v, offset)
		clobberPtr(b, v, offset+int64(types.PtrSize))

	case types.TSLICE:
		// struct { byte *array; int len; int cap; }
		clobberPtr(b, v, offset)

	case types.TARRAY:
		for i := int64(0); i < t.NumElem(); i++ {
			clobberWalk(b, v, offset+i*t.Elem().Size(), t.Elem())
		}

	case types.TSTRUCT:
		for _, t1 := range t.Fields() {
			clobberWalk(b, v, offset+t1.Offset, t1.Type)
		}

	default:
		base.Fatalf("clobberWalk: unexpected type, %v", t)
	}
}

// clobberPtr generates a clobber of the pointer at offset offset in v.
// The clobber instruction is added at the end of b.
func clobberPtr(b *ssa.Block, v *ir.Name, offset int64) {
	b.NewValue0IA(src.NoXPos, ssa.OpClobber, types.TypeVoid, offset, v)
}
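
// For example, clobbering a dead []int variable at frame offset 16 emits a
// single OpClobber of the pointer word at offset 16 (the backing-array
// pointer); the len and cap words contain no pointers and are left alone.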

func (lv *Liveness) showlive(v *ssa.Value, live bitvec.BitVec) {
	if base.Flag.Live == 0 || ir.FuncName(lv.fn) == "init" || strings.HasPrefix(ir.FuncName(lv.fn), ".") {
		return
	}
	if lv.fn.Wrapper() || lv.fn.Dupok() {
		// Skip reporting liveness information for compiler-generated wrappers.
		return
	}
	if !(v == nil || v.Op.IsCall()) {
		// Historically we only printed this information at
		// calls. Keep doing so.
		return
	}
	if live.IsEmpty() {
		return
	}

	pos, s := lv.format(v, live)

	base.WarnfAt(pos, "%s", s)
}

func (lv *Liveness) Format(v *ssa.Value) string {
	if v == nil {
		_, s := lv.format(nil, lv.stackMaps[0])
		return s
	}
	if idx := lv.livenessMap.Get(v); idx.StackMapValid() {
		_, s := lv.format(v, lv.stackMaps[idx])
		return s
	}
	return ""
}

func (lv *Liveness) format(v *ssa.Value, live bitvec.BitVec) (src.XPos, string) {
	pos := lv.fn.Nname.Pos()
	if v != nil {
		pos = v.Pos
	}

	s := "live at "
	if v == nil {
		s += fmt.Sprintf("entry to %s:", ir.FuncName(lv.fn))
	} else if sym, ok := v.Aux.(*ssa.AuxCall); ok && sym.Fn != nil {
		fn := sym.Fn.Name
		if pos := strings.Index(fn, "."); pos >= 0 {
			fn = fn[pos+1:]
		}
		s += fmt.Sprintf("call to %s:", fn)
	} else {
		s += "indirect call:"
	}

	// Sort variable names for display. Variables aren't in any particular order, and
	// the order can change by architecture, particularly with differences in regabi.
	var names []string
	for j, n := range lv.vars {
		if live.Get(int32(j)) {
			names = append(names, n.Sym().Name)
		}
	}
	sort.Strings(names)
	for _, v := range names {
		s += " " + v
	}
	return pos, s
}

func (lv *Liveness) printbvec(printed bool, name string, live bitvec.BitVec) bool {
	if live.IsEmpty() {
		return printed
	}

	if !printed {
		fmt.Printf("\t")
	} else {
		fmt.Printf(" ")
	}
	fmt.Printf("%s=", name)

	comma := ""
	for i, n := range lv.vars {
		if !live.Get(int32(i)) {
			continue
		}
		fmt.Printf("%s%s", comma, n.Sym().Name)
		comma = ","
	}
	return true
}

// printeffect is like printbvec, but for valueEffects.
func (lv *Liveness) printeffect(printed bool, name string, pos int32, x bool) bool {
	if !x {
		return printed
	}
	if !printed {
		fmt.Printf("\t")
	} else {
		fmt.Printf(" ")
	}
	fmt.Printf("%s=", name)
	if x {
		fmt.Printf("%s", lv.vars[pos].Sym().Name)
	}

	return true
}

// printDebug prints the computed liveness information and inputs, for
// debugging. This format synthesizes the information used during the
// multiple passes into a single presentation.
func (lv *Liveness) printDebug() {
	fmt.Printf("liveness: %s\n", ir.FuncName(lv.fn))

	for i, b := range lv.f.Blocks {
		if i > 0 {
			fmt.Printf("\n")
		}

		// bb#0 pred=1,2 succ=3,4
		fmt.Printf("bb#%d pred=", b.ID)
		for j, pred := range b.Preds {
			if j > 0 {
				fmt.Printf(",")
			}
			fmt.Printf("%d", pred.Block().ID)
		}
		fmt.Printf(" succ=")
		for j, succ := range b.Succs {
			if j > 0 {
				fmt.Printf(",")
			}
			fmt.Printf("%d", succ.Block().ID)
		}
		fmt.Printf("\n")

		be := lv.blockEffects(b)

		// initial settings
		printed := false
		printed = lv.printbvec(printed, "uevar", be.uevar)
		printed = lv.printbvec(printed, "livein", be.livein)
		if printed {
			fmt.Printf("\n")
		}

		// program listing, with individual effects listed

		if b == lv.f.Entry {
			live := lv.stackMaps[0]
			fmt.Printf("(%s) function entry\n", base.FmtPos(lv.fn.Nname.Pos()))
			fmt.Printf("\tlive=")
			printed = false
			for j, n := range lv.vars {
				if !live.Get(int32(j)) {
					continue
				}
				if printed {
					fmt.Printf(",")
				}
				fmt.Printf("%v", n)
				printed = true
			}
			fmt.Printf("\n")
		}

		for _, v := range b.Values {
			fmt.Printf("(%s) %v\n", base.FmtPos(v.Pos), v.LongString())

			pcdata := lv.livenessMap.Get(v)

			pos, effect := lv.valueEffects(v)
			printed = false
			printed = lv.printeffect(printed, "uevar", pos, effect&uevar != 0)
			printed = lv.printeffect(printed, "varkill", pos, effect&varkill != 0)
			if printed {
				fmt.Printf("\n")
			}

			if pcdata.StackMapValid() {
				fmt.Printf("\tlive=")
				printed = false
				if pcdata.StackMapValid() {
					live := lv.stackMaps[pcdata]
					for j, n := range lv.vars {
						if !live.Get(int32(j)) {
							continue
						}
						if printed {
							fmt.Printf(",")
						}
						fmt.Printf("%v", n)
						printed = true
					}
				}
				fmt.Printf("\n")
			}

			if lv.livenessMap.GetUnsafe(v) {
				fmt.Printf("\tunsafe-point\n")
			}
		}
		if lv.livenessMap.GetUnsafeBlock(b) {
			fmt.Printf("\tunsafe-block\n")
		}

		// bb bitsets: print ones that are not universally uninteresting
		fmt.Printf("end\n")
		printed = false
		printed = lv.printbvec(printed, "varkill", be.varkill)
		printed = lv.printbvec(printed, "liveout", be.liveout)
		if printed {
			fmt.Printf("\n")
		}
	}

	fmt.Printf("\n")
}

// emit dumps the stack maps to two symbols as sequences of uint32 values.
// For each symbol, the first word is the total number of bitmaps, the
// second word is the length in bits of each bitmap, and the remaining
// bytes are the raw bitmaps. All bitmaps are of equal length.
func (lv *Liveness) emit() (argsSym, liveSym *obj.LSym) {
	// Size args bitmaps to be just large enough to hold the largest pointer.
	// First, find the largest frame-offset node we care about.
	// (Nodes without pointers aren't in lv.vars; see shouldTrack.)
	var maxArgNode *ir.Name
	for _, n := range lv.vars {
		switch n.Class {
		case ir.PPARAM, ir.PPARAMOUT:
			if !n.IsOutputParamInRegisters() {
				if maxArgNode == nil || n.FrameOffset() > maxArgNode.FrameOffset() {
					maxArgNode = n
				}
			}
		}
	}
	// Next, find the offset of the largest pointer in the largest node.
	var maxArgs int64
	if maxArgNode != nil {
		maxArgs = maxArgNode.FrameOffset() + types.PtrDataSize(maxArgNode.Type())
	}

	// Size locals bitmaps to be stkptrsize sized.
	// We cannot shrink them to only hold the largest pointer,
	// because their size is used to calculate the beginning
	// of the local variables frame.
	// Further discussion in https://golang.org/cl/104175.
	// TODO: consider trimming leading zeros.
	// This would require shifting all bitmaps.
	maxLocals := lv.stkptrsize

	// Temporary symbols for encoding bitmaps.
	var argsSymTmp, liveSymTmp obj.LSym

	args := bitvec.New(int32(maxArgs / int64(types.PtrSize)))
	aoff := objw.Uint32(&argsSymTmp, 0, uint32(len(lv.stackMaps))) // number of bitmaps
	aoff = objw.Uint32(&argsSymTmp, aoff, uint32(args.N))          // number of bits in each bitmap

	locals := bitvec.New(int32(maxLocals / int64(types.PtrSize)))
	loff := objw.Uint32(&liveSymTmp, 0, uint32(len(lv.stackMaps))) // number of bitmaps
	loff = objw.Uint32(&liveSymTmp, loff, uint32(locals.N))        // number of bits in each bitmap

	// Check that the encoded bitmaps aren't too large for the format.
	checkStackmapOverflow(args, len(lv.stackMaps), lv.fn.Pos())
	checkStackmapOverflow(locals, len(lv.stackMaps), lv.fn.Pos())

	for _, live := range lv.stackMaps {
		args.Clear()
		locals.Clear()

		lv.pointerMap(live, lv.vars, args, locals)

		aoff = objw.BitVec(&argsSymTmp, aoff, args)
		loff = objw.BitVec(&liveSymTmp, loff, locals)
	}

	// Give these LSyms content-addressable names,
	// so that they can be de-duplicated.
	// This provides significant binary size savings.
	return base.Ctxt.GCLocalsSym(argsSymTmp.P), base.Ctxt.GCLocalsSym(liveSymTmp.P)
}

// Compute is the entry point for liveness analysis. It solves for the
// liveness of pointer variables in the function and emits the runtime data
// structures read by the garbage collector.
// It returns a map from GC safe points to their corresponding stack map
// index, and a map that contains all input parameters that may be
// partially live.
func Compute(curfn *ir.Func, f *ssa.Func, stkptrsize int64, pp *objw.Progs, retLiveness bool) (Map, map[*ir.Name]bool, *Liveness) {
	// Construct the global liveness state.
	vars, idx := getvariables(curfn)
	lv := newliveness(curfn, f, vars, idx, stkptrsize)

	// Run the dataflow framework.
	lv.prologue()
	lv.solve()
	lv.epilogue()
	if base.Flag.Live > 0 {
		lv.showlive(nil, lv.stackMaps[0])
		for _, b := range f.Blocks {
			for _, val := range b.Values {
				if idx := lv.livenessMap.Get(val); idx.StackMapValid() {
					lv.showlive(val, lv.stackMaps[idx])
				}
			}
		}
	}
	if base.Flag.Live >= 2 {
		lv.printDebug()
	}

	// Update the function cache.
	{
		cache := f.Cache.Liveness.(*livenessFuncCache)
		if cap(lv.be) < 2000 { // Threshold from ssa.Cache slices.
			clear(lv.be)
			cache.be = lv.be
		}
		if len(lv.livenessMap.Vals) < 2000 {
			cache.livenessMap = lv.livenessMap
		}
	}

	// Emit the live pointer map data structures.
	ls := curfn.LSym
	fninfo := ls.Func()
	fninfo.GCArgs, fninfo.GCLocals = lv.emit()

	p := pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(rtabi.FUNCDATA_ArgsPointerMaps)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = fninfo.GCArgs

	p = pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(rtabi.FUNCDATA_LocalsPointerMaps)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = fninfo.GCLocals

	if x := lv.emitStackObjects(); x != nil {
		p := pp.Prog(obj.AFUNCDATA)
		p.From.SetConst(rtabi.FUNCDATA_StackObjects)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = x
	}

	retLv := lv
	if !retLiveness {
		retLv = nil
	}

	return lv.livenessMap, lv.partLiveArgs, retLv
}

func (lv *Liveness) emitStackObjects() *obj.LSym {
	var vars []*ir.Name
	for _, n := range lv.fn.Dcl {
		if shouldTrack(n) && n.Addrtaken() && n.Esc() != ir.EscHeap {
			vars = append(vars, n)
		}
	}
	if len(vars) == 0 {
		return nil
	}

	// Sort variables from lowest to highest address.
	slices.SortFunc(vars, func(a, b *ir.Name) int { return cmp.Compare(a.FrameOffset(), b.FrameOffset()) })

	// Populate the stack object data.
	// Format must match runtime/stack.go:stackObjectRecord.
	x := base.Ctxt.Lookup(lv.fn.LSym.Name + ".stkobj")
	x.Set(obj.AttrContentAddressable, true)
	x.Align = 4
	lv.fn.LSym.Func().StackObjects = x
	off := 0
	off = objw.Uintptr(x, off, uint64(len(vars)))
	for _, v := range vars {
		// Note: arguments and return values have non-negative frame
		// offsets, in which case the offset is relative to argp.
		// Locals have a negative frame offset, in which case the
		// offset is relative to varp.
		// We already limit the frame size, so the offset and the
		// object size should fit in an int32.
		frameOffset := v.FrameOffset()
		if frameOffset != int64(int32(frameOffset)) {
			base.Fatalf("frame offset too big: %v %d", v, frameOffset)
		}
		off = objw.Uint32(x, off, uint32(frameOffset))

		t := v.Type()
		sz := t.Size()
		if sz != int64(int32(sz)) {
			base.Fatalf("stack object too big: %v of type %v, size %d", v, t, sz)
		}
		lsym, ptrBytes := reflectdata.GCSym(t, false)
		off = objw.Uint32(x, off, uint32(sz))
		off = objw.Uint32(x, off, uint32(ptrBytes))
		off = objw.SymPtrOff(x, off, lsym)
	}

	if base.Flag.Live != 0 {
		for _, v := range vars {
			base.WarnfAt(v.Pos(), "stack object %v %v", v, v.Type())
		}
	}

	return x
}

// isfat reports whether a variable of type t needs multiple assignments
// to initialize. For example:
//
//	type T struct { x, y int }
//	x := T{x: 0, y: 1}
//
// Then we need:
//
//	var t T
//	t.x = 0
//	t.y = 1
//
// to fully initialize t.
func isfat(t *types.Type) bool {
	if t != nil {
		switch t.Kind() {
		case types.TSLICE, types.TSTRING,
			types.TINTER:
			return true
		case types.TARRAY:
			// Array of 1 element, check if element is fat
			if t.NumElem() == 1 {
				return isfat(t.Elem())
			}
			return true
		case types.TSTRUCT:
			if t.IsSIMD() {
				return false
			}
			// Struct with 1 field, check if field is fat
			if t.NumFields() == 1 {
				return isfat(t.Field(0).Type)
			}
			return true
		}
	}

	return false
}

// WriteFuncMap writes the pointer bitmaps for bodyless function fn's
// inputs and outputs as the value of symbol <fn>.args_stackmap.
// If fn has outputs, two bitmaps are written, otherwise just one.
func WriteFuncMap(fn *ir.Func, abiInfo *abi.ABIParamResultInfo) {
	if ir.FuncName(fn) == "_" {
		return
	}
	nptr := int(abiInfo.ArgWidth() / int64(types.PtrSize))
	bv := bitvec.New(int32(nptr))

	for _, p := range abiInfo.InParams() {
		typebits.SetNoCheck(p.Type, p.FrameOffset(abiInfo), bv)
	}

	nbitmap := 1
	if fn.Type().NumResults() > 0 {
		nbitmap = 2
	}

	// Check that the bitmaps aren't too large for the format.
	checkStackmapOverflow(bv, nbitmap, fn.Pos())

	lsym := base.Ctxt.Lookup(fn.LSym.Name + ".args_stackmap")
	lsym.Set(obj.AttrLinkname, true) // allow args_stackmap to be referenced from assembly
	off := objw.Uint32(lsym, 0, uint32(nbitmap))
	off = objw.Uint32(lsym, off, uint32(bv.N))
	off = objw.BitVec(lsym, off, bv)

	if fn.Type().NumResults() > 0 {
		for _, p := range abiInfo.OutParams() {
			if len(p.Registers) == 0 {
				typebits.SetNoCheck(p.Type, p.FrameOffset(abiInfo), bv)
			}
		}
		off = objw.BitVec(lsym, off, bv)
	}

	objw.Global(lsym, int32(off), obj.RODATA|obj.LOCAL)
}
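
// The resulting symbol layout is:
//
//	uint32 nbitmap // 1, or 2 if fn has results
//	uint32 nbit    // bits per bitmap
//	bitmap for inputs
//	bitmap for inputs and stack-resident outputs (only if nbitmap == 2)
//
// Note that bv is not cleared between the two writes, so the second bitmap
// includes the input bits as well.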

// checkStackmapOverflow reports a fatal error if the total encoded size of
// count stack maps of bv.N bits each would overflow int32, which the
// object file format cannot represent.
func checkStackmapOverflow(bv bitvec.BitVec, count int, pos src.XPos) {
	if bv.N <= 0 || count <= 0 {
		return
	}
	bytesPerBitVec := (int64(bv.N) + 7) >> 3
	totalBytes := bytesPerBitVec * int64(count)
	if totalBytes > math.MaxInt32 {
		// Stack map data is addressed with 32-bit offsets.
		base.FatalfAt(pos, "liveness stackmaps are too large: nbit=%d count=%d totalBytes=%d exceeds MaxInt32", bv.N, count, totalBytes)
	}
}