// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Garbage collector liveness bitmap generation.

// The command line flag -live causes this code to print debug information.
// The levels are:
//
//	-live (aka -live=1): print liveness lists as code warnings at safe points
//	-live=2: print an assembly listing with liveness annotations

package liveness

import (
	"cmp"
	"fmt"
	"os"
	"slices"
	"sort"
	"strings"

	"cmd/compile/internal/abi"
	"cmd/compile/internal/base"
	"cmd/compile/internal/bitvec"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/objw"
	"cmd/compile/internal/reflectdata"
	"cmd/compile/internal/ssa"
	"cmd/compile/internal/typebits"
	"cmd/compile/internal/types"
	"cmd/internal/hash"
	"cmd/internal/obj"
	"cmd/internal/src"

	rtabi "internal/abi"
)

// OpVarDef is an annotation for the liveness analysis, marking a place
// where a complete initialization (definition) of a variable begins.
// Since the liveness analysis can see initialization of single-word
// variables quite easily, OpVarDef is only needed for multi-word
// variables satisfying isfat(n.Type()). For simplicity though, buildssa
// emits OpVarDef regardless of variable width.
//
// An 'OpVarDef x' annotation in the instruction stream tells the liveness
// analysis to behave as though the variable x is being initialized at that
// point in the instruction stream. The OpVarDef must appear before the
// actual (multi-instruction) initialization, and it must also appear after
// any uses of the previous value, if any. For example, if compiling:
//
//	x = x[1:]
//
// it is important to generate code like:
//
//	base, len, cap = pieces of x[1:]
//	OpVarDef x
//	x = {base, len, cap}
//
// If instead the generated code looked like:
//
//	OpVarDef x
//	base, len, cap = pieces of x[1:]
//	x = {base, len, cap}
//
// then the liveness analysis would decide the previous value of x was
// unnecessary even though it is about to be used by the x[1:] computation.
// Similarly, if the generated code looked like:
//
//	base, len, cap = pieces of x[1:]
//	x = {base, len, cap}
//	OpVarDef x
//
// then the liveness analysis will not preserve the new value of x, because
// the OpVarDef appears to have "overwritten" it.
//
// OpVarDef is a bit of a kludge to work around the fact that the instruction
// stream is working on single-word values but the liveness analysis thinks
// in terms of variables, which might be multiple words.

// blockEffects summarizes the liveness effects on an SSA block.
type blockEffects struct {
	// Computed during Liveness.prologue using only the content of
	// individual blocks:
	//
	//	uevar: upward exposed variables (used before set in block)
	//	varkill: killed variables (set in block)
	uevar   bitvec.BitVec
	varkill bitvec.BitVec

	// Computed during Liveness.solve using control flow information:
	//
	//	livein: variables live at block entry
	//	liveout: variables live at block exit
	livein  bitvec.BitVec
	liveout bitvec.BitVec
}
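
// Example (illustrative): for a block containing, in order,
//
//	x = ...      (kill of x)
//	... = y      (use of y)
//	y = ...      (kill of y)
//
// the summary is uevar = {y} and varkill = {x, y}: y is read before any
// write to it in the block, while a use that follows a kill, as in
//
//	x = ...
//	... = x
//
// does not put x in uevar, because the value read is produced within the block.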

// A collection of global state used by liveness analysis.
type Liveness struct {
	fn         *ir.Func
	f          *ssa.Func
	vars       []*ir.Name
	idx        map[*ir.Name]int32
	stkptrsize int64

	be []blockEffects

	// allUnsafe indicates that all points in this function are
	// unsafe-points.
	allUnsafe bool
	// unsafePoints bit i is set if Value ID i is an unsafe-point
	// (preemption is not allowed). Only valid if !allUnsafe.
	unsafePoints bitvec.BitVec
	// unsafeBlocks bit i is set if Block ID i is an unsafe-point
	// (preemption is not allowed on any end-of-block safe point).
	// Only valid if !allUnsafe.
	unsafeBlocks bitvec.BitVec

	// An array with a bit vector for each safe point in the
	// current Block during Liveness.epilogue. Indexed in Value
	// order for that block. Additionally, for the entry block
	// livevars[0] is the entry bitmap. Liveness.compact moves
	// these to stackMaps.
	livevars []bitvec.BitVec

	// livenessMap maps from safe points (i.e., CALLs) to their
	// liveness map indexes.
	livenessMap Map
	stackMapSet bvecSet
	stackMaps   []bitvec.BitVec

	cache progeffectscache

	// partLiveArgs includes input arguments (PPARAM) that may be
	// partially live: some part of the variable is used somewhere,
	// but not necessarily the whole value.
	partLiveArgs map[*ir.Name]bool

	doClobber     bool
	noClobberArgs bool

	// conservativeWrites treats a partial write to a multi-word
	// variable as also reading it; used only when computing liveness
	// for stack slot merging, which must be conservative about
	// variables that are only partially written.
	conservativeWrites bool
}

// Map maps from *ssa.Value to StackMapIndex.
// Also keeps track of unsafe ssa.Values and ssa.Blocks.
// (unsafe = can't be interrupted during GC.)
type Map struct {
	Vals         map[ssa.ID]objw.StackMapIndex
	UnsafeVals   map[ssa.ID]bool
	UnsafeBlocks map[ssa.ID]bool
	// The set of live, pointer-containing variables at the DeferReturn
	// call (only set when open-coded defers are used).
	DeferReturn objw.StackMapIndex
}

func (m *Map) reset() {
	if m.Vals == nil {
		m.Vals = make(map[ssa.ID]objw.StackMapIndex)
		m.UnsafeVals = make(map[ssa.ID]bool)
		m.UnsafeBlocks = make(map[ssa.ID]bool)
	} else {
		clear(m.Vals)
		clear(m.UnsafeVals)
		clear(m.UnsafeBlocks)
	}
	m.DeferReturn = objw.StackMapDontCare
}

func (m *Map) set(v *ssa.Value, i objw.StackMapIndex) {
	m.Vals[v.ID] = i
}
func (m *Map) setUnsafeVal(v *ssa.Value) {
	m.UnsafeVals[v.ID] = true
}
func (m *Map) setUnsafeBlock(b *ssa.Block) {
	m.UnsafeBlocks[b.ID] = true
}

func (m Map) Get(v *ssa.Value) objw.StackMapIndex {
	// If v isn't in the map, then it's a value not at a safe point.
	if idx, ok := m.Vals[v.ID]; ok {
		return idx
	}
	return objw.StackMapDontCare
}
func (m Map) GetUnsafe(v *ssa.Value) bool {
	// default is safe
	return m.UnsafeVals[v.ID]
}
func (m Map) GetUnsafeBlock(b *ssa.Block) bool {
	// default is safe
	return m.UnsafeBlocks[b.ID]
}

type progeffectscache struct {
	retuevar    []int32
	tailuevar   []int32
	initialized bool
}

// shouldTrack reports whether the liveness analysis
// should track the variable n.
// We don't care about variables that have no pointers,
// nor do we care about non-local variables,
// nor do we care about empty structs (handled by the pointer check),
// nor do we care about closure variables (handled by capturevars),
// nor do we care about the fake PAUTOHEAP variables.
func shouldTrack(n *ir.Name) bool {
	return (n.Class == ir.PAUTO && n.Esc() != ir.EscHeap || n.Class == ir.PPARAM || n.Class == ir.PPARAMOUT) && n.Type().HasPointers()
}
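
// For example (illustrative), in
//
//	func f(p *int) (q *int) {
//		x := new(int)
//		y := 0
//		...
//	}
//
// p, q, and x are tracked (pointer-containing PPARAM, PPARAMOUT, and
// non-escaping PAUTO, respectively), while y is not: it contains no
// pointers. If x escaped to the heap, its stack slot would not be
// tracked either.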

// getvariables returns the list of on-stack variables that we need to track
// and a map for looking up indices by *Node.
func getvariables(fn *ir.Func) ([]*ir.Name, map[*ir.Name]int32) {
	var vars []*ir.Name
	for _, n := range fn.Dcl {
		if shouldTrack(n) {
			vars = append(vars, n)
		}
	}
	idx := make(map[*ir.Name]int32, len(vars))
	for i, n := range vars {
		idx[n] = int32(i)
	}
	return vars, idx
}

func (lv *Liveness) initcache() {
	if lv.cache.initialized {
		base.Fatalf("liveness cache initialized twice")
		return
	}
	lv.cache.initialized = true

	for i, node := range lv.vars {
		switch node.Class {
		case ir.PPARAM:
			// A return instruction with a p.to is a tail return, which brings
			// the stack pointer back up (if it ever went down) and then jumps
			// to a new function entirely. That form of instruction must read
			// all the parameters for correctness, and similarly it must not
			// read the out arguments - they won't be set until the new
			// function runs.
			lv.cache.tailuevar = append(lv.cache.tailuevar, int32(i))

		case ir.PPARAMOUT:
			// All results are live at every return point.
			// Note that this point is after escaping return values
			// are copied back to the heap using their PAUTOHEAP references.
			lv.cache.retuevar = append(lv.cache.retuevar, int32(i))
		}
	}
}

// A liveEffect is a set of flags that describe an instruction's
// liveness effects on a variable.
//
// The possible flags are:
//
//	uevar - used by the instruction
//	varkill - killed by the instruction (set)
//
// A kill happens after the use (for an instruction that updates a value, for example).
type liveEffect int

const (
	uevar liveEffect = 1 << iota
	varkill
)
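
// For example (illustrative), a register spill of a single-word variable x
// (OpStoreReg) produces varkill, a fill (OpLoadReg) produces uevar, and an
// op that both reads and writes x produces uevar|varkill, with the use
// taking effect before the kill during the backward scans in prologue and
// epilogue.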

// valueEffects returns the index of a variable in lv.vars and the
// liveness effects v has on that variable.
// If v does not affect any tracked variables, it returns -1, 0.
func (lv *Liveness) valueEffects(v *ssa.Value) (int32, liveEffect) {
	n, e := affectedVar(v)
	if e == 0 || n == nil {
		return -1, 0
	}

	// AllocFrame has dropped unused variables from
	// lv.fn.Func.Dcl, but they might still be referenced by
	// OpVarFoo pseudo-ops. Ignore them to prevent "lost track of
	// variable" ICEs (issue 19632).
	switch v.Op {
	case ssa.OpVarDef, ssa.OpVarLive, ssa.OpKeepAlive:
		if !n.Used() {
			return -1, 0
		}
	}

	if n.Class == ir.PPARAM && !n.Addrtaken() && n.Type().Size() > int64(types.PtrSize) {
		// Only aggregate-typed arguments that are not address-taken can be
		// partially live.
		lv.partLiveArgs[n] = true
	}

	var effect liveEffect
	// Read is a read, obviously.
	//
	// Addr is a read also, as any subsequent holder of the pointer must
	// be able to see all the values, including initialization, written
	// so far. This also prevents a variable from "coming back from the
	// dead" and presenting stale pointers to the garbage collector.
	// See issue 28445.
	if e&(ssa.SymRead|ssa.SymAddr) != 0 {
		effect |= uevar
	}
	if e&ssa.SymWrite != 0 {
		if !isfat(n.Type()) || v.Op == ssa.OpVarDef {
			effect |= varkill
		} else if lv.conservativeWrites {
			effect |= uevar
		}
	}

	if effect == 0 {
		return -1, 0
	}

	if pos, ok := lv.idx[n]; ok {
		return pos, effect
	}
	return -1, 0
}

// affectedVar returns the *ir.Name node affected by v.
func affectedVar(v *ssa.Value) (*ir.Name, ssa.SymEffect) {
	// Special cases.
	switch v.Op {
	case ssa.OpLoadReg:
		n, _ := ssa.AutoVar(v.Args[0])
		return n, ssa.SymRead
	case ssa.OpStoreReg:
		n, _ := ssa.AutoVar(v)
		return n, ssa.SymWrite

	case ssa.OpArgIntReg:
		// OpArgIntReg reads an argument that arrived in a register.
		// Treat it as a read of the corresponding stack variable, so
		// that the argument is considered live at this point even
		// though its spill slot may not have been written yet. (The
		// actual spill, if any, is an OpStoreReg handled above; see
		// also the partLiveArgs handling in valueEffects.)
		n, _ := ssa.AutoVar(v)
		return n, ssa.SymRead

	case ssa.OpVarLive:
		return v.Aux.(*ir.Name), ssa.SymRead
	case ssa.OpVarDef:
		return v.Aux.(*ir.Name), ssa.SymWrite
	case ssa.OpKeepAlive:
		n, _ := ssa.AutoVar(v.Args[0])
		return n, ssa.SymRead
	}

	e := v.Op.SymEffect()
	if e == 0 {
		return nil, 0
	}

	switch a := v.Aux.(type) {
	case nil, *obj.LSym:
		// ok, but no node
		return nil, e
	case *ir.Name:
		return a, e
	default:
		base.Fatalf("weird aux: %s", v.LongString())
		return nil, e
	}
}

type livenessFuncCache struct {
	be          []blockEffects
	livenessMap Map
}

// Constructs a new liveness structure used to hold the global state of the
// liveness computation. The cfg argument is a slice of *BasicBlocks and the
// vars argument is a slice of *Nodes.
func newliveness(fn *ir.Func, f *ssa.Func, vars []*ir.Name, idx map[*ir.Name]int32, stkptrsize int64) *Liveness {
	lv := &Liveness{
		fn:         fn,
		f:          f,
		vars:       vars,
		idx:        idx,
		stkptrsize: stkptrsize,
	}

	// Significant sources of allocation are kept in the ssa.Cache
	// and reused. Surprisingly, the bit vectors themselves aren't
	// a major source of allocation, but the liveness maps are.
	if lc, _ := f.Cache.Liveness.(*livenessFuncCache); lc == nil {
		// Prep the cache so liveness can fill it later.
		f.Cache.Liveness = new(livenessFuncCache)
	} else {
		if cap(lc.be) >= f.NumBlocks() {
			lv.be = lc.be[:f.NumBlocks()]
		}
		lv.livenessMap = Map{
			Vals:         lc.livenessMap.Vals,
			UnsafeVals:   lc.livenessMap.UnsafeVals,
			UnsafeBlocks: lc.livenessMap.UnsafeBlocks,
			DeferReturn:  objw.StackMapDontCare,
		}
		lc.livenessMap.Vals = nil
		lc.livenessMap.UnsafeVals = nil
		lc.livenessMap.UnsafeBlocks = nil
	}
	if lv.be == nil {
		lv.be = make([]blockEffects, f.NumBlocks())
	}

	nblocks := int32(len(f.Blocks))
	nvars := int32(len(vars))
	bulk := bitvec.NewBulk(nvars, nblocks*7, fn.Pos())
	for _, b := range f.Blocks {
		be := lv.blockEffects(b)

		be.uevar = bulk.Next()
		be.varkill = bulk.Next()
		be.livein = bulk.Next()
		be.liveout = bulk.Next()
	}
	lv.livenessMap.reset()

	lv.markUnsafePoints()

	lv.partLiveArgs = make(map[*ir.Name]bool)

	lv.enableClobber()

	return lv
}

func (lv *Liveness) blockEffects(b *ssa.Block) *blockEffects {
	return &lv.be[b.ID]
}

// Generates live pointer value maps for arguments and local variables. The
// this argument and the in arguments are always assumed live. The vars
// argument is a slice of *Nodes.
func (lv *Liveness) pointerMap(liveout bitvec.BitVec, vars []*ir.Name, args, locals bitvec.BitVec) {
	var slotsSeen map[int64]*ir.Name
	checkForDuplicateSlots := base.Debug.MergeLocals != 0
	if checkForDuplicateSlots {
		slotsSeen = make(map[int64]*ir.Name)
	}
	for i := int32(0); ; i++ {
		i = liveout.Next(i)
		if i < 0 {
			break
		}
		node := vars[i]
		switch node.Class {
		case ir.PPARAM, ir.PPARAMOUT:
			if !node.IsOutputParamInRegisters() {
				if node.FrameOffset() < 0 {
					lv.f.Fatalf("Node %v has frameoffset %d\n", node.Sym().Name, node.FrameOffset())
				}
				typebits.SetNoCheck(node.Type(), node.FrameOffset(), args)
				break
			}
			fallthrough
		case ir.PAUTO:
			if checkForDuplicateSlots {
				if prev, ok := slotsSeen[node.FrameOffset()]; ok {
					base.FatalfAt(node.Pos(), "two vars live at pointerMap generation: %q and %q", prev.Sym().Name, node.Sym().Name)
				}
				slotsSeen[node.FrameOffset()] = node
			}
			typebits.Set(node.Type(), node.FrameOffset()+lv.stkptrsize, locals)
		}
	}
}
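
// Bit indexing above (illustrative): a pointer word at frame offset off in a
// stack-passed argument occupies bit off/PtrSize of args, while one in a
// local occupies bit (off+stkptrsize)/PtrSize of locals, since locals have
// negative frame offsets. For example, with 8-byte pointers and
// stkptrsize = 24, a pointer local at offset -16 sets bit (-16+24)/8 = 1.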

// IsUnsafe indicates that all points in this function are
// unsafe-points.
func IsUnsafe(f *ssa.Func) bool {
	// The runtime assumes the only safe-points are function
	// prologues (because that's how it used to be). We could and
	// should improve that, but for now keep considering all points
	// in the runtime unsafe. obj will add prologues and their
	// safe-points.
	//
	// go:nosplit functions are similar. Since safe points used to
	// be coupled with stack checks, go:nosplit often actually
	// means "no safe points in this function".
	return base.Flag.CompilingRuntime || f.NoSplit
}

// markUnsafePoints finds unsafe points and computes lv.unsafePoints.
func (lv *Liveness) markUnsafePoints() {
	if IsUnsafe(lv.f) {
		// No complex analysis necessary.
		lv.allUnsafe = true
		return
	}

	lv.unsafePoints = bitvec.New(int32(lv.f.NumValues()))
	lv.unsafeBlocks = bitvec.New(int32(lv.f.NumBlocks()))

	// Mark architecture-specific unsafe points.
	for _, b := range lv.f.Blocks {
		for _, v := range b.Values {
			if v.Op.UnsafePoint() {
				lv.unsafePoints.Set(int32(v.ID))
			}
		}
	}

	for _, b := range lv.f.Blocks {
		for _, v := range b.Values {
			if v.Op != ssa.OpWBend {
				continue
			}
			// WBend appears at the start of a block, like this:
			//
			//	if wbEnabled: goto C else D
			//	C:
			//	  ... some write barrier enabled code ...
			//	  goto B
			//	D:
			//	  ... some write barrier disabled code ...
			//	  goto B
			//	B:
			//	  m1 = Phi mem_C mem_D
			//	  m2 = store operation ... m1
			//	  m3 = store operation ... m2
			//	  m4 = WBend m3
			//
			// Find the memory Phi at the top of b; everything in b from
			// the Phi to the WBend was produced under the write barrier
			// test.
			m := v
			for {
				m = m.MemoryArg()
				if m.Block != b {
					lv.f.Fatalf("can't find Phi before write barrier end mark %v", v)
				}
				if m.Op == ssa.OpPhi {
					break
				}
			}
			// Find the two predecessor blocks (write barrier on and write barrier off).
			if len(m.Args) != 2 {
				lv.f.Fatalf("phi before write barrier end mark has %d args, want 2", len(m.Args))
			}
			c := b.Preds[0].Block()
			d := b.Preds[1].Block()

			// Find their common predecessor block (the one that branches
			// based on wb on/off). It might be a full diamond pattern, or
			// one arm of the diamond may be empty, in which case one of
			// c and d is itself the decision block.
			var decisionBlock *ssa.Block
			if len(c.Preds) == 1 && c.Preds[0].Block() == d {
				decisionBlock = d
			} else if len(d.Preds) == 1 && d.Preds[0].Block() == c {
				decisionBlock = c
			} else if len(c.Preds) == 1 && len(d.Preds) == 1 && c.Preds[0].Block() == d.Preds[0].Block() {
				decisionBlock = c.Preds[0].Block()
			} else {
				lv.f.Fatalf("can't find write barrier pattern %v", v)
			}
			if len(decisionBlock.Succs) != 2 {
				lv.f.Fatalf("common predecessor block the wrong type %s", decisionBlock.Kind)
			}

			// Flow backwards from the control value to find the
			// flag load. We don't know what lowered ops we're
			// looking for, but all current arches produce a
			// single op that does the memory load from the flag
			// address, so we look for that.
			var load *ssa.Value
			v := decisionBlock.Controls[0]
			for {
				if v.MemoryArg() != nil {
					// Single instruction to load (and maybe compare) the write barrier flag.
					if sym, ok := v.Aux.(*obj.LSym); ok && sym == ir.Syms.WriteBarrier {
						load = v
						break
					}
					// Some architectures have to materialize the address
					// separately from the load.
					if sym, ok := v.Args[0].Aux.(*obj.LSym); ok && sym == ir.Syms.WriteBarrier {
						load = v
						break
					}
					v.Fatalf("load of write barrier flag not from correct global: %s", v.LongString())
				}
				// Common case: just flow backwards.
				if len(v.Args) == 1 || len(v.Args) == 2 && v.Args[0] == v.Args[1] {
					// Note: 386 lowers Neq32 to (TESTL cond cond),
					v = v.Args[0]
					continue
				}
				v.Fatalf("write barrier control value has more than one argument: %s", v.LongString())
			}

			// Mark everything after the load unsafe.
			found := false
			for _, v := range decisionBlock.Values {
				if found {
					lv.unsafePoints.Set(int32(v.ID))
				}
				found = found || v == load
			}
			lv.unsafeBlocks.Set(int32(decisionBlock.ID))

			// Mark the write barrier on/off blocks as unsafe.
			for _, e := range decisionBlock.Succs {
				x := e.Block()
				if x == b {
					continue
				}
				for _, v := range x.Values {
					lv.unsafePoints.Set(int32(v.ID))
				}
				lv.unsafeBlocks.Set(int32(x.ID))
			}

			// Mark from the join point up to the WBend as unsafe.
			for _, v := range b.Values {
				if v.Op == ssa.OpWBend {
					break
				}
				lv.unsafePoints.Set(int32(v.ID))
			}
		}
	}
}

// Returns true for instructions that must have a stack map.
//
// This does not necessarily mean the instruction is a safe-point. In
// particular, call Values can have a stack map in case the callee
// grows the stack, but not themselves be a safe-point.
func (lv *Liveness) hasStackMap(v *ssa.Value) bool {
	if !v.Op.IsCall() {
		return false
	}
	// wbZero and wbMove are write barriers and
	// deeply non-preemptible. They are unsafe points and
	// hence should not have liveness maps.
	if sym, ok := v.Aux.(*ssa.AuxCall); ok && (sym.Fn == ir.Syms.WBZero || sym.Fn == ir.Syms.WBMove) {
		return false
	}
	return true
}

// Initializes the sets for solving the live variables. Visits all the
// instructions in each basic block to summarize the information at each
// basic block.
func (lv *Liveness) prologue() {
	lv.initcache()

	for _, b := range lv.f.Blocks {
		be := lv.blockEffects(b)

		// Walk the block instructions backward and update the block
		// effects with each value's effects.
		for j := len(b.Values) - 1; j >= 0; j-- {
			pos, e := lv.valueEffects(b.Values[j])
			if e&varkill != 0 {
				be.varkill.Set(pos)
				be.uevar.Unset(pos)
			}
			if e&uevar != 0 {
				be.uevar.Set(pos)
			}
		}
	}
}

// Solve the liveness dataflow equations.
func (lv *Liveness) solve() {
	// These temporary bitvectors exist to avoid successive allocations and
	// frees within the loop.
	nvars := int32(len(lv.vars))
	newlivein := bitvec.New(nvars)
	newliveout := bitvec.New(nvars)

	// Walk blocks in postorder ordering. This improves convergence.
	po := lv.f.Postorder()

	// Iterate through the blocks in reverse round-robin fashion. A work
	// queue might be slightly faster. As is, the number of iterations is
	// never more than the maximum encountered loop nesting depth, arguably
	// a theoretical concern.
	for change := true; change; {
		change = false
		for _, b := range po {
			be := lv.blockEffects(b)

			newliveout.Clear()
			switch b.Kind {
			case ssa.BlockRet:
				for _, pos := range lv.cache.retuevar {
					newliveout.Set(pos)
				}
			case ssa.BlockRetJmp:
				for _, pos := range lv.cache.tailuevar {
					newliveout.Set(pos)
				}
			case ssa.BlockExit:
				// panic exit - nothing to do
			default:
				// A variable is live on output from this block
				// if it is live on input to some successor.
				//
				// out[b] = \bigcup_{s \in succ[b]} in[s]
				newliveout.Copy(lv.blockEffects(b.Succs[0].Block()).livein)
				for _, succ := range b.Succs[1:] {
					newliveout.Or(newliveout, lv.blockEffects(succ.Block()).livein)
				}
			}

			if !be.liveout.Eq(newliveout) {
				change = true
				be.liveout.Copy(newliveout)
			}

			// A variable is live on input to this block
			// if it is used by this block, or live on output from this block and
			// not set by the code in this block.
			//
			// in[b] = uevar[b] \cup (out[b] \setminus varkill[b])
			newlivein.AndNot(be.liveout, be.varkill)
			be.livein.Or(newlivein, be.uevar)
		}
	}
}
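
// A small worked example of the equations above (illustrative):
//
//	b1: x = ...         varkill[b1] = {x}, successor b2
//	b2: ... = x         uevar[b2] = {x}, successors b2 (loop) and b3
//	b3: ret
//
// The first pass computes in[b2] = {x} and hence out[b1] = {x}; because b2
// is its own successor, out[b2] also picks up x on the next pass, and the
// iteration stops once no in or out set changes, bounded by the loop
// nesting depth as noted above.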

// Visits all instructions in a basic block and computes a bit vector of live
// variables at each safe point location.
func (lv *Liveness) epilogue() {
	nvars := int32(len(lv.vars))
	liveout := bitvec.New(nvars)
	livedefer := bitvec.New(nvars)

	// If there is a defer (that could recover), then all output
	// parameters are live all the time. In addition, any locals
	// that are pointers to heap-allocated output parameters are
	// live all the time as well. On the other hand, an output
	// parameter that has moved to the heap is not live on the
	// stack at all: only its heap home is, via the corresponding
	// heap-address local.
	if lv.fn.HasDefer() {
		for i, n := range lv.vars {
			if n.Class == ir.PPARAMOUT {
				if n.IsOutputParamHeapAddr() {
					// Just to be paranoid.  Heap addresses are PAUTOs.
					base.Fatalf("variable %v both output param and heap output param", n)
				}
				if n.Heapaddr != nil {
					// If this variable moved to the heap, then
					// the stack copy is not live.
					continue
				}
				// Note: zeroing is handled by zeroResults in walk.go.
				livedefer.Set(int32(i))
			}
			if n.IsOutputParamHeapAddr() {
				// This variable will be overwritten early in the function
				// prologue (from the result of a mallocgc) but we need to
				// zero it in case that malloc causes a stack scan.
				n.SetNeedzero(true)
				livedefer.Set(int32(i))
			}
			if n.OpenDeferSlot() {
				// Open-coded defer args slots must be live
				// everywhere in a function, since a panic can
				// occur (almost) anywhere. Because it is live
				// everywhere, it must be zeroed on entry.
				livedefer.Set(int32(i))
				// It was already marked as Needzero when created.
				if !n.Needzero() {
					base.Fatalf("all pointer-containing defer arg slots should have Needzero set")
				}
			}
		}
	}

	// We must analyze the entry block first. The runtime assumes
	// the function entry map is index 0. Conveniently, layout
	// already ensured that the entry block is first.
	if lv.f.Entry != lv.f.Blocks[0] {
		lv.f.Fatalf("entry block must be first")
	}

	{
		// Reserve an entry for function entry.
		live := bitvec.New(nvars)
		lv.livevars = append(lv.livevars, live)
	}

	for _, b := range lv.f.Blocks {
		be := lv.blockEffects(b)

		// Walk forward through the basic block instructions and
		// allocate liveness maps for those instructions that need them.
		for _, v := range b.Values {
			if !lv.hasStackMap(v) {
				continue
			}

			live := bitvec.New(nvars)
			lv.livevars = append(lv.livevars, live)
		}

		// walk backward, construct maps at each safe point
		index := int32(len(lv.livevars) - 1)

		liveout.Copy(be.liveout)
		for i := len(b.Values) - 1; i >= 0; i-- {
			v := b.Values[i]

			if lv.hasStackMap(v) {
				// Found an interesting instruction, record the
				// corresponding liveness information.
				live := &lv.livevars[index]
				live.Or(*live, liveout)
				live.Or(*live, livedefer)
				index--
			}

			// Update liveness information.
			pos, e := lv.valueEffects(v)
			if e&varkill != 0 {
				liveout.Unset(pos)
			}
			if e&uevar != 0 {
				liveout.Set(pos)
			}
		}

		if b == lv.f.Entry {
			if index != 0 {
				base.Fatalf("bad index for entry point: %v", index)
			}

			// Check to make sure only input variables are live.
			for i, n := range lv.vars {
				if !liveout.Get(int32(i)) {
					continue
				}
				if n.Class == ir.PPARAM {
					continue
				}
				base.FatalfAt(n.Pos(), "bad live variable at entry of %v: %L", lv.fn.Nname, n)
			}

			// Record live variables.
			live := &lv.livevars[index]
			live.Or(*live, liveout)
		}

		if lv.doClobber {
			lv.clobber(b)
		}

		// The liveness maps for this block are now complete. Compact them.
		lv.compact(b)
	}

	// If we have an open-coded deferreturn call, make a liveness map for it.
	if lv.fn.OpenCodedDeferDisallowed() {
		lv.livenessMap.DeferReturn = objw.StackMapDontCare
	} else {
		idx, _ := lv.stackMapSet.add(livedefer)
		lv.livenessMap.DeferReturn = objw.StackMapIndex(idx)
	}

	// Done compacting. Throw out the stack map set.
	lv.stackMaps = lv.stackMapSet.extractUnique()
	lv.stackMapSet = bvecSet{}

	// Useful sanity check: on entry to the function,
	// the only things that can possibly be live are the
	// input parameters.
	for j, n := range lv.vars {
		if n.Class != ir.PPARAM && lv.stackMaps[0].Get(int32(j)) {
			lv.f.Fatalf("%v %L recorded as live on entry", lv.fn.Nname, n)
		}
	}
}

// Compact coalesces identical bitmaps from lv.livevars into the set
// lv.stackMapSet.
//
// Compact clears lv.livevars.
//
// There are actually two lists of bitmaps, one list for the local variables and one
// list for the function arguments. Both lists are indexed by the same PCDATA
// index, so the corresponding pairs must be considered together when
// merging duplicates. The argument bitmaps change much less often during
// function execution than the local variable bitmaps, so it is possible that
// we could introduce a separate PCDATA index for arguments vs locals and
// then compact the set of argument bitmaps separately from the set of
// local variable bitmaps.
func (lv *Liveness) compact(b *ssa.Block) {
	pos := 0
	if b == lv.f.Entry {
		// Handle entry stack map.
		lv.stackMapSet.add(lv.livevars[0])
		pos++
	}
	for _, v := range b.Values {
		if lv.hasStackMap(v) {
			idx, _ := lv.stackMapSet.add(lv.livevars[pos])
			pos++
			lv.livenessMap.set(v, objw.StackMapIndex(idx))
		}
		if lv.allUnsafe || v.Op != ssa.OpClobber && lv.unsafePoints.Get(int32(v.ID)) {
			lv.livenessMap.setUnsafeVal(v)
		}
	}
	if lv.allUnsafe || lv.unsafeBlocks.Get(int32(b.ID)) {
		lv.livenessMap.setUnsafeBlock(b)
	}

	// Reset livevars.
	lv.livevars = lv.livevars[:0]
}

func (lv *Liveness) enableClobber() {
	// The clobberdead experiment inserts code to clobber pointer slots in all
	// the dead variables (locals and args) at every synchronous safepoint.
	if !base.Flag.ClobberDead {
		return
	}
	if lv.fn.Pragma&ir.CgoUnsafeArgs != 0 {
		// C or assembly code uses the exact frame layout. Don't clobber.
		return
	}
	if len(lv.vars) > 10000 || len(lv.f.Blocks) > 10000 {
		// Be careful to avoid doing too much work.
		// Bail if >10000 variables or >10000 blocks.
		// Otherwise, giant functions make this experiment generate too much code.
		return
	}
	if lv.f.Name == "forkAndExecInChild" {
		// forkAndExecInChild calls vfork on some platforms.
		// The code we add here clobbers parts of the stack in the child.
		// When the parent resumes, it is using the same stack frame. But the
		// child has clobbered stack variables that the parent needs. Boom!
		// In particular, the sys argument gets clobbered.
		return
	}
	if lv.f.Name == "wbBufFlush" ||
		((lv.f.Name == "callReflect" || lv.f.Name == "callMethod") && lv.fn.ABIWrapper()) {
		// runtime.wbBufFlush must not modify its arguments. See the comments
		// in runtime/mwbbuf.go:wbBufFlush.
		//
		// reflect.callReflect and reflect.callMethod are called from special
		// functions makeFuncStub and methodValueCall. The runtime expects
		// that it can find the first argument (ctxt) at 0(SP) in makeFuncStub
		// and methodValueCall's frame (see runtime/traceback.go:getArgInfo).
		// Normally callReflect and callMethod already do not modify the
		// argument, and keep it alive. But the compiler-generated ABI
		// wrappers don't do that. Clobbering the arguments would break the
		// stack unwinder, so don't clobber them.
		lv.noClobberArgs = true
	}
	if h := os.Getenv("GOCLOBBERDEADHASH"); h != "" {
		// Clobber only functions where the hash of the function name matches a pattern.
		// Useful for binary searching for a miscompiled function.
		hstr := ""
		for _, b := range hash.Sum32([]byte(lv.f.Name)) {
			hstr += fmt.Sprintf("%08b", b)
		}
		if !strings.HasSuffix(hstr, h) {
			return
		}
		fmt.Printf("\t\t\tCLOBBERDEAD %s\n", lv.f.Name)
	}
	lv.doClobber = true
}

// Inserts code to clobber pointer slots in all the dead variables (locals and args)
// at every synchronous safepoint in b.
func (lv *Liveness) clobber(b *ssa.Block) {
	// Copy block's values to a temporary.
	oldSched := append([]*ssa.Value{}, b.Values...)
	b.Values = b.Values[:0]
	idx := 0

	// Clobber pointer slots in all dead variables at entry.
	if b == lv.f.Entry {
		for len(oldSched) > 0 && len(oldSched[0].Args) == 0 {
			// Skip argless ops. We need to skip at least
			// the lowered ClosurePtr op, because it
			// really wants to be first. This will also
			// skip ops like InitMem and SP, which are ok.
			b.Values = append(b.Values, oldSched[0])
			oldSched = oldSched[1:]
		}
		clobber(lv, b, lv.livevars[0])
		idx++
	}

	// Copy values into schedule, adding clobbering around safepoints.
	for _, v := range oldSched {
		if !lv.hasStackMap(v) {
			b.Values = append(b.Values, v)
			continue
		}
		clobber(lv, b, lv.livevars[idx])
		b.Values = append(b.Values, v)
		idx++
	}
}

// clobber generates code to clobber pointer slots in all dead variables
// (those not marked in live). Clobbering instructions are added to the end
// of b.Values.
func clobber(lv *Liveness, b *ssa.Block, live bitvec.BitVec) {
	for i, n := range lv.vars {
		if !live.Get(int32(i)) && !n.Addrtaken() && !n.OpenDeferSlot() && !n.IsOutputParamHeapAddr() {
			// Don't clobber stack objects (address-taken). They are
			// tracked dynamically.
			// Also don't clobber slots that are live for defers (see
			// the code setting livedefer in epilogue).
			if lv.noClobberArgs && n.Class == ir.PPARAM {
				continue
			}
			clobberVar(b, n)
		}
	}
}

// clobberVar generates code to trash the pointers in v.
// Clobbering instructions are added to the end of b.Values.
func clobberVar(b *ssa.Block, v *ir.Name) {
	clobberWalk(b, v, 0, v.Type())
}

// b = block to which we append instructions
// v = variable
// offset = offset of (sub-portion of) variable to clobber (in bytes)
// t = type of sub-portion of v.
func clobberWalk(b *ssa.Block, v *ir.Name, offset int64, t *types.Type) {
	if !t.HasPointers() {
		return
	}
	switch t.Kind() {
	case types.TPTR,
		types.TUNSAFEPTR,
		types.TFUNC,
		types.TCHAN,
		types.TMAP:
		clobberPtr(b, v, offset)

	case types.TSTRING:
		// struct { byte *str; int len; }
		clobberPtr(b, v, offset)

	case types.TINTER:
		// struct { Itab *tab; void *data; }
		// or, when isnilinter(t)==true:
		// struct { Type *type; void *data; }
		clobberPtr(b, v, offset)
		clobberPtr(b, v, offset+int64(types.PtrSize))

	case types.TSLICE:
		// struct { byte *array; int len; int cap; }
		clobberPtr(b, v, offset)

	case types.TARRAY:
		for i := int64(0); i < t.NumElem(); i++ {
			clobberWalk(b, v, offset+i*t.Elem().Size(), t.Elem())
		}

	case types.TSTRUCT:
		for _, t1 := range t.Fields() {
			clobberWalk(b, v, offset+t1.Offset, t1.Type)
		}

	default:
		base.Fatalf("clobberWalk: unexpected type, %v", t)
	}
}
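
// For example (illustrative), clobbering a dead variable of type
//
//	struct {
//		p *int
//		n int
//		s []byte
//	}
//
// with 8-byte pointers emits clobbers of offset 0 (p) and offset 16 (the
// slice's array pointer), skipping n and the slice's len and cap words.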

// clobberPtr generates a clobber of the pointer at offset offset in v.
// The clobber instruction is added at the end of b.
func clobberPtr(b *ssa.Block, v *ir.Name, offset int64) {
	b.NewValue0IA(src.NoXPos, ssa.OpClobber, types.TypeVoid, offset, v)
}

func (lv *Liveness) showlive(v *ssa.Value, live bitvec.BitVec) {
	if base.Flag.Live == 0 || ir.FuncName(lv.fn) == "init" || strings.HasPrefix(ir.FuncName(lv.fn), ".") {
		return
	}
	if lv.fn.Wrapper() || lv.fn.Dupok() {
		// Skip reporting liveness information for wrappers and duplicated functions.
		return
	}
	if !(v == nil || v.Op.IsCall()) {
		// Historically we only printed this information at
		// calls. Keep doing so.
		return
	}
	if live.IsEmpty() {
		return
	}

	pos, s := lv.format(v, live)

	base.WarnfAt(pos, "%s", s)
}

func (lv *Liveness) Format(v *ssa.Value) string {
	if v == nil {
		_, s := lv.format(nil, lv.stackMaps[0])
		return s
	}
	if idx := lv.livenessMap.Get(v); idx.StackMapValid() {
		_, s := lv.format(v, lv.stackMaps[idx])
		return s
	}
	return ""
}

func (lv *Liveness) format(v *ssa.Value, live bitvec.BitVec) (src.XPos, string) {
	pos := lv.fn.Nname.Pos()
	if v != nil {
		pos = v.Pos
	}

	s := "live at "
	if v == nil {
		s += fmt.Sprintf("entry to %s:", ir.FuncName(lv.fn))
	} else if sym, ok := v.Aux.(*ssa.AuxCall); ok && sym.Fn != nil {
		fn := sym.Fn.Name
		if pos := strings.Index(fn, "."); pos >= 0 {
			fn = fn[pos+1:]
		}
		s += fmt.Sprintf("call to %s:", fn)
	} else {
		s += "indirect call:"
	}

	// Sort variable names for display. Variables aren't in any particular order, and
	// the order can change by architecture, particularly with differences in regabi.
	var names []string
	for j, n := range lv.vars {
		if live.Get(int32(j)) {
			names = append(names, n.Sym().Name)
		}
	}
	sort.Strings(names)
	for _, v := range names {
		s += " " + v
	}
	return pos, s
}

func (lv *Liveness) printbvec(printed bool, name string, live bitvec.BitVec) bool {
	if live.IsEmpty() {
		return printed
	}

	if !printed {
		fmt.Printf("\t")
	} else {
		fmt.Printf(" ")
	}
	fmt.Printf("%s=", name)

	comma := ""
	for i, n := range lv.vars {
		if !live.Get(int32(i)) {
			continue
		}
		fmt.Printf("%s%s", comma, n.Sym().Name)
		comma = ","
	}
	return true
}

// printeffect is like printbvec, but for valueEffects.
func (lv *Liveness) printeffect(printed bool, name string, pos int32, x bool) bool {
	if !x {
		return printed
	}
	if !printed {
		fmt.Printf("\t")
	} else {
		fmt.Printf(" ")
	}
	fmt.Printf("%s=", name)
	if x {
		fmt.Printf("%s", lv.vars[pos].Sym().Name)
	}

	return true
}

// Prints the computed liveness information and inputs, for debugging.
// This format synthesizes the information used during the multiple passes
// into a single presentation.
func (lv *Liveness) printDebug() {
	fmt.Printf("liveness: %s\n", ir.FuncName(lv.fn))

	for i, b := range lv.f.Blocks {
		if i > 0 {
			fmt.Printf("\n")
		}

		// bb#0 pred=1,2 succ=3,4
		fmt.Printf("bb#%d pred=", b.ID)
		for j, pred := range b.Preds {
			if j > 0 {
				fmt.Printf(",")
			}
			fmt.Printf("%d", pred.Block().ID)
		}
		fmt.Printf(" succ=")
		for j, succ := range b.Succs {
			if j > 0 {
				fmt.Printf(",")
			}
			fmt.Printf("%d", succ.Block().ID)
		}
		fmt.Printf("\n")

		be := lv.blockEffects(b)

		// initial settings
		printed := false
		printed = lv.printbvec(printed, "uevar", be.uevar)
		printed = lv.printbvec(printed, "livein", be.livein)
		if printed {
			fmt.Printf("\n")
		}

		// program listing, with individual effects listed

		if b == lv.f.Entry {
			live := lv.stackMaps[0]
			fmt.Printf("(%s) function entry\n", base.FmtPos(lv.fn.Nname.Pos()))
			fmt.Printf("\tlive=")
			printed = false
			for j, n := range lv.vars {
				if !live.Get(int32(j)) {
					continue
				}
				if printed {
					fmt.Printf(",")
				}
				fmt.Printf("%v", n)
				printed = true
			}
			fmt.Printf("\n")
		}

		for _, v := range b.Values {
			fmt.Printf("(%s) %v\n", base.FmtPos(v.Pos), v.LongString())

			pcdata := lv.livenessMap.Get(v)

			pos, effect := lv.valueEffects(v)
			printed = false
			printed = lv.printeffect(printed, "uevar", pos, effect&uevar != 0)
			printed = lv.printeffect(printed, "varkill", pos, effect&varkill != 0)
			if printed {
				fmt.Printf("\n")
			}

			if pcdata.StackMapValid() {
				fmt.Printf("\tlive=")
				printed = false
				if pcdata.StackMapValid() {
					live := lv.stackMaps[pcdata]
					for j, n := range lv.vars {
						if !live.Get(int32(j)) {
							continue
						}
						if printed {
							fmt.Printf(",")
						}
						fmt.Printf("%v", n)
						printed = true
					}
				}
				fmt.Printf("\n")
			}

			if lv.livenessMap.GetUnsafe(v) {
				fmt.Printf("\tunsafe-point\n")
			}
		}
		if lv.livenessMap.GetUnsafeBlock(b) {
			fmt.Printf("\tunsafe-block\n")
		}

		// bb bitsets
		fmt.Printf("end\n")
		printed = false
		printed = lv.printbvec(printed, "varkill", be.varkill)
		printed = lv.printbvec(printed, "liveout", be.liveout)
		if printed {
			fmt.Printf("\n")
		}
	}

	fmt.Printf("\n")
}

// Dumps a slice of bitmaps to a symbol as a sequence of uint32 values. The
// first word dumped is the total number of bitmaps. The second word is the
// length of the bitmaps. All bitmaps are assumed to be of equal length. The
// remaining bytes are the raw bitmaps.
func (lv *Liveness) emit() (argsSym, liveSym *obj.LSym) {
	// Size args bitmaps to be just large enough to hold the largest pointer.
	// First, find the largest Xoffset node we care about.
	// (Nodes without pointers aren't in lv.vars; ignore them.)
	var maxArgNode *ir.Name
	for _, n := range lv.vars {
		switch n.Class {
		case ir.PPARAM, ir.PPARAMOUT:
			if !n.IsOutputParamInRegisters() {
				if maxArgNode == nil || n.FrameOffset() > maxArgNode.FrameOffset() {
					maxArgNode = n
				}
			}
		}
	}
	// Next, find the offset of the largest pointer in the largest node.
	var maxArgs int64
	if maxArgNode != nil {
		maxArgs = maxArgNode.FrameOffset() + types.PtrDataSize(maxArgNode.Type())
	}

	// Size locals bitmaps to be stkptrsize sized.
	// We cannot shrink them to only hold the largest pointer,
	// because their size is used to calculate the beginning
	// of the local variables frame.
	maxLocals := lv.stkptrsize

	// Temporary symbols for encoding bitmaps.
	var argsSymTmp, liveSymTmp obj.LSym

	args := bitvec.New(int32(maxArgs / int64(types.PtrSize)))
	aoff := objw.Uint32(&argsSymTmp, 0, uint32(len(lv.stackMaps))) // number of bitmaps
	aoff = objw.Uint32(&argsSymTmp, aoff, uint32(args.N))          // number of bits in each bitmap

	locals := bitvec.New(int32(maxLocals / int64(types.PtrSize)))
	loff := objw.Uint32(&liveSymTmp, 0, uint32(len(lv.stackMaps))) // number of bitmaps
	loff = objw.Uint32(&liveSymTmp, loff, uint32(locals.N))        // number of bits in each bitmap

	for _, live := range lv.stackMaps {
		args.Clear()
		locals.Clear()

		lv.pointerMap(live, lv.vars, args, locals)

		aoff = objw.BitVec(&argsSymTmp, aoff, args)
		loff = objw.BitVec(&liveSymTmp, loff, locals)
	}

	// These symbols will be added to Ctxt.Data by addGCLocals
	// after parallel compilation is done.
	return base.Ctxt.GCLocalsSym(argsSymTmp.P), base.Ctxt.GCLocalsSym(liveSymTmp.P)
}
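
// The resulting symbol layout (illustrative), for a function with two stack
// maps over a 2-word args section:
//
//	uint32 2    // number of bitmaps
//	uint32 2    // number of bits per bitmap
//	bitmap 0    // one bit per pointer-sized arg word
//	bitmap 1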

// Entry pointer for liveness analysis. Solves for the liveness of
// pointer variables in the function and emits a runtime data
// structure read by the garbage collector.
// Returns a map from GC safe points to their corresponding stack map index,
// and a map that contains all input parameters that may be partially live.
func Compute(curfn *ir.Func, f *ssa.Func, stkptrsize int64, pp *objw.Progs, retLiveness bool) (Map, map[*ir.Name]bool, *Liveness) {
	// Construct the global liveness state.
	vars, idx := getvariables(curfn)
	lv := newliveness(curfn, f, vars, idx, stkptrsize)

	// Run the dataflow framework.
	lv.prologue()
	lv.solve()
	lv.epilogue()
	if base.Flag.Live > 0 {
		lv.showlive(nil, lv.stackMaps[0])
		for _, b := range f.Blocks {
			for _, val := range b.Values {
				if idx := lv.livenessMap.Get(val); idx.StackMapValid() {
					lv.showlive(val, lv.stackMaps[idx])
				}
			}
		}
	}
	if base.Flag.Live >= 2 {
		lv.printDebug()
	}

	// Update the function cache.
	{
		cache := f.Cache.Liveness.(*livenessFuncCache)
		if cap(lv.be) < 2000 {
			for i := range lv.be {
				lv.be[i] = blockEffects{}
			}
			cache.be = lv.be
		}
		if len(lv.livenessMap.Vals) < 2000 {
			cache.livenessMap = lv.livenessMap
		}
	}

	// Emit the live pointer map data structures
	ls := curfn.LSym
	fninfo := ls.Func()
	fninfo.GCArgs, fninfo.GCLocals = lv.emit()

	p := pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(rtabi.FUNCDATA_ArgsPointerMaps)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = fninfo.GCArgs

	p = pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(rtabi.FUNCDATA_LocalsPointerMaps)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = fninfo.GCLocals

	if x := lv.emitStackObjects(); x != nil {
		p := pp.Prog(obj.AFUNCDATA)
		p.From.SetConst(rtabi.FUNCDATA_StackObjects)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = x
	}

	retLv := lv
	if !retLiveness {
		retLv = nil
	}

	return lv.livenessMap, lv.partLiveArgs, retLv
}

func (lv *Liveness) emitStackObjects() *obj.LSym {
	var vars []*ir.Name
	for _, n := range lv.fn.Dcl {
		if shouldTrack(n) && n.Addrtaken() && n.Esc() != ir.EscHeap {
			vars = append(vars, n)
		}
	}
	if len(vars) == 0 {
		return nil
	}

	// Sort variables from lowest to highest address.
	slices.SortFunc(vars, func(a, b *ir.Name) int { return cmp.Compare(a.FrameOffset(), b.FrameOffset()) })

	// Populate the stack object data.
	// Format must match runtime/stack.go:stackObjectRecord.
	x := base.Ctxt.Lookup(lv.fn.LSym.Name + ".stkobj")
	x.Set(obj.AttrContentAddressable, true)
	lv.fn.LSym.Func().StackObjects = x
	off := 0
	off = objw.Uintptr(x, off, uint64(len(vars)))
	for _, v := range vars {
		// Frame offsets are signed: arguments and return values have
		// non-negative offsets, while locals have negative offsets.
		// Either way, the offset must fit in 32 bits, as the runtime's
		// stackObjectRecord stores it in a uint32.
		frameOffset := v.FrameOffset()
		if frameOffset != int64(int32(frameOffset)) {
			base.Fatalf("frame offset too big: %v %d", v, frameOffset)
		}
		off = objw.Uint32(x, off, uint32(frameOffset))

		t := v.Type()
		sz := t.Size()
		if sz != int64(int32(sz)) {
			base.Fatalf("stack object too big: %v of type %v, size %d", v, t, sz)
		}
		lsym, ptrBytes := reflectdata.GCSym(t)
		off = objw.Uint32(x, off, uint32(sz))
		off = objw.Uint32(x, off, uint32(ptrBytes))
		off = objw.SymPtrOff(x, off, lsym)
	}

	if base.Flag.Live != 0 {
		for _, v := range vars {
			base.WarnfAt(v.Pos(), "stack object %v %v", v, v.Type())
		}
	}

	return x
}
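
// Each record written above follows the layout expected by the runtime
// (runtime/stack.go:stackObjectRecord): after a uintptr count of records,
// every stack object contributes
//
//	uint32 frame offset (signed value stored in 32 bits)
//	uint32 size of the object in bytes
//	uint32 prefix of the object that contains all pointers (ptrBytes)
//	uint32 reference to the GC pointer mask symbol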

// isfat reports whether a variable of type t needs multiple assignments to initialize.
// For example:
//
//	type T struct{ x, y int }
//	x := T{x: 0, y: 1}
//
// Then we need:
//
//	var t T
//	t.x = 0
//	t.y = 1
//
// to fully initialize t.
func isfat(t *types.Type) bool {
	if t != nil {
		switch t.Kind() {
		case types.TSLICE, types.TSTRING,
			types.TINTER:
			return true
		case types.TARRAY:
			// Array of 1 element, check if it's fat.
			if t.NumElem() == 1 {
				return isfat(t.Elem())
			}
			return true
		case types.TSTRUCT:
			// One-field struct, check if it's fat.
			if t.NumFields() == 1 {
				return isfat(t.Field(0).Type)
			}
			return true
		}
	}

	return false
}

// WriteFuncMap writes the pointer bitmaps for bodyless function fn's
// inputs and outputs as the value of symbol <fn>.args_stackmap.
// If fn has outputs, two bitmaps are written, otherwise just one.
func WriteFuncMap(fn *ir.Func, abiInfo *abi.ABIParamResultInfo) {
	if ir.FuncName(fn) == "_" {
		return
	}
	nptr := int(abiInfo.ArgWidth() / int64(types.PtrSize))
	bv := bitvec.New(int32(nptr))

	for _, p := range abiInfo.InParams() {
		typebits.SetNoCheck(p.Type, p.FrameOffset(abiInfo), bv)
	}

	nbitmap := 1
	if fn.Type().NumResults() > 0 {
		nbitmap = 2
	}
	lsym := base.Ctxt.Lookup(fn.LSym.Name + ".args_stackmap")
	lsym.Set(obj.AttrLinkname, true) // allow args_stackmap to be referenced from assembly
	off := objw.Uint32(lsym, 0, uint32(nbitmap))
	off = objw.Uint32(lsym, off, uint32(bv.N))
	off = objw.BitVec(lsym, off, bv)

	if fn.Type().NumResults() > 0 {
		for _, p := range abiInfo.OutParams() {
			if len(p.Registers) == 0 {
				typebits.SetNoCheck(p.Type, p.FrameOffset(abiInfo), bv)
			}
		}
		off = objw.BitVec(lsym, off, bv)
	}

	objw.Global(lsym, int32(off), obj.RODATA|obj.LOCAL)
}
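
// For a bodyless function such as (illustrative)
//
//	func add(p *int, n int) *int
//
// laid out on the stack with 8-byte words, ArgWidth covers three words
// (p, n, and the result), so add.args_stackmap holds nbitmap=2, n=3, an
// input bitmap with bit 0 set (for p), and an output bitmap with bits 0
// and 2 set: the output pass writes into the same bit vector, so the
// second bitmap also carries the input bits.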