1
2
3
4
5 package ssa
6
7 import (
8 "cmd/compile/internal/reflectdata"
9 "cmd/compile/internal/types"
10 "cmd/internal/obj"
11 "cmd/internal/objabi"
12 "cmd/internal/src"
13 "fmt"
14 "internal/buildcfg"
15 )
16
17
18
19
20
21
22
// A ZeroRegion records parts of an object which are known to be zero.
// A ZeroRegion only applies to a single memory state (it is keyed by the
// ID of a memory value in the zeroes map built by computeZeroMap).
// The ptrSize*i'th to (ptrSize*(i+1)-1)'th bytes of base are known to be
// zero if bit i of mask is set. Only the first 64 words are tracked.
type ZeroRegion struct {
	base *Value
	mask uint64
}
27
28
29
30 func mightBeHeapPointer(v *Value) bool {
31 if IsGlobalAddr(v) {
32 return false
33 }
34 return true
35 }
36
37
38
39
// mightContainHeapPointer reports whether the data currently at addresses
// [ptr, ptr+size) might contain heap pointers, at the memory state mem.
// zeroes provides known-zero information for memory states
// (see computeZeroMap); a region known to be all zero cannot contain
// heap pointers.
func mightContainHeapPointer(ptr *Value, size int64, mem *Value, zeroes map[ID]ZeroRegion) bool {
	if IsReadOnlyGlobalAddr(ptr) {
		// Read-only globals can't contain heap pointers.
		return false
	}

	// Decompose ptr into a base pointer plus a constant offset.
	var off int64
	for ptr.Op == OpOffPtr {
		off += ptr.AuxInt
		ptr = ptr.Args[0]
	}

	ptrSize := ptr.Block.Func.Config.PtrSize
	if off%ptrSize != 0 {
		// Unaligned offset: we can't map it onto the per-word zero
		// mask, so conservatively assume heap pointers are possible.
		return true
	}
	if size%ptrSize != 0 {
		ptr.Fatalf("unaligned pointer write")
	}
	if off < 0 || off+size > 64*ptrSize {
		// Write goes off the end of the region tracked by the
		// 64-bit zero mask.
		return true
	}
	z := zeroes[mem.ID]
	if ptr != z.base {
		// Not the object whose zeroness we know about at this
		// memory state.
		return true
	}
	// Mask selecting the words covered by [off, off+size).
	m := (uint64(1)<<(size/ptrSize) - 1) << (off / ptrSize)

	if z.mask&m == m {
		// All the written words are known to be zero, and zero
		// is not a heap pointer.
		return false
	}
	return true
}
80
81
82
83
// needwb reports whether store op v (an OpStore, OpMove, or OpZero) needs
// a write barrier. zeroes provides known-zero information for memory
// states (see computeZeroMap).
func needwb(v *Value, zeroes map[ID]ZeroRegion) bool {
	t, ok := v.Aux.(*types.Type)
	if !ok {
		v.Fatalf("store aux is not a type: %s", v.LongString())
	}
	if !t.HasPointers() {
		// No pointers being written, no barrier needed.
		return false
	}
	dst := v.Args[0]
	if IsStackAddr(dst) {
		// Writes to the stack never need write barriers.
		return false
	}
	// If the destination might already hold heap pointers (so the GC
	// could need to observe the values being overwritten), we need
	// the barrier.
	if mightContainHeapPointer(dst, t.Size(), v.MemoryArg(), zeroes) {
		return true
	}
	// Destination is clean. Check whether the values being written
	// might themselves be heap pointers the GC must see.
	switch v.Op {
	case OpStore:
		if !mightBeHeapPointer(v.Args[1]) {
			return false
		}
	case OpZero:
		// Zero writes no pointers at all.
		return false
	case OpMove:
		if !mightContainHeapPointer(v.Args[1], t.Size(), v.Args[2], zeroes) {
			return false
		}
	default:
		v.Fatalf("store op unknown: %s", v.LongString())
	}
	return true
}
119
120
121 func needWBsrc(v *Value) bool {
122 return !IsGlobalAddr(v)
123 }
124
125
126
// needWBdst reports whether the GC needs to see what is currently stored
// at *ptr (the old value) when ptr is the destination of a pointer store
// at memory state mem. If the destination word is known to be zero, the
// old value carries no information and need not be recorded.
func needWBdst(ptr, mem *Value, zeroes map[ID]ZeroRegion) bool {
	// Decompose ptr into base pointer plus constant offset.
	var off int64
	for ptr.Op == OpOffPtr {
		off += ptr.AuxInt
		ptr = ptr.Args[0]
	}
	ptrSize := ptr.Block.Func.Config.PtrSize
	if off%ptrSize != 0 {
		// Unaligned offset: can't consult the per-word zero mask;
		// be conservative.
		return true
	}
	if off < 0 || off >= 64*ptrSize {
		// Write goes outside the region tracked by the 64-bit mask.
		return true
	}
	z := zeroes[mem.ID]
	if ptr != z.base {
		// Not the object whose zeroness we know about here.
		return true
	}
	// The old value needs recording unless the destination word is
	// known to be zero.
	return z.mask>>uint(off/ptrSize)&1 == 0
}
150
151
152
153
154
155
156
157
158
159
160
161
162
163
// writebarrier expands write barrier ops (Store, Move, Zero that needwb
// says require barriers) into explicit control flow of the form
//
//	if writeBarrier != 0 {
//		buf := gcWriteBarrierN()  // reserve N write-buffer slots
//		buf[0] = val; buf[1] = *ptr; ...
//	}
//	*ptr = val
//
// A contiguous sequence of barriered stores is expanded together under a
// single flag check. Zero/Move barriers become calls to runtime wbZero/
// wbMove inside the guarded block.
func writebarrier(f *Func) {
	if !f.fe.UseWriteBarrier() {
		return
	}

	// Number of write-buffer entries we request per OpWB call.
	// NOTE(review): presumably this must match the runtime's
	// per-call write-buffer reservation limit — confirm against
	// runtime/mwbbuf.go before changing.
	const maxEntries = 8

	var sb, sp, wbaddr, const0 *Value
	var cgoCheckPtrWrite, cgoCheckMemmove *obj.LSym
	var wbZero, wbMove *obj.LSym
	var stores, after []*Value
	var sset, sset2 *sparseSet
	var storeNumber []int32

	// Map from a call value's ID to the SelectN [1] (memory) value
	// that uses it. Needed by computeZeroMap/IsNewObject.
	select1 := f.Cache.allocValueSlice(f.NumValues())
	defer func() { f.Cache.freeValueSlice(select1) }()
	for _, b := range f.Blocks {
		for _, v := range b.Values {
			if v.Op != OpSelectN {
				continue
			}
			if v.AuxInt != 1 {
				continue
			}
			select1[v.Args[0].ID] = v
		}
	}

	zeroes := f.computeZeroMap(select1)
	for _, b := range f.Blocks {
		// First, mark every store in this block that needs a write
		// barrier by switching it to the corresponding *WB op, and
		// count how many there are.
		nWBops := 0 // rewritten WB ops remaining to be expanded in this block
		for _, v := range b.Values {
			switch v.Op {
			case OpStore, OpMove, OpZero:
				if needwb(v, zeroes) {
					switch v.Op {
					case OpStore:
						v.Op = OpStoreWB
					case OpMove:
						v.Op = OpMoveWB
					case OpZero:
						v.Op = OpZeroWB
					}
					nWBops++
				}
			}
		}
		if nWBops == 0 {
			continue
		}

		if wbaddr == nil {
			// Lazy one-time initialization of values shared by all
			// expansions: SP/SB, the address of the runtime
			// writeBarrier flag, the runtime helpers, and zero.
			initpos := f.Entry.Pos
			sp, sb = f.spSb()
			wbsym := f.fe.Syslook("writeBarrier")
			wbaddr = f.Entry.NewValue1A(initpos, OpAddr, f.Config.Types.UInt32Ptr, wbsym, sb)
			wbZero = f.fe.Syslook("wbZero")
			wbMove = f.fe.Syslook("wbMove")
			if buildcfg.Experiment.CgoCheck2 {
				cgoCheckPtrWrite = f.fe.Syslook("cgoCheckPtrWrite")
				cgoCheckMemmove = f.fe.Syslook("cgoCheckMemmove")
			}
			const0 = f.ConstInt32(f.Config.Types.UInt32, 0)

			// Auxiliary structures for computing store order.
			sset = f.newSparseSet(f.NumValues())
			defer f.retSparseSet(sset)
			sset2 = f.newSparseSet(f.NumValues())
			defer f.retSparseSet(sset2)
			storeNumber = f.Cache.allocInt32Slice(f.NumValues())
			defer f.Cache.freeInt32Slice(storeNumber)
		}

		// Order the block's values in store order so a contiguous
		// run of WB stores can be found.
		b.Values = storeOrder(b.Values, sset, storeNumber)
	again:
		// Find the last contiguous sequence of WB stores in the
		// block (scanning backwards). The branch is inserted there;
		// values after the sequence move to a new block. A limited
		// number of interleaved plain stores and any VarDef/VarLive
		// are allowed inside the sequence.
		var last *Value
		var start, end int
		var nonPtrStores int
		values := b.Values
		hasMove := false
	FindSeq:
		for i := len(values) - 1; i >= 0; i-- {
			w := values[i]
			switch w.Op {
			case OpStoreWB, OpMoveWB, OpZeroWB:
				start = i
				if last == nil {
					last = w
					end = i + 1
				}
				nonPtrStores = 0
				if w.Op == OpMoveWB {
					hasMove = true
				}
			case OpVarDef, OpVarLive:
				continue
			case OpStore:
				if last == nil {
					continue
				}
				nonPtrStores++
				if nonPtrStores > 2 {
					break FindSeq
				}
				if hasMove {
					// Don't absorb a plain store into a
					// sequence that already contains a
					// MoveWB.
					// NOTE(review): presumably because the
					// expansion would reorder this store
					// relative to memory the Move reads or
					// writes — confirm against the change
					// that introduced hasMove.
					break FindSeq
				}
			default:
				if last == nil {
					continue
				}
				break FindSeq
			}
		}
		// Split the block's values: the WB sequence itself, and
		// everything after it (which will move to the merge block).
		stores = append(stores[:0], b.Values[start:end]...)
		after = append(after[:0], b.Values[end:]...)
		b.Values = b.Values[:start]

		// Memory state and position just before the WB sequence.
		mem := stores[0].MemoryArg()
		pos := stores[0].Pos

		// If the first store's address comes through a nil check on
		// the incoming memory, remember it: the check must be
		// re-materialized in the new blocks so the address arguments
		// there are properly dominated by a check.
		var nilcheck, nilcheckThen, nilcheckEnd *Value
		if a := stores[0].Args[0]; a.Op == OpNilCheck && a.Args[1] == mem {
			nilcheck = a
		}

		// A MoveWB source that is "volatile" (an address into the
		// outgoing-args area, clobbered by any call) must be copied
		// to a temporary before we emit any calls. Each distinct
		// volatile source gets one temp; duplicates are found by a
		// linear scan since there's rarely more than one.
		type volatileCopy struct {
			src *Value // address of original volatile value
			tmp *Value // address of temporary we've copied it to
		}
		var volatiles []volatileCopy

		if !(f.ABIDefault == f.ABI1 && len(f.Config.intParamRegs) >= 3) {
			// Copying is only needed when the runtime calls below
			// pass their arguments on the stack; with >=3 register
			// args the outgoing-args area isn't used by them.
			copyLoop:
			for _, w := range stores {
				if w.Op == OpMoveWB {
					val := w.Args[1]
					if isVolatile(val) {
						for _, c := range volatiles {
							if val == c.src {
								continue copyLoop // already copied
							}
						}

						t := val.Type.Elem()
						tmp := f.NewLocal(w.Pos, t)
						mem = b.NewValue1A(w.Pos, OpVarDef, types.TypeMem, tmp, mem)
						tmpaddr := b.NewValue2A(w.Pos, OpLocalAddr, t.PtrTo(), tmp, sp, mem)
						siz := t.Size()
						mem = b.NewValue3I(w.Pos, OpMove, types.TypeMem, siz, tmpaddr, val, mem)
						mem.Aux = t
						volatiles = append(volatiles, volatileCopy{val, tmpaddr})
					}
				}
			}
		}

		// Build the new CFG: b -> {bThen, bEnd}, bThen -> bEnd.
		bThen := f.NewBlock(BlockPlain)
		bEnd := f.NewBlock(b.Kind)
		bThen.Pos = pos
		bEnd.Pos = b.Pos
		b.Pos = pos

		// bEnd takes over b's role: its controls, likeliness, and
		// successor edges (predecessors of the successors rewired).
		bEnd.CopyControls(b)
		bEnd.Likely = b.Likely
		for _, e := range b.Succs {
			bEnd.Succs = append(bEnd.Succs, e)
			e.b.Preds[e.i].b = bEnd
		}

		// Emit the write-barrier-enabled test in b:
		// load the runtime flag and branch if it is nonzero.
		cfgtypes := &f.Config.Types
		flag := b.NewValue2(pos, OpLoad, cfgtypes.UInt32, wbaddr, mem)
		flag = b.NewValue2(pos, OpNeq32, cfgtypes.Bool, flag, const0)
		b.Kind = BlockIf
		b.SetControl(flag)
		b.Likely = BranchUnlikely
		b.Succs = b.Succs[:0]
		b.AddEdgeTo(bThen)
		b.AddEdgeTo(bEnd)
		bThen.AddEdgeTo(bEnd)

		// Memory state threaded through the barrier-enabled branch.
		memThen := mem

		if nilcheck != nil {
			// Re-materialize the nil check inside bThen so loads
			// through the checked pointer there are dominated by it.
			nilcheckThen = bThen.NewValue2(nilcheck.Pos, OpNilCheck, nilcheck.Type, nilcheck.Args[0], memThen)
		}

		// Record pointer writes in the GC's write buffer, inside
		// bThen. Pointers are batched: up to maxEntries per OpWB
		// call, then stored into consecutive buffer slots.

		// srcs tracks value IDs already recorded as stored values,
		// to avoid duplicate buffer entries.
		srcs := sset
		srcs.clear()

		// dsts tracks destination IDs whose old value has already
		// been recorded.
		dsts := sset2
		dsts.clear()

		// Pending pointers (with their positions) waiting to be
		// flushed into a write buffer.
		type write struct {
			ptr *Value
			pos src.XPos
		}
		var writeStore [maxEntries]write
		writes := writeStore[:0]

		// flush emits one OpWB call reserving len(writes) buffer
		// slots and stores each pending pointer into its slot.
		flush := func() {
			if len(writes) == 0 {
				return
			}
			// Obtain write buffer space and the updated memory.
			t := types.NewTuple(types.Types[types.TUINTPTR].PtrTo(), types.TypeMem)
			call := bThen.NewValue1I(pos, OpWB, t, int64(len(writes)), memThen)
			curPtr := bThen.NewValue1(pos, OpSelect0, types.Types[types.TUINTPTR].PtrTo(), call)
			memThen = bThen.NewValue1(pos, OpSelect1, types.TypeMem, call)
			// Write each pending pointer to its buffer slot.
			for i, write := range writes {
				wbuf := bThen.NewValue1I(write.pos, OpOffPtr, types.Types[types.TUINTPTR].PtrTo(), int64(i)*f.Config.PtrSize, curPtr)
				memThen = bThen.NewValue3A(write.pos, OpStore, types.TypeMem, types.Types[types.TUINTPTR], wbuf, write.ptr, memThen)
			}
			writes = writes[:0]
		}
		// addEntry queues a pointer for the write buffer, flushing
		// when the batch is full.
		addEntry := func(pos src.XPos, ptr *Value) {
			writes = append(writes, write{ptr: ptr, pos: pos})
			if len(writes) == maxEntries {
				flush()
			}
		}

		// Queue new-value and old-value pointers for every StoreWB
		// in the sequence.
		for _, w := range stores {
			if w.Op != OpStoreWB {
				continue
			}
			pos := w.Pos
			ptr := w.Args[0]
			val := w.Args[1]
			if !srcs.contains(val.ID) && needWBsrc(val) {
				srcs.add(val.ID)
				addEntry(pos, val)
			}
			if !dsts.contains(ptr.ID) && needWBdst(ptr, w.Args[2], zeroes) {
				dsts.add(ptr.ID)
				// Use the re-materialized nil check inside
				// bThen for the load of the old value, so the
				// load is dominated by a check.
				if ptr == nilcheck {
					ptr = nilcheckThen
				}
				// Load the old value at the destination and
				// queue it for the write buffer.
				oldVal := bThen.NewValue2(pos, OpLoad, types.Types[types.TUINTPTR], ptr, memThen)
				addEntry(pos, oldVal)
			}
			f.fe.Func().SetWBPos(pos)
			nWBops--
		}
		flush()

		// Expand Zero/Move barriers into runtime calls in bThen.
		for _, w := range stores {
			pos := w.Pos
			dst := w.Args[0]
			if dst == nilcheck {
				dst = nilcheckThen
			}
			switch w.Op {
			case OpZeroWB:
				typ := reflectdata.TypeLinksym(w.Aux.(*types.Type))
				// zeroWB(&typ, dst)
				taddr := b.NewValue1A(pos, OpAddr, b.Func.Config.Types.Uintptr, typ, sb)
				memThen = wbcall(pos, bThen, wbZero, sp, memThen, taddr, dst)
				f.fe.Func().SetWBPos(pos)
				nWBops--
			case OpMoveWB:
				src := w.Args[1]
				// Use the temporary copy if the source was
				// volatile (clobbered by the call itself).
				if isVolatile(src) {
					for _, c := range volatiles {
						if src == c.src {
							src = c.tmp
							break
						}
					}
				}
				typ := reflectdata.TypeLinksym(w.Aux.(*types.Type))
				// moveWB(&typ, dst, src)
				taddr := b.NewValue1A(pos, OpAddr, b.Func.Config.Types.Uintptr, typ, sb)
				memThen = wbcall(pos, bThen, wbMove, sp, memThen, taddr, dst, src)
				f.fe.Func().SetWBPos(pos)
				nWBops--
			}
		}

		// Merge memory from the two paths.
		mem = bEnd.NewValue2(pos, OpPhi, types.TypeMem, mem, memThen)

		if nilcheck != nil {
			// Re-materialize the nil check in bEnd as well, for
			// the actual stores emitted below.
			nilcheckEnd = bEnd.NewValue2(nilcheck.Pos, OpNilCheck, nilcheck.Type, nilcheck.Args[0], mem)
		}

		// Do the actual stores (now unconditional) in bEnd.
		for _, w := range stores {
			pos := w.Pos
			dst := w.Args[0]
			if dst == nilcheck {
				dst = nilcheckEnd
			}
			switch w.Op {
			case OpStoreWB:
				val := w.Args[1]
				if buildcfg.Experiment.CgoCheck2 {
					// Issue cgo checking.
					mem = wbcall(pos, bEnd, cgoCheckPtrWrite, sp, mem, dst, val)
				}
				mem = bEnd.NewValue3A(pos, OpStore, types.TypeMem, w.Aux, dst, val, mem)
			case OpZeroWB:
				mem = bEnd.NewValue2I(pos, OpZero, types.TypeMem, w.AuxInt, dst, mem)
				mem.Aux = w.Aux
			case OpMoveWB:
				src := w.Args[1]
				if isVolatile(src) {
					for _, c := range volatiles {
						if src == c.src {
							src = c.tmp
							break
						}
					}
				}
				if buildcfg.Experiment.CgoCheck2 {
					// Issue cgo checking.
					typ := reflectdata.TypeLinksym(w.Aux.(*types.Type))
					taddr := b.NewValue1A(pos, OpAddr, b.Func.Config.Types.Uintptr, typ, sb)
					mem = wbcall(pos, bEnd, cgoCheckMemmove, sp, mem, taddr, dst, src)
				}
				mem = bEnd.NewValue3I(pos, OpMove, types.TypeMem, w.AuxInt, dst, src, mem)
				mem.Aux = w.Aux
			case OpVarDef, OpVarLive:
				mem = bEnd.NewValue1A(pos, w.Op, types.TypeMem, w.Aux, mem)
			case OpStore:
				val := w.Args[1]
				mem = bEnd.NewValue3A(pos, OpStore, types.TypeMem, w.Aux, dst, val, mem)
			}
		}

		// The last store in the sequence becomes an OpWBend marker
		// carrying the final memory, so downstream users of the old
		// last store see the merged memory. (It is reset in place so
		// existing uses keep pointing at it.)
		bEnd.Values = append(bEnd.Values, last)
		last.Block = bEnd
		last.reset(OpWBend)
		last.Pos = last.Pos.WithNotStmt()
		last.Type = types.TypeMem
		last.AddArg(mem)

		// Free all the old stores except 'last': drop their args
		// first (so freeing order doesn't matter), then free them.
		for _, w := range stores {
			if w != last {
				w.resetArgs()
			}
		}
		for _, w := range stores {
			if w != last {
				f.freeValue(w)
			}
		}
		// The original nil check may now be dead (replaced by the
		// re-materialized copies).
		if nilcheck != nil && nilcheck.Uses == 0 {
			nilcheck.reset(OpInvalid)
		}

		// Values after the expanded sequence move to bEnd.
		bEnd.Values = append(bEnd.Values, after...)
		for _, w := range after {
			w.Block = bEnd
		}

		// If the original block still has unexpanded WB ops
		// (earlier sequences), process it again.
		if nWBops > 0 {
			goto again
		}
	}
}
598
599
600
// computeZeroMap returns a map from the ID of a memory value to a
// ZeroRegion describing locations known to be zero at that memory state.
// select1 maps a call value's ID to its SelectN [1] (memory) projection.
//
// The analysis is conservative: for each memory state it tracks at most
// one object (a fresh allocation) and only its first 64 pointer-sized
// words.
func (f *Func) computeZeroMap(select1 []*Value) map[ID]ZeroRegion {
	ptrSize := f.Config.PtrSize
	zeroes := map[ID]ZeroRegion{}

	// Seed: every fresh allocation is entirely zero at the memory
	// state produced by its allocating call.
	for _, b := range f.Blocks {
		for _, v := range b.Values {
			if mem, ok := IsNewObject(v, select1); ok {
				// In package runtime the result may be typed
				// unsafe.Pointer, so the element size (and hence
				// the zeroed extent) can't be determined; skip.
				if types.LocalPkg.Path == "runtime" && v.Type.IsUnsafePtr() {
					continue
				}

				nptr := min(64, v.Type.Elem().Size()/ptrSize)
				zeroes[mem.ID] = ZeroRegion{base: v, mask: 1<<uint(nptr) - 1}
			}
		}
	}

	// Propagate: iterate to a fixed point, carrying zero information
	// across stores. A store to the tracked object clears the bits it
	// overwrites; any store through an unrelated base pointer stops
	// propagation (it might alias the tracked object).
	for {
		changed := false
		for _, b := range f.Blocks {
			for _, v := range b.Values {
				if v.Op != OpStore {
					continue
				}
				z, ok := zeroes[v.MemoryArg().ID]
				if !ok {
					continue
				}
				ptr := v.Args[0]
				var off int64
				size := v.Aux.(*types.Type).Size()
				for ptr.Op == OpOffPtr {
					off += ptr.AuxInt
					ptr = ptr.Args[0]
				}
				if ptr != z.base {
					// Different base pointer — it could alias
					// the tracked object through an offset we
					// can't see, so drop all zero information
					// past this store.
					continue
				}

				// Round the written range out to whole words
				// before clearing mask bits.
				if d := off % ptrSize; d != 0 {
					off -= d
					size += d
				}
				if d := size % ptrSize; d != 0 {
					size += ptrSize - d
				}
				// Clip to the 64 tracked words.
				minimum := max(off, 0)
				maximum := min(off+size, 64*ptrSize)

				// Clear bits for words this store overwrites;
				// they are no longer known to be zero.
				for i := minimum; i < maximum; i += ptrSize {
					bit := i / ptrSize
					z.mask &^= 1 << uint(bit)
				}
				if z.mask == 0 {
					// Nothing left known zero — not worth
					// recording.
					continue
				}
				// Record the surviving zero info at the memory
				// state produced by this store.
				if zeroes[v.ID] != z {
					zeroes[v.ID] = z
					changed = true
				}
			}
		}
		if !changed {
			break
		}
	}
	if f.pass.debug > 0 {
		fmt.Printf("func %s\n", f.Name)
		for mem, z := range zeroes {
			fmt.Printf("  memory=v%d ptr=%v zeromask=%b\n", mem, z.base, z.mask)
		}
	}
	return zeroes
}
697
698
// wbcall emits a write-barrier-related runtime call to fn in block b,
// passing args (all uintptr-sized), and returns the resulting memory.
// Arguments go in registers when the default ABI is the register ABI
// with at least 3 integer parameter registers; otherwise they are
// spilled to the outgoing-args area on the stack.
func wbcall(pos src.XPos, b *Block, fn *obj.LSym, sp, mem *Value, args ...*Value) *Value {
	config := b.Func.Config
	typ := config.Types.Uintptr // type of all argument values
	nargs := len(args)

	// Whether the callee receives its arguments in registers.
	inRegs := b.Func.ABIDefault == b.Func.ABI1 && len(config.intParamRegs) >= 3

	if !inRegs {
		// Store the arguments to consecutive stack slots in the
		// outgoing-args area; the call then takes only memory.
		off := config.ctxt.Arch.FixedFrameSize
		for _, arg := range args {
			stkaddr := b.NewValue1I(pos, OpOffPtr, typ.PtrTo(), off, sp)
			mem = b.NewValue3A(pos, OpStore, types.TypeMem, typ, stkaddr, arg, mem)
			off += typ.Size()
		}
		args = args[:0]
	}

	// Memory is always the final argument of the call.
	args = append(args, mem)

	// Build the call. Capacity 3 covers the largest helper signature
	// used by this pass (e.g. wbMove/cgoCheckMemmove take 3 args).
	argTypes := make([]*types.Type, nargs, 3)
	for i := 0; i < nargs; i++ {
		argTypes[i] = typ
	}
	call := b.NewValue0A(pos, OpStaticCall, types.TypeResultMem, StaticAuxCall(fn, b.Func.ABIDefault.ABIAnalyzeTypes(argTypes, nil)))
	call.AddArgs(args...)
	call.AuxInt = int64(nargs) * typ.Size() // stack space used for arguments
	// Project out the memory result.
	return b.NewValue1I(pos, OpSelectN, types.TypeMem, 0, call)
}
730
731
732 func IsStackAddr(v *Value) bool {
733 for v.Op == OpOffPtr || v.Op == OpAddPtr || v.Op == OpPtrIndex || v.Op == OpCopy {
734 v = v.Args[0]
735 }
736 switch v.Op {
737 case OpSP, OpLocalAddr, OpSelectNAddr, OpGetCallerSP:
738 return true
739 }
740 return false
741 }
742
743
744 func IsGlobalAddr(v *Value) bool {
745 for v.Op == OpOffPtr || v.Op == OpAddPtr || v.Op == OpPtrIndex || v.Op == OpCopy {
746 v = v.Args[0]
747 }
748 if v.Op == OpAddr && v.Args[0].Op == OpSB {
749 return true
750 }
751 if v.Op == OpConstNil {
752 return true
753 }
754 if v.Op == OpLoad && IsReadOnlyGlobalAddr(v.Args[0]) {
755 return true
756 }
757 return false
758 }
759
760
761 func IsReadOnlyGlobalAddr(v *Value) bool {
762 if v.Op == OpConstNil {
763
764 return true
765 }
766 if v.Op == OpAddr && v.Aux != nil && v.Aux.(*obj.LSym).Type == objabi.SRODATA {
767 return true
768 }
769 return false
770 }
771
772
773
// IsNewObject reports whether v is a pointer to a freshly allocated
// (and therefore zeroed) object. If so it also returns the memory state
// mem at which v is known to be all zero. select1 maps a call value's ID
// to its SelectN [1] (memory) projection.
func IsNewObject(v *Value, select1 []*Value) (mem *Value, ok bool) {
	f := v.Block.Func
	c := f.Config
	if f.ABIDefault == f.ABI1 && len(c.intParamRegs) >= 1 {
		// Register ABI: the allocation result is SelectN [0] of the
		// call; its memory is the SelectN [1] of the same call.
		if v.Op != OpSelectN || v.AuxInt != 0 {
			return nil, false
		}
		mem = select1[v.Args[0].ID]
		if mem == nil {
			return nil, false
		}
	} else {
		// Stack ABI: the result is loaded from a stack slot using
		// the memory produced by the call.
		if v.Op != OpLoad {
			return nil, false
		}
		mem = v.MemoryArg()
		if mem.Op != OpSelectN {
			return nil, false
		}
		if mem.Type != types.TypeMem {
			return nil, false
		}
	}
	call := mem.Args[0]
	if call.Op != OpStaticCall {
		return nil, false
	}

	// Identify the allocator and how many parameters it takes; the
	// parameter count is used below to locate the result slot in the
	// stack-ABI case.
	var numParameters int64
	switch {
	case isNewObject(call.Aux):
		numParameters = 1
	case isSpecializedMalloc(call.Aux) && !v.Type.IsUnsafePtr():
		// NOTE(review): unsafe.Pointer results are excluded here;
		// presumably the specialized-malloc result can't be assumed
		// zeroed/typed in that case — confirm with the allocator's
		// contract.
		numParameters = 3
	default:
		return nil, false
	}
	if f.ABIDefault == f.ABI1 && len(c.intParamRegs) >= 1 {
		// Register ABI: v must project directly from this call.
		if v.Args[0] == call {
			return mem, true
		}
		return nil, false
	}
	// Stack ABI: v must load from SP plus the offset of the call's
	// result slot (just past the fixed frame and the parameters).
	if v.Args[0].Op != OpOffPtr {
		return nil, false
	}
	if v.Args[0].Args[0].Op != OpSP {
		return nil, false
	}
	if v.Args[0].AuxInt != c.ctxt.Arch.FixedFrameSize+numParameters*c.RegSize {
		return nil, false
	}
	return mem, true
}
830
831
832
833 func IsSanitizerSafeAddr(v *Value) bool {
834 for v.Op == OpOffPtr || v.Op == OpAddPtr || v.Op == OpPtrIndex || v.Op == OpCopy {
835 v = v.Args[0]
836 }
837 switch v.Op {
838 case OpSP, OpLocalAddr, OpSelectNAddr:
839
840 return true
841 case OpITab, OpStringPtr, OpGetClosurePtr:
842
843
844 return true
845 case OpAddr:
846 vt := v.Aux.(*obj.LSym).Type
847 return vt == objabi.SRODATA || vt == objabi.SLIBFUZZER_8BIT_COUNTER || vt == objabi.SCOVERAGE_COUNTER || vt == objabi.SCOVERAGE_AUXVAR
848 }
849 return false
850 }
851
852
853
854 func isVolatile(v *Value) bool {
855 for v.Op == OpOffPtr || v.Op == OpAddPtr || v.Op == OpPtrIndex || v.Op == OpCopy || v.Op == OpSelectNAddr {
856 v = v.Args[0]
857 }
858 return v.Op == OpSP
859 }
860
View as plain text