// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
5 package ssa
6
7 import (
8 "cmd/compile/internal/reflectdata"
9 "cmd/compile/internal/types"
10 "cmd/internal/obj"
11 "cmd/internal/objabi"
12 "cmd/internal/src"
13 "fmt"
14 "internal/buildcfg"
15 )
16
17
18
19
20
21
22
// A ZeroRegion records parts of an object which are known to be zero.
// A ZeroRegion only applies to a single memory state.
// Each bit in mask is set if the corresponding pointer-sized word of
// the base object is known to be zero.
// Note that this analysis only tracks objects up to 64 pointer-sized
// words (callers clamp offsets against 64*ptrSize).
type ZeroRegion struct {
	base *Value // pointer to the object (e.g. the result of a new(T))
	mask uint64 // bit i set => word i of *base is known zero
}
27
28
29
30 func mightBeHeapPointer(v *Value) bool {
31 if IsGlobalAddr(v) {
32 return false
33 }
34 return true
35 }
36
37
38
39
// mightContainHeapPointer reports whether the memory at addresses
// [ptr, ptr+size) might contain heap pointers at memory state mem.
// It answers false only when it can prove otherwise (read-only globals,
// or memory known to be zero via the zeroes map); otherwise it is
// conservative and returns true.
func mightContainHeapPointer(ptr *Value, size int64, mem *Value, zeroes map[ID]ZeroRegion) bool {
	if IsReadOnlyGlobalAddr(ptr) {
		// The read-only globals section cannot contain heap pointers.
		return false
	}

	// See if we can prove that the queried memory is all zero.

	// Find base pointer and offset by peeling off OpOffPtr wrappers.
	// The remaining base is hopefully the ZeroRegion base recorded for mem.
	var off int64
	for ptr.Op == OpOffPtr {
		off += ptr.AuxInt
		ptr = ptr.Args[0]
	}

	ptrSize := ptr.Block.Func.Config.PtrSize
	if off%ptrSize != 0 {
		// Unaligned offset: can't map to mask bits, be conservative.
		return true
	}
	if size%ptrSize != 0 {
		ptr.Fatalf("unaligned pointer write")
	}
	if off < 0 || off+size > 64*ptrSize {
		// Memory range goes off the end of the tracked 64 words.
		return true
	}
	z := zeroes[mem.ID]
	if ptr != z.base {
		// This isn't the object whose zero state we know at mem.
		return true
	}

	// Mask of the words we're asking about.
	m := (uint64(1)<<(size/ptrSize) - 1) << (off / ptrSize)

	if z.mask&m == m {
		// All queried words are known zero, so no heap pointers.
		return false
	}
	return true
}
80
81
82
83
84 func needwb(v *Value, zeroes map[ID]ZeroRegion) bool {
85 t, ok := v.Aux.(*types.Type)
86 if !ok {
87 v.Fatalf("store aux is not a type: %s", v.LongString())
88 }
89 if !t.HasPointers() {
90 return false
91 }
92 dst := v.Args[0]
93 if IsStackAddr(dst) {
94 return false
95 }
96
97
98 if mightContainHeapPointer(dst, t.Size(), v.MemoryArg(), zeroes) {
99 return true
100 }
101
102
103 switch v.Op {
104 case OpStore:
105 if !mightBeHeapPointer(v.Args[1]) {
106 return false
107 }
108 case OpZero:
109 return false
110 case OpMove:
111 if !mightContainHeapPointer(v.Args[1], t.Size(), v.Args[2], zeroes) {
112 return false
113 }
114 default:
115 v.Fatalf("store op unknown: %s", v.LongString())
116 }
117 return true
118 }
119
120
121 func needWBsrc(v *Value) bool {
122 return !IsGlobalAddr(v)
123 }
124
125
126
// needWBdst reports whether the GC needs to see the old value currently in
// *ptr (at memory state mem) when a pointer is written there. If the slot
// is known to be zero, the old value carries no pointer and may be skipped.
func needWBdst(ptr, mem *Value, zeroes map[ID]ZeroRegion) bool {
	// Detect storing to zeroed memory: peel OpOffPtr to find base+offset.
	var off int64
	for ptr.Op == OpOffPtr {
		off += ptr.AuxInt
		ptr = ptr.Args[0]
	}
	ptrSize := ptr.Block.Func.Config.PtrSize
	if off%ptrSize != 0 {
		// Unaligned offset: can't consult the zero mask, be conservative.
		return true
	}
	if off < 0 || off >= 64*ptrSize {
		// Write goes outside the 64 words we track.
		return true
	}
	z := zeroes[mem.ID]
	if ptr != z.base {
		return true
	}
	// Destination slot known zero => old value need not be reported.
	// (Mask bit set means "known zero", so we need the barrier when it is 0.)
	return z.mask>>uint(off/ptrSize)&1 == 0
}
150
151
152
153
154
155
156
157
158
159
160
161
162
163
// writebarrier expands write-barriered stores in f.
//
// Each OpStore/OpMove/OpZero that needs a barrier (per needwb) is rewritten
// into control flow of the shape:
//
//	if runtime.writeBarrier != 0 {
//	    record relevant old/new pointers in the write barrier buffer (OpWB),
//	    or call runtime wbZero/wbMove for bulk operations
//	}
//	perform the original memory operations
//
// Consecutive barriered ops in a block are expanded together so they share
// one flag check. The function also handles the cgocheck2 experiment by
// inserting cgoCheckPtrWrite/cgoCheckMemmove calls.
func writebarrier(f *Func) {
	if !f.fe.UseWriteBarrier() {
		return
	}

	// maxEntries is the number of pointers that can be recorded per OpWB
	// buffer reservation; writes are flushed in batches of this size.
	const maxEntries = 8

	var sb, sp, wbaddr, const0 *Value
	var cgoCheckPtrWrite, cgoCheckMemmove *obj.LSym
	var wbZero, wbMove *obj.LSym
	var stores, after []*Value
	var sset, sset2 *sparseSet
	var storeNumber []int32

	// select1[id] is the memory projection (OpSelectN, AuxInt==1) of the
	// tuple-producing value with that ID, used by IsNewObject below.
	select1 := f.Cache.allocValueSlice(f.NumValues())
	defer func() { f.Cache.freeValueSlice(select1) }()
	for _, b := range f.Blocks {
		for _, v := range b.Values {
			if v.Op != OpSelectN {
				continue
			}
			if v.AuxInt != 1 {
				continue
			}
			select1[v.Args[0].ID] = v
		}
	}

	zeroes := f.computeZeroMap(select1)
	for _, b := range f.Blocks {
		// Mark the ops in this block that need write barriers by switching
		// them to their WB opcode variants, counting them in nWBops.
		nWBops := 0
		for _, v := range b.Values {
			switch v.Op {
			case OpStore, OpMove, OpZero:
				if needwb(v, zeroes) {
					switch v.Op {
					case OpStore:
						v.Op = OpStoreWB
					case OpMove:
						v.Op = OpMoveWB
					case OpZero:
						v.Op = OpZeroWB
					}
					nWBops++
				}
			}
		}
		if nWBops == 0 {
			continue
		}

		if wbaddr == nil {
			// Lazily initialize values shared by all expansions:
			// SP/SB, the address of runtime.writeBarrier, runtime symbols,
			// the constant 0, and scratch structures for storeOrder.
			initpos := f.Entry.Pos
			sp, sb = f.spSb()
			wbsym := f.fe.Syslook("writeBarrier")
			wbaddr = f.Entry.NewValue1A(initpos, OpAddr, f.Config.Types.UInt32Ptr, wbsym, sb)
			wbZero = f.fe.Syslook("wbZero")
			wbMove = f.fe.Syslook("wbMove")
			if buildcfg.Experiment.CgoCheck2 {
				cgoCheckPtrWrite = f.fe.Syslook("cgoCheckPtrWrite")
				cgoCheckMemmove = f.fe.Syslook("cgoCheckMemmove")
			}
			const0 = f.ConstInt32(f.Config.Types.UInt32, 0)

			// Allocate auxiliary data structures for computing store order.
			sset = f.newSparseSet(f.NumValues())
			defer f.retSparseSet(sset)
			sset2 = f.newSparseSet(f.NumValues())
			defer f.retSparseSet(sset2)
			storeNumber = f.Cache.allocInt32Slice(f.NumValues())
			defer f.Cache.freeInt32Slice(storeNumber)
		}

		// Order values in store order, so memory ops appear in sequence.
		b.Values = storeOrder(b.Values, sset, storeNumber)
	again:
		// Find the longest trailing run of barriered ops (plus interleaved
		// VarDef/VarLive and up to two plain non-pointer OpStores) that can
		// be expanded under a single writeBarrier flag check.
		// last is the last WB op of the run; [start,end) is its index range.
		var last *Value
		var start, end int
		var nonPtrStores int
		values := b.Values
		hasMove := false
	FindSeq:
		for i := len(values) - 1; i >= 0; i-- {
			w := values[i]
			switch w.Op {
			case OpStoreWB, OpMoveWB, OpZeroWB:
				start = i
				if last == nil {
					last = w
					end = i + 1
				}
				nonPtrStores = 0
				if w.Op == OpMoveWB {
					hasMove = true
				}
			case OpVarDef, OpVarLive:
				continue
			case OpStore:
				if last == nil {
					continue
				}
				nonPtrStores++
				if nonPtrStores > 2 {
					break FindSeq
				}
				if hasMove {
					// A plain store can't be folded into a sequence that
					// contains a MoveWB: the expansion may emit writes
					// (volatile-source copies, wbMove marshaling) that must
					// not be reordered with respect to this store, so end
					// the sequence here. NOTE(review): inferred from the
					// volatile-copy handling below — confirm against
					// upstream comments.
					break FindSeq
				}
			default:
				if last == nil {
					continue
				}
				break FindSeq
			}
		}
		stores = append(stores[:0], b.Values[start:end]...) // copy to avoid aliasing
		after = append(after[:0], b.Values[end:]...)
		b.Values = b.Values[:start]

		// The memory state and position entering the sequence.
		mem := stores[0].MemoryArg()
		pos := stores[0].Pos

		// If the source of a MoveWB is volatile (clobbered by a call we will
		// insert), copy it to a temporary stack slot first so the runtime
		// call marshaling cannot destroy it. Only needed when wbMove's
		// arguments are passed on the stack (see the ABI check below).
		type volatileCopy struct {
			src *Value // address of the original volatile value
			tmp *Value // address of the stack temporary holding a copy
		}
		var volatiles []volatileCopy

		if !(f.ABIDefault == f.ABI1 && len(f.Config.intParamRegs) >= 3) {
			// Copy each distinct volatile source once.
		copyLoop:
			for _, w := range stores {
				if w.Op == OpMoveWB {
					val := w.Args[1]
					if isVolatile(val) {
						for _, c := range volatiles {
							if val == c.src {
								continue copyLoop // already copied
							}
						}

						t := val.Type.Elem()
						tmp := f.NewLocal(w.Pos, t)
						mem = b.NewValue1A(w.Pos, OpVarDef, types.TypeMem, tmp, mem)
						tmpaddr := b.NewValue2A(w.Pos, OpLocalAddr, t.PtrTo(), tmp, sp, mem)
						siz := t.Size()
						mem = b.NewValue3I(w.Pos, OpMove, types.TypeMem, siz, tmpaddr, val, mem)
						mem.Aux = t
						volatiles = append(volatiles, volatileCopy{val, tmpaddr})
					}
				}
			}
		}

		// Build the diamond: b -> bThen -> bEnd and b -> bEnd.
		// bThen holds the barrier work; bEnd inherits b's kind and successors.
		bThen := f.NewBlock(BlockPlain)
		bEnd := f.NewBlock(b.Kind)
		bThen.Pos = pos
		bEnd.Pos = b.Pos
		b.Pos = pos

		// Move b's control values and out-edges to bEnd.
		bEnd.CopyControls(b)
		bEnd.Likely = b.Likely
		for _, e := range b.Succs {
			bEnd.Succs = append(bEnd.Succs, e)
			e.b.Preds[e.i].b = bEnd
		}

		// Set up the flag test: if *writeBarrier != 0 goto bThen else bEnd.
		// The branch is marked unlikely — the barrier is usually off.
		cfgtypes := &f.Config.Types
		flag := b.NewValue2(pos, OpLoad, cfgtypes.UInt32, wbaddr, mem)
		flag = b.NewValue2(pos, OpNeq32, cfgtypes.Bool, flag, const0)
		b.Kind = BlockIf
		b.SetControl(flag)
		b.Likely = BranchUnlikely
		b.Succs = b.Succs[:0]
		b.AddEdgeTo(bThen)
		b.AddEdgeTo(bEnd)
		bThen.AddEdgeTo(bEnd)

		// memThen tracks memory along the barrier-taken path.
		memThen := mem

		// srcs/dsts remember which pointers were already queued in the
		// write barrier buffer during this sequence, to avoid duplicates.
		srcs := sset
		srcs.clear()

		dsts := sset2
		dsts.clear()

		// Pending pointers to record in the buffer, flushed maxEntries at
		// a time via an OpWB reservation.
		type write struct {
			ptr *Value
			pos src.XPos
		}
		var writeStore [maxEntries]write
		writes := writeStore[:0]

		// flush reserves len(writes) slots in the write barrier buffer
		// (OpWB returns the slot pointer plus memory) and stores each
		// queued pointer into consecutive slots.
		flush := func() {
			if len(writes) == 0 {
				return
			}
			t := types.NewTuple(types.Types[types.TUINTPTR].PtrTo(), types.TypeMem)
			call := bThen.NewValue1I(pos, OpWB, t, int64(len(writes)), memThen)
			curPtr := bThen.NewValue1(pos, OpSelect0, types.Types[types.TUINTPTR].PtrTo(), call)
			memThen = bThen.NewValue1(pos, OpSelect1, types.TypeMem, call)

			for i, write := range writes {
				wbuf := bThen.NewValue1I(write.pos, OpOffPtr, types.Types[types.TUINTPTR].PtrTo(), int64(i)*f.Config.PtrSize, curPtr)
				memThen = bThen.NewValue3A(write.pos, OpStore, types.TypeMem, types.Types[types.TUINTPTR], wbuf, write.ptr, memThen)
			}
			writes = writes[:0]
		}
		// addEntry queues one pointer, flushing when the batch is full.
		addEntry := func(pos src.XPos, ptr *Value) {
			writes = append(writes, write{ptr: ptr, pos: pos})
			if len(writes) == maxEntries {
				flush()
			}
		}

		// Queue the pointers that single-pointer stores need recorded:
		// the new value (if it might be a heap pointer) and the old value
		// loaded from the destination (if it isn't known zero).
		for _, w := range stores {
			if w.Op != OpStoreWB {
				continue
			}
			pos := w.Pos
			ptr := w.Args[0]
			val := w.Args[1]
			if !srcs.contains(val.ID) && needWBsrc(val) {
				srcs.add(val.ID)
				addEntry(pos, val)
			}
			if !dsts.contains(ptr.ID) && needWBdst(ptr, w.Args[2], zeroes) {
				dsts.add(ptr.ID)
				// Load the old value living at ptr on the then-path; it is
				// the pointer being overwritten, which the GC must see.
				oldVal := bThen.NewValue2(pos, OpLoad, types.Types[types.TUINTPTR], ptr, memThen)
				addEntry(pos, oldVal)
			}
			f.fe.Func().SetWBPos(pos)
			nWBops--
		}
		flush()

		// Bulk operations (Zero/Move) are handled by runtime calls on the
		// then-path: wbZero(typ, dst) and wbMove(typ, dst, src).
		for _, w := range stores {
			pos := w.Pos
			switch w.Op {
			case OpZeroWB:
				dst := w.Args[0]
				typ := reflectdata.TypeLinksym(w.Aux.(*types.Type))
				// The type address is created in b, not bThen, so it is
				// available to the cgocheck2 path below as well.
				taddr := b.NewValue1A(pos, OpAddr, b.Func.Config.Types.Uintptr, typ, sb)
				memThen = wbcall(pos, bThen, wbZero, sp, memThen, taddr, dst)
				f.fe.Func().SetWBPos(pos)
				nWBops--
			case OpMoveWB:
				dst := w.Args[0]
				src := w.Args[1]
				if isVolatile(src) {
					// Use the stack copy made earlier instead of the
					// (clobbered) original source.
					for _, c := range volatiles {
						if src == c.src {
							src = c.tmp
							break
						}
					}
				}
				typ := reflectdata.TypeLinksym(w.Aux.(*types.Type))
				taddr := b.NewValue1A(pos, OpAddr, b.Func.Config.Types.Uintptr, typ, sb)
				memThen = wbcall(pos, bThen, wbMove, sp, memThen, taddr, dst, src)
				f.fe.Func().SetWBPos(pos)
				nWBops--
			}
		}

		// Merge the memory states of the two paths.
		mem = bEnd.NewValue2(pos, OpPhi, types.TypeMem, mem, memThen)

		// Do the actual writes in bEnd, after the merge, so they happen
		// exactly once on both paths.
		for _, w := range stores {
			pos := w.Pos
			switch w.Op {
			case OpStoreWB:
				ptr := w.Args[0]
				val := w.Args[1]
				if buildcfg.Experiment.CgoCheck2 {
					// Issue cgo checking.
					mem = wbcall(pos, bEnd, cgoCheckPtrWrite, sp, mem, ptr, val)
				}
				mem = bEnd.NewValue3A(pos, OpStore, types.TypeMem, w.Aux, ptr, val, mem)
			case OpZeroWB:
				dst := w.Args[0]
				mem = bEnd.NewValue2I(pos, OpZero, types.TypeMem, w.AuxInt, dst, mem)
				mem.Aux = w.Aux
			case OpMoveWB:
				dst := w.Args[0]
				src := w.Args[1]
				if isVolatile(src) {
					for _, c := range volatiles {
						if src == c.src {
							src = c.tmp
							break
						}
					}
				}
				if buildcfg.Experiment.CgoCheck2 {
					// Issue cgo checking.
					typ := reflectdata.TypeLinksym(w.Aux.(*types.Type))
					taddr := b.NewValue1A(pos, OpAddr, b.Func.Config.Types.Uintptr, typ, sb)
					mem = wbcall(pos, bEnd, cgoCheckMemmove, sp, mem, taddr, dst, src)
				}
				mem = bEnd.NewValue3I(pos, OpMove, types.TypeMem, w.AuxInt, dst, src, mem)
				mem.Aux = w.Aux
			case OpVarDef, OpVarLive:
				mem = bEnd.NewValue1A(pos, w.Op, types.TypeMem, w.Aux, mem)
			case OpStore:
				ptr := w.Args[0]
				val := w.Args[1]
				mem = bEnd.NewValue3A(pos, OpStore, types.TypeMem, w.Aux, ptr, val, mem)
			}
		}

		// Reuse last as an OpWBend marker that produces the final memory
		// state, keeping its ID (and thus existing memory users) intact.
		bEnd.Values = append(bEnd.Values, last)
		last.Block = bEnd
		last.reset(OpWBend)
		last.Pos = last.Pos.WithNotStmt()
		last.Type = types.TypeMem
		last.AddArg(mem)

		// Free all the old stores, except last which we reused above.
		// Reset args first so freeing one doesn't complain about uses
		// by another.
		for _, w := range stores {
			if w != last {
				w.resetArgs()
			}
		}
		for _, w := range stores {
			if w != last {
				f.freeValue(w)
			}
		}

		// Put values after the store sequence into the end block.
		bEnd.Values = append(bEnd.Values, after...)
		for _, w := range after {
			w.Block = bEnd
		}

		// If we have more write-barrier ops left in this block, process
		// the next trailing sequence (now at the end of b.Values).
		if nWBops > 0 {
			goto again
		}
	}
}
573
574
575
// computeZeroMap returns a map from the ID of a memory value to the
// ZeroRegion (object + word mask) known to be zero at that memory state.
// It seeds the map with freshly allocated objects (runtime.newobject
// returns zeroed memory) and then iterates stores to a fixed point,
// clearing mask bits for words that get written.
func (f *Func) computeZeroMap(select1 []*Value) map[ID]ZeroRegion {

	ptrSize := f.Config.PtrSize

	zeroes := map[ID]ZeroRegion{}
	// Find new objects: each provides up to 64 words known to be zero
	// at the memory state produced by its allocation call.
	for _, b := range f.Blocks {
		for _, v := range b.Values {
			if mem, ok := IsNewObject(v, select1); ok {
				// Skip unsafe.Pointer results when compiling package
				// runtime — the element size isn't meaningful there.
				// NOTE(review): presumably guards runtime-internal
				// newobject uses; confirm against upstream comment.
				if types.LocalPkg.Path == "runtime" && v.Type.IsUnsafePtr() {
					continue
				}

				nptr := min(64, v.Type.Elem().Size()/ptrSize)
				zeroes[mem.ID] = ZeroRegion{base: v, mask: 1<<uint(nptr) - 1}
			}
		}
	}

	// Propagate zero information through stores until nothing changes.
	for {
		changed := false
		for _, b := range f.Blocks {
			// Each OpStore consumes a memory state; if that state has a
			// ZeroRegion, compute the region for the store's output state.
			for _, v := range b.Values {
				if v.Op != OpStore {
					continue
				}
				z, ok := zeroes[v.MemoryArg().ID]
				if !ok {
					continue
				}
				ptr := v.Args[0]
				var off int64
				size := v.Aux.(*types.Type).Size()
				for ptr.Op == OpOffPtr {
					off += ptr.AuxInt
					ptr = ptr.Args[0]
				}
				if ptr != z.base {
					// Different base object: we don't know what this
					// store touched (it could even alias z.base through
					// another pointer), so don't propagate z past it.
					continue
				}

				// Round the written range out to whole pointer-sized
				// words so partially-written words are also invalidated.
				if d := off % ptrSize; d != 0 {
					off -= d
					size += d
				}
				if d := size % ptrSize; d != 0 {
					size += ptrSize - d
				}

				// Clip to the 64 words the mask tracks.
				minimum := max(off, 0)
				maximum := min(off+size, 64*ptrSize)

				// Clear mask bits for all words the store overwrites.
				for i := minimum; i < maximum; i += ptrSize {
					bit := i / ptrSize
					z.mask &^= 1 << uint(bit)
				}
				if z.mask == 0 {
					// Nothing known zero anymore — drop the entry.
					continue
				}
				// Record the region for the store's output memory state.
				if zeroes[v.ID] != z {
					zeroes[v.ID] = z
					changed = true
				}
			}
		}
		if !changed {
			break
		}
	}
	if f.pass.debug > 0 {
		fmt.Printf("func %s\n", f.Name)
		for mem, z := range zeroes {
			fmt.Printf(" memory=v%d ptr=%v zeromask=%b\n", mem, z.base, z.mask)
		}
	}
	return zeroes
}
672
673
// wbcall emits a write barrier runtime call to fn in block b, passing args
// (all uintptr-sized) either in registers or on the stack depending on the
// ABI, threading and returning the memory state.
func wbcall(pos src.XPos, b *Block, fn *obj.LSym, sp, mem *Value, args ...*Value) *Value {
	config := b.Func.Config
	typ := config.Types.Uintptr // type of all argument values
	nargs := len(args)

	// Arguments go in registers only when the default ABI is the register
	// ABI and there are enough integer parameter registers for all args.
	inRegs := b.Func.ABIDefault == b.Func.ABI1 && len(config.intParamRegs) >= 3

	if !inRegs {
		// Store each argument to its outgoing stack slot above SP.
		off := config.ctxt.Arch.FixedFrameSize
		for _, arg := range args {
			stkaddr := b.NewValue1I(pos, OpOffPtr, typ.PtrTo(), off, sp)
			mem = b.NewValue3A(pos, OpStore, types.TypeMem, typ, stkaddr, arg, mem)
			off += typ.Size()
		}
		// Stack-passed arguments are not call operands.
		args = args[:0]
	}

	// Memory is always the final call operand.
	args = append(args, mem)

	// Issue the call. nargs counts the original (pre-memory) arguments,
	// all of type uintptr; cap 3 matches the largest barrier call (wbMove).
	argTypes := make([]*types.Type, nargs, 3)
	for i := 0; i < nargs; i++ {
		argTypes[i] = typ
	}
	call := b.NewValue0A(pos, OpStaticCall, types.TypeResultMem, StaticAuxCall(fn, b.Func.ABIDefault.ABIAnalyzeTypes(argTypes, nil)))
	call.AddArgs(args...)
	call.AuxInt = int64(nargs) * typ.Size() // stack space for arguments
	// Return the memory projection of the call.
	return b.NewValue1I(pos, OpSelectN, types.TypeMem, 0, call)
}
705
706
707 func IsStackAddr(v *Value) bool {
708 for v.Op == OpOffPtr || v.Op == OpAddPtr || v.Op == OpPtrIndex || v.Op == OpCopy {
709 v = v.Args[0]
710 }
711 switch v.Op {
712 case OpSP, OpLocalAddr, OpSelectNAddr, OpGetCallerSP:
713 return true
714 }
715 return false
716 }
717
718
719 func IsGlobalAddr(v *Value) bool {
720 for v.Op == OpOffPtr || v.Op == OpAddPtr || v.Op == OpPtrIndex || v.Op == OpCopy {
721 v = v.Args[0]
722 }
723 if v.Op == OpAddr && v.Args[0].Op == OpSB {
724 return true
725 }
726 if v.Op == OpConstNil {
727 return true
728 }
729 if v.Op == OpLoad && IsReadOnlyGlobalAddr(v.Args[0]) {
730 return true
731 }
732 return false
733 }
734
735
736 func IsReadOnlyGlobalAddr(v *Value) bool {
737 if v.Op == OpConstNil {
738
739 return true
740 }
741 if v.Op == OpAddr && v.Aux != nil && v.Aux.(*obj.LSym).Type == objabi.SRODATA {
742 return true
743 }
744 return false
745 }
746
747
748
// IsNewObject reports whether v is a pointer to a freshly allocated and
// zeroed object at memory state mem, i.e. the result of a call to
// runtime.newobject. select1 maps a call's ID to its memory projection.
// The recognized shape differs between the register ABI (result is an
// OpSelectN of the call) and the stack ABI (result is an OpLoad from the
// call's result slot).
func IsNewObject(v *Value, select1 []*Value) (mem *Value, ok bool) {
	f := v.Block.Func
	c := f.Config
	if f.ABIDefault == f.ABI1 && len(c.intParamRegs) >= 1 {
		// Register ABI: result is SelectN[0] of the call; its memory is
		// the SelectN[1] recorded in select1.
		if v.Op != OpSelectN || v.AuxInt != 0 {
			return nil, false
		}
		mem = select1[v.Args[0].ID]
		if mem == nil {
			return nil, false
		}
	} else {
		// Stack ABI: result is a Load whose memory arg is the call's
		// memory projection.
		if v.Op != OpLoad {
			return nil, false
		}
		mem = v.MemoryArg()
		if mem.Op != OpSelectN {
			return nil, false
		}
		if mem.Type != types.TypeMem {
			return nil, false
		}
	}
	call := mem.Args[0]
	if call.Op != OpStaticCall {
		return nil, false
	}
	if !isSameCall(call.Aux, "runtime.newobject") {
		return nil, false
	}
	if f.ABIDefault == f.ABI1 && len(c.intParamRegs) >= 1 {
		if v.Args[0] == call {
			return mem, true
		}
		return nil, false
	}
	// Stack ABI: the load must come from the call's result slot, which
	// sits one register-sized word above the fixed frame (after the
	// argument word).
	if v.Args[0].Op != OpOffPtr {
		return nil, false
	}
	if v.Args[0].Args[0].Op != OpSP {
		return nil, false
	}
	if v.Args[0].AuxInt != c.ctxt.Arch.FixedFrameSize+c.RegSize {
		return nil, false
	}
	return mem, true
}
796
797
798
799 func IsSanitizerSafeAddr(v *Value) bool {
800 for v.Op == OpOffPtr || v.Op == OpAddPtr || v.Op == OpPtrIndex || v.Op == OpCopy {
801 v = v.Args[0]
802 }
803 switch v.Op {
804 case OpSP, OpLocalAddr, OpSelectNAddr:
805
806 return true
807 case OpITab, OpStringPtr, OpGetClosurePtr:
808
809
810 return true
811 case OpAddr:
812 vt := v.Aux.(*obj.LSym).Type
813 return vt == objabi.SRODATA || vt == objabi.SLIBFUZZER_8BIT_COUNTER || vt == objabi.SCOVERAGE_COUNTER || vt == objabi.SCOVERAGE_AUXVAR
814 }
815 return false
816 }
817
818
819
820 func isVolatile(v *Value) bool {
821 for v.Op == OpOffPtr || v.Op == OpAddPtr || v.Op == OpPtrIndex || v.Op == OpCopy || v.Op == OpSelectNAddr {
822 v = v.Args[0]
823 }
824 return v.Op == OpSP
825 }
826