// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package walk

import (
	"fmt"
	"go/constant"
	"internal/abi"
	"internal/buildcfg"
	"strings"

	"cmd/compile/internal/base"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/objw"
	"cmd/compile/internal/reflectdata"
	"cmd/compile/internal/rttype"
	"cmd/compile/internal/staticdata"
	"cmd/compile/internal/typecheck"
	"cmd/compile/internal/types"
	"cmd/internal/obj"
	"cmd/internal/objabi"
)

// The result of walkExpr MUST be assigned back to n, e.g.
//
//	n.Left = walkExpr(n.Left, init)
func walkExpr(n ir.Node, init *ir.Nodes) ir.Node {
	if n == nil {
		return n
	}

	if n, ok := n.(ir.InitNode); ok && init == n.PtrInit() {
		// not okay to use n->ninit when walking n,
		// because we might replace n with some other node
		// and would lose the init list.
		base.Fatalf("walkExpr init == &n->ninit")
	}

	if len(n.Init()) != 0 {
		walkStmtList(n.Init())
		init.Append(ir.TakeInit(n)...)
	}

	lno := ir.SetPos(n)

	if base.Flag.LowerW > 1 {
		ir.Dump("before walk expr", n)
	}

	if n.Typecheck() != 1 {
		base.Fatalf("missed typecheck: %+v", n)
	}

	if n.Type().IsUntyped() {
		base.Fatalf("expression has untyped type: %+v", n)
	}

	n = walkExpr1(n, init)

	// Eagerly compute sizes of all expressions for the back end.
	if typ := n.Type(); typ != nil && typ.Kind() != types.TBLANK && !typ.IsFuncArgStruct() {
		types.CheckSize(typ)
	}
	if n, ok := n.(*ir.Name); ok && n.Heapaddr != nil {
		types.CheckSize(n.Heapaddr.Type())
	}
	if ir.IsConst(n, constant.String) {
		// Emit the string symbol now, to avoid emitting
		// string symbols concurrently during the backend.
		_ = staticdata.StringSym(n.Pos(), constant.StringVal(n.Val()))
	}

	if base.Flag.LowerW != 0 && n != nil {
		ir.Dump("after walk expr", n)
	}

	base.Pos = lno
	return n
}

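// walkExpr1 dispatches on n.Op() and applies the op-specific rewrite.
// Every case must return (or panic); the node it returns replaces n in
// the caller.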
func walkExpr1(n ir.Node, init *ir.Nodes) ir.Node {
	switch n.Op() {
	default:
		ir.Dump("walk", n)
		base.Fatalf("walkExpr: switch 1 unknown op %+v", n.Op())
		panic("unreachable")

	case ir.OGETG, ir.OGETCALLERSP:
		return n

	case ir.OTYPE, ir.ONAME, ir.OLITERAL, ir.ONIL, ir.OLINKSYMOFFSET:
		// Leaves: nothing to rewrite.
		return n

	case ir.OMETHEXPR:
		// A method expression is just the underlying function;
		// rewrite to a direct reference to it.
		n := n.(*ir.SelectorExpr)
		return n.FuncName()

	case ir.OMIN, ir.OMAX:
		n := n.(*ir.CallExpr)
		return walkMinMax(n, init)

	case ir.ONOT, ir.ONEG, ir.OPLUS, ir.OBITNOT, ir.OREAL, ir.OIMAG, ir.OSPTR, ir.OITAB, ir.OIDATA:
		n := n.(*ir.UnaryExpr)
		n.X = walkExpr(n.X, init)
		return n

	case ir.ODOTMETH, ir.ODOTINTER:
		n := n.(*ir.SelectorExpr)
		n.X = walkExpr(n.X, init)
		return n

	case ir.OADDR:
		n := n.(*ir.AddrExpr)
		n.X = walkExpr(n.X, init)
		return n

	case ir.ODEREF:
		n := n.(*ir.StarExpr)
		n.X = walkExpr(n.X, init)
		return n

	case ir.OMAKEFACE, ir.OAND, ir.OANDNOT, ir.OSUB, ir.OMUL, ir.OADD, ir.OOR, ir.OXOR, ir.OLSH, ir.ORSH,
		ir.OUNSAFEADD:
		n := n.(*ir.BinaryExpr)
		n.X = walkExpr(n.X, init)
		n.Y = walkExpr(n.Y, init)
		return n

	case ir.OUNSAFESLICE:
		n := n.(*ir.BinaryExpr)
		return walkUnsafeSlice(n, init)

	case ir.OUNSAFESTRING:
		n := n.(*ir.BinaryExpr)
		return walkUnsafeString(n, init)

	case ir.OUNSAFESTRINGDATA, ir.OUNSAFESLICEDATA:
		n := n.(*ir.UnaryExpr)
		return walkUnsafeData(n, init)

	case ir.ODOT, ir.ODOTPTR:
		n := n.(*ir.SelectorExpr)
		return walkDot(n, init)

	case ir.ODOTTYPE, ir.ODOTTYPE2:
		n := n.(*ir.TypeAssertExpr)
		return walkDotType(n, init)

	case ir.ODYNAMICDOTTYPE, ir.ODYNAMICDOTTYPE2:
		n := n.(*ir.DynamicTypeAssertExpr)
		return walkDynamicDotType(n, init)

	case ir.OLEN, ir.OCAP:
		n := n.(*ir.UnaryExpr)
		return walkLenCap(n, init)

	case ir.OCOMPLEX:
		n := n.(*ir.BinaryExpr)
		n.X = walkExpr(n.X, init)
		n.Y = walkExpr(n.Y, init)
		return n

	case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
		n := n.(*ir.BinaryExpr)
		return walkCompare(n, init)

	case ir.OANDAND, ir.OOROR:
		n := n.(*ir.LogicalExpr)
		return walkLogical(n, init)

	case ir.OPRINT, ir.OPRINTLN:
		return walkPrint(n.(*ir.CallExpr), init)

	case ir.OPANIC:
		n := n.(*ir.UnaryExpr)
		return mkcall("gopanic", nil, init, n.X)

	case ir.ORECOVERFP:
		return walkRecoverFP(n.(*ir.CallExpr), init)

	case ir.OCFUNC:
		return n

	case ir.OCALLINTER, ir.OCALLFUNC:
		n := n.(*ir.CallExpr)
		return walkCall(n, init)

	case ir.OAS, ir.OASOP:
		return walkAssign(init, n)

	case ir.OAS2:
		n := n.(*ir.AssignListStmt)
		return walkAssignList(init, n)

	// a,b,... = fn()
	case ir.OAS2FUNC:
		n := n.(*ir.AssignListStmt)
		return walkAssignFunc(init, n)

	// x, y = <-c
	// order.stmt made sure x is addressable or blank.
	case ir.OAS2RECV:
		n := n.(*ir.AssignListStmt)
		return walkAssignRecv(init, n)

	// a,b = m[i]
	case ir.OAS2MAPR:
		n := n.(*ir.AssignListStmt)
		return walkAssignMapRead(init, n)

	case ir.ODELETE:
		n := n.(*ir.CallExpr)
		return walkDelete(init, n)

	case ir.OAS2DOTTYPE:
		n := n.(*ir.AssignListStmt)
		return walkAssignDotType(n, init)

	case ir.OCONVIFACE:
		n := n.(*ir.ConvExpr)
		return walkConvInterface(n, init)

	case ir.OCONV, ir.OCONVNOP:
		n := n.(*ir.ConvExpr)
		return walkConv(n, init)

	case ir.OSLICE2ARR:
		n := n.(*ir.ConvExpr)
		return walkSliceToArray(n, init)

	case ir.OSLICE2ARRPTR:
		n := n.(*ir.ConvExpr)
		n.X = walkExpr(n.X, init)
		return n

	case ir.ODIV, ir.OMOD:
		n := n.(*ir.BinaryExpr)
		return walkDivMod(n, init)

	case ir.OINDEX:
		n := n.(*ir.IndexExpr)
		return walkIndex(n, init)

	case ir.OINDEXMAP:
		n := n.(*ir.IndexExpr)
		return walkIndexMap(n, init)

	case ir.ORECV:
		base.Fatalf("walkExpr ORECV")
		panic("unreachable")

	case ir.OSLICEHEADER:
		n := n.(*ir.SliceHeaderExpr)
		return walkSliceHeader(n, init)

	case ir.OSTRINGHEADER:
		n := n.(*ir.StringHeaderExpr)
		return walkStringHeader(n, init)

	case ir.OSLICE, ir.OSLICEARR, ir.OSLICESTR, ir.OSLICE3, ir.OSLICE3ARR:
		n := n.(*ir.SliceExpr)
		return walkSlice(n, init)

	case ir.ONEW:
		n := n.(*ir.UnaryExpr)
		return walkNew(n, init)

	case ir.OADDSTR:
		return walkAddString(n.(*ir.AddStringExpr), init, nil)

	case ir.OAPPEND:
		// order should make sure we only see OAS(node, OAPPEND), which we handle above.
		base.Fatalf("append outside assignment")
		panic("unreachable")

	case ir.OCOPY:
		return walkCopy(n.(*ir.BinaryExpr), init, base.Flag.Cfg.Instrumenting && !base.Flag.CompilingRuntime)

	case ir.OCLEAR:
		n := n.(*ir.UnaryExpr)
		return walkClear(n)

	case ir.OCLOSE:
		n := n.(*ir.UnaryExpr)
		return walkClose(n, init)

	case ir.OMAKECHAN:
		n := n.(*ir.MakeExpr)
		return walkMakeChan(n, init)

	case ir.OMAKEMAP:
		n := n.(*ir.MakeExpr)
		return walkMakeMap(n, init)

	case ir.OMAKESLICE:
		n := n.(*ir.MakeExpr)
		return walkMakeSlice(n, init)

	case ir.OMAKESLICECOPY:
		n := n.(*ir.MakeExpr)
		return walkMakeSliceCopy(n, init)

	case ir.ORUNESTR:
		n := n.(*ir.ConvExpr)
		return walkRuneToString(n, init)

	case ir.OBYTES2STR, ir.ORUNES2STR:
		n := n.(*ir.ConvExpr)
		return walkBytesRunesToString(n, init)

	case ir.OBYTES2STRTMP:
		n := n.(*ir.ConvExpr)
		return walkBytesToStringTemp(n, init)

	case ir.OSTR2BYTES:
		n := n.(*ir.ConvExpr)
		return walkStringToBytes(n, init)

	case ir.OSTR2BYTESTMP:
		n := n.(*ir.ConvExpr)
		return walkStringToBytesTemp(n, init)

	case ir.OSTR2RUNES:
		n := n.(*ir.ConvExpr)
		return walkStringToRunes(n, init)

	case ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT, ir.OSTRUCTLIT, ir.OPTRLIT:
		return walkCompLit(n, init)

	case ir.OSEND:
		n := n.(*ir.SendStmt)
		return walkSend(n, init)

	case ir.OCLOSURE:
		return walkClosure(n.(*ir.ClosureExpr), init)

	case ir.OMETHVALUE:
		return walkMethodValue(n.(*ir.SelectorExpr), init)
	}

	// No return! Each case must return (or panic),
	// to avoid confusion about what gets returned
	// in the presence of type assertions.
}

// walkExprList walks each expression in s, replacing it in place.
// Compile-time constants are evaluated, and any side effects that must
// run first are appended to init.
func walkExprList(s []ir.Node, init *ir.Nodes) {
	for i := range s {
		s[i] = walkExpr(s[i], init)
	}
}

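// walkExprListCheap walks each expression in s, first replacing it with a
// cheap copy (see cheapExpr) so the caller can evaluate it more than once.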
func walkExprListCheap(s []ir.Node, init *ir.Nodes) {
	for i, n := range s {
		s[i] = cheapExpr(n, init)
		s[i] = walkExpr(s[i], init)
	}
}

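// walkExprListSafe walks each expression in s, first rewriting it with
// safeExpr so it can appear multiple times without duplicating the side
// effects of its operands.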
func walkExprListSafe(s []ir.Node, init *ir.Nodes) {
	for i, n := range s {
		s[i] = safeExpr(n, init)
		s[i] = walkExpr(s[i], init)
	}
}

// cheapExpr returns a cheap version of n. The definition of cheap is that
// n is a name or a constant; anything else is copied into a new temporary
// (emitting tmp = n to init) and the temporary is returned.
func cheapExpr(n ir.Node, init *ir.Nodes) ir.Node {
	switch n.Op() {
	case ir.ONAME, ir.OLITERAL, ir.ONIL:
		return n
	}

	return copyExpr(n, n.Type(), init)
}

// safeExpr returns a safe version of n. The definition of safe is that n can
// appear multiple times without violating the semantics of the original program.
func safeExpr(n ir.Node, init *ir.Nodes) ir.Node {
	if n == nil {
		return nil
	}

	if len(n.Init()) != 0 {
		walkStmtList(n.Init())
		init.Append(ir.TakeInit(n)...)
	}

	switch n.Op() {
	case ir.ONAME, ir.OLITERAL, ir.ONIL, ir.OLINKSYMOFFSET:
		return n

	case ir.OLEN, ir.OCAP:
		n := n.(*ir.UnaryExpr)
		l := safeExpr(n.X, init)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.UnaryExpr)
		a.X = l
		return walkExpr(typecheck.Expr(a), init)

	case ir.ODOT, ir.ODOTPTR:
		n := n.(*ir.SelectorExpr)
		l := safeExpr(n.X, init)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.SelectorExpr)
		a.X = l
		return walkExpr(typecheck.Expr(a), init)

	case ir.ODEREF:
		n := n.(*ir.StarExpr)
		l := safeExpr(n.X, init)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.StarExpr)
		a.X = l
		return walkExpr(typecheck.Expr(a), init)

	case ir.OINDEX, ir.OINDEXMAP:
		n := n.(*ir.IndexExpr)
		l := safeExpr(n.X, init)
		r := safeExpr(n.Index, init)
		if l == n.X && r == n.Index {
			return n
		}
		a := ir.Copy(n).(*ir.IndexExpr)
		a.X = l
		a.Index = r
		return walkExpr(typecheck.Expr(a), init)

	case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT:
		n := n.(*ir.CompLitExpr)
		if isStaticCompositeLiteral(n) {
			return n
		}
	}

	// make a copy; must not be used as an lvalue
	if ir.IsAddressable(n) {
		base.Fatalf("missing lvalue case in safeExpr: %v", n)
	}
	return cheapExpr(n, init)
}

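// copyExpr copies the value of n into a fresh temporary of type t,
// appending the assignment to init, and returns the temporary.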
func copyExpr(n ir.Node, t *types.Type, init *ir.Nodes) ir.Node {
	l := typecheck.TempAt(base.Pos, ir.CurFunc, t)
	appendWalkStmt(init, ir.NewAssignStmt(base.Pos, l, n))
	return l
}

// walkAddString walks a string concatenation expression x, rewriting it
// into a call to the appropriate concat* runtime helper. If conv is
// non-nil, it is the enclosing conversion of the result and the result
// takes conv's type.
func walkAddString(x *ir.AddStringExpr, init *ir.Nodes, conv *ir.ConvExpr) ir.Node {
	c := len(x.List)
	if c < 2 {
		base.Fatalf("walkAddString count %d too small", c)
	}

	typ := x.Type()
	if conv != nil {
		typ = conv.Type()
	}

	// The first argument of the runtime call is an optional
	// stack buffer for the result.
	var args []ir.Node

	var fn, fnsmall, fnbig string

	buf := typecheck.NodNil()
	switch {
	default:
		base.FatalfAt(x.Pos(), "unexpected type: %v", typ)
	case typ.IsString():
		if x.Esc() == ir.EscNone {
			sz := int64(0)
			for _, n1 := range x.List {
				if n1.Op() == ir.OLITERAL {
					sz += int64(len(ir.StringVal(n1)))
				}
			}

			// Don't allocate the buffer if the result won't fit.
			if sz < tmpstringbufsize {
				// Create a temporary buffer for the result string on the stack.
				buf = stackBufAddr(tmpstringbufsize, types.Types[types.TUINT8])
			}
		}

		args = []ir.Node{buf}
		fnsmall, fnbig = "concatstring%d", "concatstrings"
	case typ.IsSlice() && typ.Elem().IsKind(types.TUINT8):
		if conv != nil && conv.Esc() == ir.EscNone {
			buf = stackBufAddr(tmpstringbufsize, types.Types[types.TUINT8])
		}
		args = []ir.Node{buf}
		fnsmall, fnbig = "concatbyte%d", "concatbytes"
	}

	if c <= 5 {
		// Small numbers of operands use the direct runtime helpers.
		// Note: order.expr knows this cutoff too.
		fn = fmt.Sprintf(fnsmall, c)

		for _, n2 := range x.List {
			args = append(args, typecheck.Conv(n2, types.Types[types.TSTRING]))
		}
	} else {
		// Large numbers of operands are passed to the runtime as a slice.
		fn = fnbig
		t := types.NewSlice(types.Types[types.TSTRING])

		slargs := make([]ir.Node, len(x.List))
		for i, n2 := range x.List {
			slargs[i] = typecheck.Conv(n2, types.Types[types.TSTRING])
		}
		slice := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, t, slargs)
		slice.Prealloc = x.Prealloc
		args = append(args, slice)
		slice.SetEsc(ir.EscNone)
	}

	cat := typecheck.LookupRuntime(fn)
	r := ir.NewCallExpr(base.Pos, ir.OCALL, cat, nil)
	r.Args = args
	r1 := typecheck.Expr(r)
	r1 = walkExpr(r1, init)
	r1.SetType(typ)

	return r1
}

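// hookInfo describes a libfuzzer interception hook: the runtime function
// to call, the number of arguments the hooked call takes, and the type
// those arguments are converted to. hooks maps qualified function names
// to their hook.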
type hookInfo struct {
	paramType   types.Kind
	argsNum     int
	runtimeFunc string
}

var hooks = map[string]hookInfo{
	"strings.EqualFold": {paramType: types.TSTRING, argsNum: 2, runtimeFunc: "libfuzzerHookEqualFold"},
}

// walkCall walks an OCALLFUNC or OCALLINTER node.
func walkCall(n *ir.CallExpr, init *ir.Nodes) ir.Node {
	if n.Op() == ir.OCALLMETH {
		base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
	}
	if n.Op() == ir.OCALLINTER || n.Fun.Op() == ir.OMETHEXPR {
		// We expect both interface call reflect.Type.Method and concrete
		// call reflect.(*rtype).Method.
		usemethod(n)
	}
	if n.Op() == ir.OCALLINTER {
		reflectdata.MarkUsedIfaceMethod(n)
	}

	if n.Op() == ir.OCALLFUNC && n.Fun.Op() == ir.OCLOSURE {
		directClosureCall(n)
	}

	if ir.IsFuncPCIntrinsic(n) {
		// For internal/abi.FuncPCABIxxx(fn), rewrite the call to the
		// address of fn for the requested ABI.
		name := n.Fun.(*ir.Name).Sym().Name
		arg := n.Args[0]
		var wantABI obj.ABI
		switch name {
		case "FuncPCABI0":
			wantABI = obj.ABI0
		case "FuncPCABIInternal":
			wantABI = obj.ABIInternal
		}
		if n.Type() != types.Types[types.TUINTPTR] {
			base.FatalfAt(n.Pos(), "FuncPC intrinsic should return uintptr, got %v", n.Type())
		}
		n := ir.FuncPC(n.Pos(), arg, wantABI)
		return walkExpr(n, init)
	}

	if n.Op() == ir.OCALLFUNC {
		fn := ir.StaticCalleeName(n.Fun)
		if fn != nil && fn.Sym().Pkg.Path == "hash/maphash" && strings.HasPrefix(fn.Sym().Name, "escapeForHash[") {
			// hash/maphash.escapeForHash[T] is a compiler intrinsic used by
			// escape analysis to force its argument to escape based on its
			// type. By walk time the call itself is a no-op, so just walk
			// and return the value argument.
			ps := fn.Type().Params()
			if len(ps) == 2 && ps[1].Type.IsShape() {
				return walkExpr(n.Args[1], init)
			}
		}
	}

	if name, ok := n.Fun.(*ir.Name); ok {
		sym := name.Sym()
		if sym.Pkg.Path == "go.runtime" && sym.Name == "deferrangefunc" {
			// A call to runtime.deferrangefunc means a range-over-func loop
			// body may add defers to this frame, so the function cannot use
			// open-coded defers.
			ir.CurFunc.SetHasDefer(true)
			ir.CurFunc.SetOpenCodedDeferDisallowed(true)
		}
	}

	walkCall1(n, init)
	return n
}

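// walkCall1 walks the call's function and argument expressions and moves
// argument values that may themselves involve a call into temporaries.
// It is idempotent: a call that has already been walked is left alone.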
func walkCall1(n *ir.CallExpr, init *ir.Nodes) {
	if n.Walked() {
		return
	}
	n.SetWalked(true)

	if n.Op() == ir.OCALLMETH {
		base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
	}

	args := n.Args
	params := n.Fun.Type().Params()

	n.Fun = walkExpr(n.Fun, init)
	walkExprList(args, init)

	for i, arg := range args {
		// Validate argument and parameter types match.
		param := params[i]
		if !types.Identical(arg.Type(), param.Type) {
			base.FatalfAt(n.Pos(), "assigning %L to parameter %v (type %v)", arg, param.Sym, param.Type)
		}

		// For any argument whose evaluation might require a function call,
		// store that argument into a temporary variable,
		// to prevent those calls from clobbering arguments already on the stack.
		if mayCall(arg) {
			// assign arg to a temporary
			tmp := typecheck.TempAt(base.Pos, ir.CurFunc, param.Type)
			init.Append(convas(typecheck.Stmt(ir.NewAssignStmt(base.Pos, tmp, arg)).(*ir.AssignStmt), init))
			// replace arg with the temporary
			args[i] = tmp
		}
	}

	funSym := n.Fun.Sym()
	if base.Debug.Libfuzzer != 0 && funSym != nil {
		if hook, found := hooks[funSym.Pkg.Path+"."+funSym.Name]; found {
			if len(args) != hook.argsNum {
				panic(fmt.Sprintf("%s.%s expects %d arguments, but received %d", funSym.Pkg.Path, funSym.Name, hook.argsNum, len(args)))
			}
			var hookArgs []ir.Node
			for _, arg := range args {
				hookArgs = append(hookArgs, tracecmpArg(arg, types.Types[hook.paramType], init))
			}
			hookArgs = append(hookArgs, fakePC(n))
			init.Append(mkcall(hook.runtimeFunc, nil, init, hookArgs...))
		}
	}
}

// walkDivMod walks an ODIV or OMOD node.
func walkDivMod(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)
	n.Y = walkExpr(n.Y, init)

	// rewrite complex div into function call.
	et := n.X.Type().Kind()

	if types.IsComplex[et] && n.Op() == ir.ODIV {
		t := n.Type()
		call := mkcall("complex128div", types.Types[types.TCOMPLEX128], init, typecheck.Conv(n.X, types.Types[types.TCOMPLEX128]), typecheck.Conv(n.Y, types.Types[types.TCOMPLEX128]))
		return typecheck.Conv(call, t)
	}

	// Nothing to do for float divisions.
	if types.IsFloat[et] {
		return n
	}

	// rewrite 64-bit div and mod on 32-bit architectures into runtime
	// calls, since the back end cannot lower them to machine
	// instructions there.
	if types.RegSize < 8 && (et == types.TINT64 || et == types.TUINT64) {
		if n.Y.Op() == ir.OLITERAL {
			// Leave div/mod by constant powers of 2 or small 16-bit constants.
			// The SSA backend will handle those.
			switch et {
			case types.TINT64:
				c := ir.Int64Val(n.Y)
				if c < 0 {
					c = -c
				}
				if c != 0 && c&(c-1) == 0 {
					return n
				}
			case types.TUINT64:
				c := ir.Uint64Val(n.Y)
				if c < 1<<16 {
					return n
				}
				if c != 0 && c&(c-1) == 0 {
					return n
				}
			}
		}
		var fn string
		if et == types.TINT64 {
			fn = "int64"
		} else {
			fn = "uint64"
		}
		if n.Op() == ir.ODIV {
			fn += "div"
		} else {
			fn += "mod"
		}
		return mkcall(fn, n.Type(), init, typecheck.Conv(n.X, types.Types[et]), typecheck.Conv(n.Y, types.Types[et]))
	}
	return n
}

// walkDot walks an ODOT or ODOTPTR node.
func walkDot(n *ir.SelectorExpr, init *ir.Nodes) ir.Node {
	usefield(n)
	n.X = walkExpr(n.X, init)
	return n
}

// walkDotType walks an ODOTTYPE or ODOTTYPE2 node.
func walkDotType(n *ir.TypeAssertExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)
	// Set up interface type addresses for the back end.
	if !n.Type().IsInterface() && !n.X.Type().IsEmptyInterface() {
		n.ITab = reflectdata.ITabAddrAt(base.Pos, n.Type(), n.X.Type())
	}
	if n.X.Type().IsInterface() && n.Type().IsInterface() && !n.Type().IsEmptyInterface() {
		// This kind of conversion needs a runtime call. Allocate
		// a descriptor for that call.
		n.Descriptor = makeTypeAssertDescriptor(n.Type(), n.Op() == ir.ODOTTYPE2)
	}
	return n
}

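// makeTypeAssertDescriptor allocates and fills in a type-assertion
// descriptor (rttype.TypeAssert) for an assertion to the interface type
// target, and returns its symbol for use by the runtime call.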
func makeTypeAssertDescriptor(target *types.Type, canFail bool) *obj.LSym {
	// Allocate a descriptor symbol and write its Cache, Inter, and
	// CanFail fields.
	lsym := types.LocalPkg.Lookup(fmt.Sprintf(".typeAssert.%d", typeAssertGen)).LinksymABI(obj.ABI0)
	typeAssertGen++
	c := rttype.NewCursor(lsym, 0, rttype.TypeAssert)
	c.Field("Cache").WritePtr(typecheck.LookupRuntimeVar("emptyTypeAssertCache"))
	c.Field("Inter").WritePtr(reflectdata.TypeLinksym(target))
	c.Field("CanFail").WriteBool(canFail)
	objw.Global(lsym, int32(rttype.TypeAssert.Size()), obj.LOCAL)
	lsym.Gotype = reflectdata.TypeLinksym(rttype.TypeAssert)
	return lsym
}

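// typeAssertGen numbers the .typeAssert.%d descriptor symbols generated
// by makeTypeAssertDescriptor so that each gets a unique name.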
var typeAssertGen int

// walkDynamicDotType walks an ODYNAMICDOTTYPE or ODYNAMICDOTTYPE2 node.
func walkDynamicDotType(n *ir.DynamicTypeAssertExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)
	n.RType = walkExpr(n.RType, init)
	n.ITab = walkExpr(n.ITab, init)
	// Convert to a non-dynamic assertion if we can.
	if n.RType != nil && n.RType.Op() == ir.OADDR {
		addr := n.RType.(*ir.AddrExpr)
		if addr.X.Op() == ir.OLINKSYMOFFSET {
			r := ir.NewTypeAssertExpr(n.Pos(), n.X, n.Type())
			if n.Op() == ir.ODYNAMICDOTTYPE2 {
				r.SetOp(ir.ODOTTYPE2)
			}
			r.SetType(n.Type())
			r.SetTypecheck(1)
			return walkExpr(r, init)
		}
	}
	return n
}

// walkIndex walks an OINDEX node.
func walkIndex(n *ir.IndexExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)

	// Save the original index for bounds-check elision;
	// if it was an ODIV/OMOD, walk might rewrite it.
	r := n.Index

	n.Index = walkExpr(n.Index, init)

	// If the range of the index type cannot exceed the static array
	// bound, disable the bounds check.
	if n.Bounded() {
		return n
	}
	t := n.X.Type()
	if t != nil && t.IsPtr() {
		t = t.Elem()
	}
	if t.IsArray() {
		n.SetBounded(bounded(r, t.NumElem()))
		if base.Flag.LowerM != 0 && n.Bounded() && !ir.IsConst(n.Index, constant.Int) {
			base.Warn("index bounds check elided")
		}
	} else if ir.IsConst(n.X, constant.String) {
		n.SetBounded(bounded(r, int64(len(ir.StringVal(n.X)))))
		if base.Flag.LowerM != 0 && n.Bounded() && !ir.IsConst(n.Index, constant.Int) {
			base.Warn("index bounds check elided")
		}
	}
	return n
}

// mapKeyArg returns an expression for key that is suitable to be passed
// as the key argument to the runtime map* functions.
// n is the map indexing or delete node (used for its position).
func mapKeyArg(fast int, n, key ir.Node, assigned bool) ir.Node {
	if fast == mapslow {
		// standard version takes key by reference.
		// orderState.expr made sure key is addressable.
		return typecheck.NodAddr(key)
	}
	if assigned {
		// mapassign does distinguish pointer vs. integer key.
		return key
	}
	// mapaccess and mapdelete don't distinguish pointer vs. integer key.
	switch fast {
	case mapfast32ptr:
		return ir.NewConvExpr(n.Pos(), ir.OCONVNOP, types.Types[types.TUINT32], key)
	case mapfast64ptr:
		return ir.NewConvExpr(n.Pos(), ir.OCONVNOP, types.Types[types.TUINT64], key)
	default:
		// fast version takes key by value.
		return key
	}
}

// walkIndexMap walks an OINDEXMAP node.
// It replaces m[k] with *map{access1,assign}(maptype, m, &k).
func walkIndexMap(n *ir.IndexExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)
	n.Index = walkExpr(n.Index, init)
	map_ := n.X
	t := map_.Type()
	fast := mapfast(t)
	key := mapKeyArg(fast, n, n.Index, n.Assigned)
	args := []ir.Node{reflectdata.IndexMapRType(base.Pos, n), map_, key}

	var mapFn ir.Node
	switch {
	case n.Assigned:
		mapFn = mapfn(mapassign[fast], t, false)
	case t.Elem().Size() > abi.ZeroValSize:
		args = append(args, reflectdata.ZeroAddr(t.Elem().Size()))
		mapFn = mapfn("mapaccess1_fat", t, true)
	default:
		mapFn = mapfn(mapaccess1[fast], t, false)
	}
	call := mkcall1(mapFn, nil, init, args...)
	call.SetType(types.NewPtr(t.Elem()))
	call.MarkNonNil()
	star := ir.NewStarExpr(base.Pos, call)
	star.SetType(t.Elem())
	star.SetTypecheck(1)
	return star
}

// walkLogical walks an OANDAND or OOROR node.
func walkLogical(n *ir.LogicalExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)

	// Cannot put side effects from n.Y on init,
	// because they cannot run before n.X is checked.
	// Save them elsewhere and attach to the eventual n.Y.
	var ll ir.Nodes

	n.Y = walkExpr(n.Y, &ll)
	n.Y = ir.InitExpr(ll, n.Y)
	return n
}

// walkSend walks an OSEND node.
func walkSend(n *ir.SendStmt, init *ir.Nodes) ir.Node {
	n1 := n.Value
	n1 = typecheck.AssignConv(n1, n.Chan.Type().Elem(), "chan send")
	n1 = walkExpr(n1, init)
	n1 = typecheck.NodAddr(n1)
	return mkcall1(chanfn("chansend1", 2, n.Chan.Type()), nil, init, n.Chan, n1)
}

// walkSlice walks an OSLICE, OSLICEARR, OSLICESTR, OSLICE3, or OSLICE3ARR node.
func walkSlice(n *ir.SliceExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)
	n.Low = walkExpr(n.Low, init)
	if n.Low != nil && ir.IsZero(n.Low) {
		// Reduce x[0:j] to x[:j].
		n.Low = nil
	}
	n.High = walkExpr(n.High, init)
	n.Max = walkExpr(n.Max, init)

	if (n.Op() == ir.OSLICE || n.Op() == ir.OSLICESTR) && n.Low == nil && n.High == nil {
		// Reduce x[:] to x.
		if base.Debug.Slice > 0 {
			base.Warn("slice: omit slice operation")
		}
		return n.X
	}
	return n
}

// walkSliceHeader walks an OSLICEHEADER node.
func walkSliceHeader(n *ir.SliceHeaderExpr, init *ir.Nodes) ir.Node {
	n.Ptr = walkExpr(n.Ptr, init)
	n.Len = walkExpr(n.Len, init)
	n.Cap = walkExpr(n.Cap, init)
	return n
}

// walkStringHeader walks an OSTRINGHEADER node.
func walkStringHeader(n *ir.StringHeaderExpr, init *ir.Nodes) ir.Node {
	n.Ptr = walkExpr(n.Ptr, init)
	n.Len = walkExpr(n.Len, init)
	return n
}

// bounded reports whether the integer expression n is guaranteed to be in the range [0, max).
func bounded(n ir.Node, max int64) bool {
	if n.Type() == nil || !n.Type().IsInteger() {
		return false
	}

	sign := n.Type().IsSigned()
	bits := int32(8 * n.Type().Size())

	if ir.IsSmallIntConst(n) {
		v := ir.Int64Val(n)
		return 0 <= v && v < max
	}

	switch n.Op() {
	case ir.OAND, ir.OANDNOT:
		n := n.(*ir.BinaryExpr)
		v := int64(-1)
		switch {
		case ir.IsSmallIntConst(n.X):
			v = ir.Int64Val(n.X)
		case ir.IsSmallIntConst(n.Y):
			v = ir.Int64Val(n.Y)
			if n.Op() == ir.OANDNOT {
				v = ^v
				if !sign {
					v &= 1<<uint(bits) - 1
				}
			}
		}
		if 0 <= v && v < max {
			return true
		}

	case ir.OMOD:
		n := n.(*ir.BinaryExpr)
		if !sign && ir.IsSmallIntConst(n.Y) {
			v := ir.Int64Val(n.Y)
			if 0 <= v && v <= max {
				return true
			}
		}

	case ir.ODIV:
		n := n.(*ir.BinaryExpr)
		if !sign && ir.IsSmallIntConst(n.Y) {
			v := ir.Int64Val(n.Y)
			for bits > 0 && v >= 2 {
				bits--
				v >>= 1
			}
		}

	case ir.ORSH:
		n := n.(*ir.BinaryExpr)
		if !sign && ir.IsSmallIntConst(n.Y) {
			v := ir.Int64Val(n.Y)
			if v > int64(bits) {
				return true
			}
			bits -= int32(v)
		}
	}

	if !sign && bits <= 62 && 1<<uint(bits) <= max {
		return true
	}

	return false
}

// usemethod checks calls and method value expressions for uses of
// reflect.Type.{Method,MethodByName} and reflect.Value.{Method,MethodByName}.
func usemethod(n *ir.CallExpr) {
	// Don't mark the reflect package's own Method/MethodByName
	// implementations: they are exactly the functions this check looks
	// for, and marking them would pessimize every program that uses
	// reflect.
	if base.Ctxt.Pkgpath == "reflect" {
		// These functions implement the method lookup itself.
		switch fn := ir.CurFunc.Nname.Sym().Name; {
		case fn == "(*rtype).Method", fn == "(*rtype).MethodByName":
			return
		case fn == "(*interfaceType).Method", fn == "(*interfaceType).MethodByName":
			return
		case fn == "Value.Method", fn == "Value.MethodByName":
			return
		}
	}

	dot, ok := n.Fun.(*ir.SelectorExpr)
	if !ok {
		return
	}

	// Looking for either direct method calls or interface method calls of:
	//	reflect.Type.Method        - func(int) reflect.Method
	//	reflect.Type.MethodByName  - func(string) (reflect.Method, bool)
	//
	//	reflect.Value.Method       - func(int) reflect.Value
	//	reflect.Value.MethodByName - func(string) reflect.Value
	methodName := dot.Sel.Name
	t := dot.Selection.Type

	// Check the number of arguments and return values.
	if t.NumParams() != 1 || (t.NumResults() != 1 && t.NumResults() != 2) {
		return
	}

	// Check the type of the argument.
	switch pKind := t.Param(0).Type.Kind(); {
	case methodName == "Method" && pKind == types.TINT,
		methodName == "MethodByName" && pKind == types.TSTRING:

	default:
		// not a call to Method or MethodByName of reflect.{Type,Value}.
		return
	}

	// Check that the first result type is "reflect.Method" or "reflect.Value".
	// Note that we have to check the sym name and sym package separately, as
	// we can't check for the exact string "reflect.Method" reliably
	// (e.g., see #19028 and #38515).
	switch s := t.Result(0).Type.Sym(); {
	case s != nil && types.ReflectSymName(s) == "Method",
		s != nil && types.ReflectSymName(s) == "Value":

	default:
		// not a call to Method or MethodByName of reflect.{Type,Value}.
		return
	}

	var targetName ir.Node
	switch dot.Op() {
	case ir.ODOTINTER:
		if methodName == "MethodByName" {
			targetName = n.Args[0]
		}
	case ir.OMETHEXPR:
		if methodName == "MethodByName" {
			targetName = n.Args[1]
		}
	default:
		base.FatalfAt(dot.Pos(), "usemethod: unexpected dot.Op() %s", dot.Op())
	}

	if ir.IsConst(targetName, constant.String) {
		name := constant.StringVal(targetName.Val())
		ir.CurFunc.LSym.AddRel(base.Ctxt, obj.Reloc{
			Type: objabi.R_USENAMEDMETHOD,
			Sym:  staticdata.StringSymNoCommon(name),
		})
	} else {
		ir.CurFunc.LSym.Set(obj.AttrReflectMethod, true)
	}
}

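// usefield records, when the fieldtrack experiment is enabled, that the
// current function accesses the tracked struct field selected by n.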
func usefield(n *ir.SelectorExpr) {
	if !buildcfg.Experiment.FieldTrack {
		return
	}

	switch n.Op() {
	default:
		base.Fatalf("usefield %v", n.Op())

	case ir.ODOT, ir.ODOTPTR:
		break
	}

	field := n.Selection
	if field == nil {
		base.Fatalf("usefield %v %v without paramfld", n.X.Type(), n.Sel)
	}
	if field.Sym != n.Sel {
		base.Fatalf("field inconsistency: %v != %v", field.Sym, n.Sel)
	}
	if !strings.Contains(field.Note, "go:\"track\"") {
		return
	}

	outer := n.X.Type()
	if outer.IsPtr() {
		outer = outer.Elem()
	}
	if outer.Sym() == nil {
		base.Errorf("tracked field must be in named struct type")
	}

	sym := reflectdata.TrackSym(outer, field)
	if ir.CurFunc.FieldTrack == nil {
		ir.CurFunc.FieldTrack = make(map[*obj.LSym]struct{})
	}
	ir.CurFunc.FieldTrack[sym] = struct{}{}
}