// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

5 package walk
6
7 import (
8 "go/constant"
9 "internal/abi"
10
11 "cmd/compile/internal/base"
12 "cmd/compile/internal/ir"
13 "cmd/compile/internal/reflectdata"
14 "cmd/compile/internal/typecheck"
15 "cmd/compile/internal/types"
16 "cmd/internal/src"
17 )
18
19
// walkAssign walks an OAS (x = y) or OASOP (x op= y) statement,
// lowering it to simpler IR. OASOP is rewritten to plain OAS; special
// right-hand sides (channel receive, append) are expanded to runtime
// calls or explicit slice manipulation.
func walkAssign(init *ir.Nodes, n ir.Node) ir.Node {
	init.Append(ir.TakeInit(n)...)

	var left, right ir.Node
	switch n.Op() {
	case ir.OAS:
		n := n.(*ir.AssignStmt)
		left, right = n.X, n.Y
	case ir.OASOP:
		n := n.(*ir.AssignOpStmt)
		left, right = n.X, n.Y
	}

	// Recognize m[k] = append(m[k], ...) so we can walk and reuse
	// the map index expression only once on both sides.
	var mapAppend *ir.CallExpr
	if left.Op() == ir.OINDEXMAP && right.Op() == ir.OAPPEND {
		left := left.(*ir.IndexExpr)
		mapAppend = right.(*ir.CallExpr)
		if !ir.SameSafeExpr(left, mapAppend.Args[0]) {
			base.Fatalf("not same expressions: %v != %v", left, mapAppend.Args[0])
		}
	}

	left = walkExpr(left, init)
	left = safeExpr(left, init)
	if mapAppend != nil {
		// Share the walked LHS with append's first argument.
		mapAppend.Args[0] = left
	}

	if n.Op() == ir.OASOP {
		// Rewrite x op= y into x = x op y.
		n = ir.NewAssignStmt(base.Pos, left, typecheck.Expr(ir.NewBinaryExpr(base.Pos, n.(*ir.AssignOpStmt).AsOp, left, right)))
	} else {
		n.(*ir.AssignStmt).X = left
	}
	as := n.(*ir.AssignStmt)

	// Composite-literal assignments may be expanded in place by oaslit.
	if oaslit(as, init) {
		return ir.NewBlockStmt(as.Pos(), nil)
	}

	if as.Y == nil {
		// No RHS to walk; nothing further to do.
		return as
	}

	if !base.Flag.Cfg.Instrumenting && ir.IsZero(as.Y) {
		// Zero-value assignments need no further rewriting
		// (kept when instrumenting so tools can observe them).
		return as
	}

	switch as.Y.Op() {
	default:
		as.Y = walkExpr(as.Y, init)

	case ir.ORECV:
		// x = <-c becomes chanrecv1(c, &x).
		recv := as.Y.(*ir.UnaryExpr)
		recv.X = walkExpr(recv.X, init)

		n1 := typecheck.NodAddr(as.X)
		r := recv.X // the channel
		return mkcall1(chanfn("chanrecv1", 2, r.Type()), nil, init, r, n1)

	case ir.OAPPEND:
		// x = append(...)
		call := as.Y.(*ir.CallExpr)
		if call.Type().Elem().NotInHeap() {
			base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", call.Type().Elem())
		}
		var r ir.Node
		switch {
		case isAppendOfMake(call):
			// x = append(y, make([]T, y)...)
			r = extendSlice(call, init)
		case call.IsDDD:
			// x = append(y, z...)
			r = appendSlice(call, init)
		default:
			r = walkAppend(call, init, as)
		}
		as.Y = r
		if r.Op() == ir.OAPPEND {
			r := r.(*ir.CallExpr)
			// The OAPPEND is left for the back end; attach the
			// element RType it needs and skip convas below.
			r.Fun = reflectdata.AppendElemRType(base.Pos, r)
			return as
		}
		// Otherwise the append was fully lowered above;
		// fall through and treat as an ordinary assignment.
	}

	if as.X != nil && as.Y != nil {
		return convas(as, init)
	}
	return as
}
119
120
121 func walkAssignDotType(n *ir.AssignListStmt, init *ir.Nodes) ir.Node {
122 walkExprListSafe(n.Lhs, init)
123 n.Rhs[0] = walkExpr(n.Rhs[0], init)
124 return n
125 }
126
127
128 func walkAssignFunc(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
129 init.Append(ir.TakeInit(n)...)
130
131 r := n.Rhs[0]
132 walkExprListSafe(n.Lhs, init)
133 r = walkExpr(r, init)
134
135 if ir.IsIntrinsicCall(r.(*ir.CallExpr)) {
136 n.Rhs = []ir.Node{r}
137 return n
138 }
139 init.Append(r)
140
141 ll := ascompatet(n.Lhs, r.Type())
142 return ir.NewBlockStmt(src.NoXPos, ll)
143 }
144
145
146 func walkAssignList(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
147 init.Append(ir.TakeInit(n)...)
148 return ir.NewBlockStmt(src.NoXPos, ascompatee(ir.OAS, n.Lhs, n.Rhs))
149 }
150
151
152 func walkAssignMapRead(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
153 init.Append(ir.TakeInit(n)...)
154
155 r := n.Rhs[0].(*ir.IndexExpr)
156 walkExprListSafe(n.Lhs, init)
157
158 r.X = walkExpr(r.X, init)
159 r.Index = walkExpr(r.Index, init)
160 map_ := r.X
161 t := r.X.Type()
162 fast := mapfast(t)
163 key := mapKeyArg(fast, r, r.Index, false)
164 args := []ir.Node{reflectdata.IndexMapRType(base.Pos, r), map_, key}
165
166
167
168
169
170
171 a := n.Lhs[0]
172
173 var mapFn ir.Node
174 if t.Elem().Size() > abi.ZeroValSize {
175 args = append(args, reflectdata.ZeroAddr(t.Elem().Size()))
176 mapFn = mapfn("mapaccess2_fat", t, true)
177 } else {
178 mapFn = mapfn(mapaccess[fast], t, false)
179 }
180 call := mkcall1(mapFn, mapFn.Type().ResultsTuple(), init, args...)
181
182
183
184
185 if ok := n.Lhs[1]; !ir.IsBlank(ok) && ok.Type().IsBoolean() {
186 call.Type().Field(1).Type = ok.Type()
187 }
188 n.Rhs = []ir.Node{call}
189 n.SetOp(ir.OAS2FUNC)
190
191
192 if ir.IsBlank(a) {
193 return walkExpr(typecheck.Stmt(n), init)
194 }
195
196 var_ := typecheck.TempAt(base.Pos, ir.CurFunc, types.NewPtr(t.Elem()))
197 var_.SetTypecheck(1)
198 var_.MarkNonNil()
199
200 n.Lhs[0] = var_
201 init.Append(walkExpr(n, init))
202
203 as := ir.NewAssignStmt(base.Pos, a, ir.NewStarExpr(base.Pos, var_))
204 return walkExpr(typecheck.Stmt(as), init)
205 }
206
207
208 func walkAssignRecv(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
209 init.Append(ir.TakeInit(n)...)
210
211 r := n.Rhs[0].(*ir.UnaryExpr)
212 walkExprListSafe(n.Lhs, init)
213 r.X = walkExpr(r.X, init)
214 var n1 ir.Node
215 if ir.IsBlank(n.Lhs[0]) {
216 n1 = typecheck.NodNil()
217 } else {
218 n1 = typecheck.NodAddr(n.Lhs[0])
219 }
220 fn := chanfn("chanrecv2", 2, r.X.Type())
221 ok := n.Lhs[1]
222 call := mkcall1(fn, types.Types[types.TBOOL], init, r.X, n1)
223 return walkAssign(init, typecheck.Stmt(ir.NewAssignStmt(base.Pos, ok, call)))
224 }
225
226
227 func walkReturn(n *ir.ReturnStmt) ir.Node {
228 fn := ir.CurFunc
229
230 fn.NumReturns++
231 if len(n.Results) == 0 {
232 return n
233 }
234
235 results := fn.Type().Results()
236 dsts := make([]ir.Node, len(results))
237 for i, v := range results {
238
239 dsts[i] = typecheck.AssignExpr(v.Nname.(*ir.Name))
240 }
241
242 n.Results = ascompatee(n.Op(), dsts, n.Results)
243 return n
244 }
245
246
247
248
249
250 func ascompatet(nl ir.Nodes, nr *types.Type) []ir.Node {
251 if len(nl) != nr.NumFields() {
252 base.Fatalf("ascompatet: assignment count mismatch: %d = %d", len(nl), nr.NumFields())
253 }
254
255 var nn ir.Nodes
256 for i, l := range nl {
257 if ir.IsBlank(l) {
258 continue
259 }
260 r := nr.Field(i)
261
262
263
264 if tmp, ok := l.(*ir.Name); !ok || !tmp.AutoTemp() || !types.Identical(tmp.Type(), r.Type) {
265 base.FatalfAt(l.Pos(), "assigning %v to %+v", r.Type, l)
266 }
267
268 res := ir.NewResultExpr(base.Pos, nil, types.BADWIDTH)
269 res.Index = int64(i)
270 res.SetType(r.Type)
271 res.SetTypecheck(1)
272
273 nn.Append(ir.NewAssignStmt(base.Pos, l, res))
274 }
275 return nn
276 }
277
278
279
280
281
// ascompatee lowers a parallel assignment (or return) nl... = nr...
// into a sequence of single assignments that preserves the semantics
// of simultaneous assignment: any expression that a later assignment
// could affect is copied to a temporary ("saved") first.
func ascompatee(op ir.Op, nl, nr []ir.Node) []ir.Node {

	if len(nl) != len(nr) {
		base.Fatalf("assignment operands mismatch: %+v / %+v", ir.Nodes(nl), ir.Nodes(nr))
	}

	// assigned tracks local, non-addrtaken variables written so far;
	// memWrite/deferResultWrite record coarser hazards.
	var assigned ir.NameSet
	var memWrite, deferResultWrite bool

	// affected reports whether expression n could be changed by the
	// assignments emitted so far.
	affected := func(n ir.Node) bool {
		if deferResultWrite {
			return true
		}
		return ir.Any(n, func(n ir.Node) bool {
			if n.Op() == ir.ONAME && assigned.Has(n.(*ir.Name)) {
				return true
			}
			if memWrite && readsMemory(n) {
				return true
			}
			return false
		})
	}

	// If a needed expression may be affected by an earlier assignment,
	// make an early copy of that expression into a temp (appended to
	// early) and use the copy instead.
	var early ir.Nodes
	save := func(np *ir.Node) {
		if n := *np; affected(n) {
			*np = copyExpr(n, n.Type(), &early)
		}
	}

	var late ir.Nodes
	for i, lorig := range nl {
		l, r := lorig, nr[i]

		// Do not generate 'x = x' during return (issue 4014).
		if op == ir.ORETURN && ir.SameSafeExpr(l, r) {
			continue
		}

		// Drill down through wrappers (parens, array indexing, ODOT)
		// to the base lvalue, saving any index subexpressions that
		// earlier assignments might affect.
		for {
			// Walk and hoist the lvalue's init statements first so
			// they run before any saved copies below.
			init := ir.TakeInit(l)
			walkStmtList(init)
			early.Append(init...)

			switch ll := l.(type) {
			case *ir.IndexExpr:
				if ll.X.Type().IsArray() {
					save(&ll.Index)
					l = ll.X
					continue
				}
			case *ir.ParenExpr:
				l = ll.X
				continue
			case *ir.SelectorExpr:
				if ll.Op() == ir.ODOT {
					l = ll.X
					continue
				}
			}
			break
		}

		// Save subexpressions of the base lvalue that may be affected.
		var name *ir.Name
		switch l.Op() {
		default:
			base.Fatalf("unexpected lvalue %v", l.Op())
		case ir.ONAME:
			name = l.(*ir.Name)
		case ir.OINDEX, ir.OINDEXMAP:
			l := l.(*ir.IndexExpr)
			save(&l.X)
			save(&l.Index)
		case ir.ODEREF:
			l := l.(*ir.StarExpr)
			save(&l.X)
		case ir.ODOTPTR:
			l := l.(*ir.SelectorExpr)
			save(&l.X)
		}

		// Save the right-hand side if it may be affected.
		save(&r)

		appendWalkStmt(&late, convas(ir.NewAssignStmt(base.Pos, lorig, r), &late))

		// Record what this assignment can affect, so later iterations
		// know what to save.
		if name == nil {
			// Not a direct assignment to a name: conservatively treat
			// it as a write through memory that may alias anything.
			memWrite = true
			continue
		}

		if name.Class == ir.PPARAMOUT && ir.CurFunc.HasDefer() {
			// A write to a result parameter can become visible early
			// if a later expression panics and a deferred function
			// observes the results (issue 22083).
			deferResultWrite = true
			continue
		}

		if ir.IsBlank(name) {
			// Assignments to blank affect nothing.
			continue
		}

		if name.Addrtaken() || !name.OnStack() {
			// Global, heap-escaped, or address-taken variable:
			// conservatively a memory write.
			memWrite = true
			continue
		}

		// Local, non-addrtaken variable: only direct uses of this
		// exact name can alias the write.
		assigned.Add(name)
	}

	// Emit all early copies/inits first, then the assignments.
	early.Append(late.Take()...)
	return early
}
418
419
420
421 func readsMemory(n ir.Node) bool {
422 switch n.Op() {
423 case ir.ONAME:
424 n := n.(*ir.Name)
425 if n.Class == ir.PFUNC {
426 return false
427 }
428 return n.Addrtaken() || !n.OnStack()
429
430 case ir.OADD,
431 ir.OAND,
432 ir.OANDAND,
433 ir.OANDNOT,
434 ir.OBITNOT,
435 ir.OCONV,
436 ir.OCONVIFACE,
437 ir.OCONVNOP,
438 ir.ODIV,
439 ir.ODOT,
440 ir.ODOTTYPE,
441 ir.OLITERAL,
442 ir.OLSH,
443 ir.OMOD,
444 ir.OMUL,
445 ir.ONEG,
446 ir.ONIL,
447 ir.OOR,
448 ir.OOROR,
449 ir.OPAREN,
450 ir.OPLUS,
451 ir.ORSH,
452 ir.OSUB,
453 ir.OXOR:
454 return false
455 }
456
457
458 return true
459 }
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
// appendSlice expands append(l1, l2...) into explicit IR, roughly:
//
//	s := l1
//	newLen := len(s) + len(l2)
//	if uint(newLen) <= uint(cap(s)) {
//		s = s[:newLen]
//	} else {
//		s = growslice(...)
//	}
//	// copy l2 into s[newLen-len(l2):] via typedslicecopy,
//	// slicecopy, or memmove depending on element type and flags
//
// and returns s. The generated statements are appended to init.
func appendSlice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
	walkAppendArgs(n, init)

	l1 := n.Args[0]
	l2 := n.Args[1]
	l2 = cheapExpr(l2, init)
	n.Args[1] = l2

	var nodes ir.Nodes

	// var s []T
	// s = l1
	s := typecheck.TempAt(base.Pos, ir.CurFunc, l1.Type())
	nodes.Append(ir.NewAssignStmt(base.Pos, s, l1))

	elemtype := s.Type().Elem()

	// Decompose the slice header of s.
	oldPtr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, s)
	oldLen := ir.NewUnaryExpr(base.Pos, ir.OLEN, s)
	oldCap := ir.NewUnaryExpr(base.Pos, ir.OCAP, s)

	// Number of elements being appended.
	num := ir.NewUnaryExpr(base.Pos, ir.OLEN, l2)

	// newLen := oldLen + num
	newLen := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TINT])
	nodes.Append(ir.NewAssignStmt(base.Pos, newLen, ir.NewBinaryExpr(base.Pos, ir.OADD, oldLen, num)))

	// if uint(newLen) <= uint(oldCap) — compared as unsigned so an
	// overflowed newLen takes the growslice path.
	nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
	nuint := typecheck.Conv(newLen, types.Types[types.TUINT])
	scapuint := typecheck.Conv(oldCap, types.Types[types.TUINT])
	nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLE, nuint, scapuint)
	nif.Likely = true

	// then: s = s[:newLen] (bounds check elided; fits in cap)
	slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, nil, newLen, nil)
	slice.SetBounded(true)
	nif.Body = []ir.Node{ir.NewAssignStmt(base.Pos, s, slice)}

	// else: s = growslice(oldPtr, newLen, oldCap, num, T)
	call := walkGrowslice(s, nif.PtrInit(), oldPtr, newLen, oldCap, num)
	nif.Else = []ir.Node{ir.NewAssignStmt(base.Pos, s, call)}

	nodes.Append(nif)

	// Index at which copying into s starts:
	//	idx = newLen - len(l2)
	// (computed from newLen rather than reusing oldLen, which refers
	// to s before the reassignment above).
	idx := ir.NewBinaryExpr(base.Pos, ir.OSUB, newLen, ir.NewUnaryExpr(base.Pos, ir.OLEN, l2))

	var ncopy ir.Node
	if elemtype.HasPointers() {
		// Pointer elements: copy via typedslicecopy so write
		// barriers are applied.
		slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, idx, nil, nil)
		slice.SetType(s.Type())
		slice.SetBounded(true)

		ir.CurFunc.SetWBPos(n.Pos())

		fn := typecheck.LookupRuntime("typedslicecopy", l1.Type().Elem(), l2.Type().Elem())
		ptr1, len1 := backingArrayPtrLen(cheapExpr(slice, &nodes))
		ptr2, len2 := backingArrayPtrLen(l2)
		ncopy = mkcall1(fn, types.Types[types.TINT], &nodes, reflectdata.AppendElemRType(base.Pos, n), ptr1, len1, ptr2, len2)
	} else if base.Flag.Cfg.Instrumenting && !base.Flag.CompilingRuntime {
		// Instrumented build: call slicecopy so the runtime can
		// observe the copy.
		slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, idx, nil, nil)
		slice.SetType(s.Type())
		slice.SetBounded(true)

		ptr1, len1 := backingArrayPtrLen(cheapExpr(slice, &nodes))
		ptr2, len2 := backingArrayPtrLen(l2)

		fn := typecheck.LookupRuntime("slicecopy", ptr1.Type().Elem(), ptr2.Type().Elem())
		ncopy = mkcall1(fn, types.Types[types.TINT], &nodes, ptr1, len1, ptr2, len2, ir.NewInt(base.Pos, elemtype.Size()))
	} else {
		// Plain elements: memmove(&s[idx], l2.ptr, len(l2)*sizeof(T)).
		ix := ir.NewIndexExpr(base.Pos, s, idx)
		ix.SetBounded(true)
		addr := typecheck.NodAddr(ix)

		sptr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, l2)

		nwid := cheapExpr(typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OLEN, l2), types.Types[types.TUINTPTR]), &nodes)
		nwid = ir.NewBinaryExpr(base.Pos, ir.OMUL, nwid, ir.NewInt(base.Pos, elemtype.Size()))

		fn := typecheck.LookupRuntime("memmove", elemtype, elemtype)
		ncopy = mkcall1(fn, nil, &nodes, addr, sptr, nwid)
	}
	ln := append(nodes, ncopy)

	typecheck.Stmts(ln)
	walkStmtList(ln)
	init.Append(ln...)
	return s
}
580
581
582
583 func isAppendOfMake(n ir.Node) bool {
584 if base.Flag.N != 0 || base.Flag.Cfg.Instrumenting {
585 return false
586 }
587
588 if n.Typecheck() == 0 {
589 base.Fatalf("missing typecheck: %+v", n)
590 }
591
592 if n.Op() != ir.OAPPEND {
593 return false
594 }
595 call := n.(*ir.CallExpr)
596 if !call.IsDDD || len(call.Args) != 2 || call.Args[1].Op() != ir.OMAKESLICE {
597 return false
598 }
599
600 mk := call.Args[1].(*ir.MakeExpr)
601 if mk.Cap != nil {
602 return false
603 }
604
605
606
607
608
609
610
611 y := mk.Len
612 if !ir.IsConst(y, constant.Int) && y.Type().Size() > types.Types[types.TUINT].Size() {
613 return false
614 }
615
616 return true
617 }
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
// extendSlice expands append(l1, make([]T, l2)...) into explicit IR,
// roughly:
//
//	if l2 >= 0 { } else { panicmakeslicelen() }
//	s := l1
//	if l2 != 0 {
//		n := len(s) + l2
//		if uint(n) <= uint(cap(s)) {
//			s = s[:n]
//			// pointer elems: clear appended section here
//		} else {
//			s = growslice(...)
//		}
//		// non-pointer elems: clear appended section here
//	}
//
// and returns s. The generated statements are appended to init.
func extendSlice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
	// isAppendOfMake made sure all possible positive values of l2 fit
	// in an int. Overflow when converting from e.g. uint to int is
	// handled by the explicit l2 < 0 check generated below.
	l2 := typecheck.Conv(n.Args[1].(*ir.MakeExpr).Len, types.Types[types.TINT])
	l2 = typecheck.Expr(l2)
	n.Args[1] = l2

	walkAppendArgs(n, init)

	l1 := n.Args[0]
	l2 = n.Args[1] // re-read: walkAppendArgs may have replaced the arg

	var nodes []ir.Node

	// if l2 >= 0 (likely), else panicmakeslicelen()
	nifneg := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OGE, l2, ir.NewInt(base.Pos, 0)), nil, nil)
	nifneg.Likely = true

	nifneg.Else = []ir.Node{mkcall("panicmakeslicelen", nil, init)}
	nodes = append(nodes, nifneg)

	// s := l1
	s := typecheck.TempAt(base.Pos, ir.CurFunc, l1.Type())
	nodes = append(nodes, ir.NewAssignStmt(base.Pos, s, l1))

	// if l2 != 0 { grow-and-clear below }; with l2 == 0 there is
	// nothing to append.
	nifnz := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.ONE, l2, ir.NewInt(base.Pos, 0)), nil, nil)
	nifnz.Likely = true
	nodes = append(nodes, nifnz)

	elemtype := s.Type().Elem()

	// nn := len(s) + l2
	nn := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TINT])
	nifnz.Body = append(nifnz.Body, ir.NewAssignStmt(base.Pos, nn, ir.NewBinaryExpr(base.Pos, ir.OADD, ir.NewUnaryExpr(base.Pos, ir.OLEN, s), l2)))

	// if uint(nn) <= uint(cap(s)) — unsigned compare so an overflowed
	// nn takes the growslice path.
	nuint := typecheck.Conv(nn, types.Types[types.TUINT])
	capuint := typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OCAP, s), types.Types[types.TUINT])
	nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLE, nuint, capuint), nil, nil)
	nif.Likely = true

	// then: s = s[:nn]
	nt := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, nil, nn, nil)
	nt.SetBounded(true)
	nif.Body = []ir.Node{ir.NewAssignStmt(base.Pos, s, nt)}

	// else: s = growslice(s.ptr, nn, s.cap, l2, T)
	nif.Else = []ir.Node{
		ir.NewAssignStmt(base.Pos, s, walkGrowslice(s, nif.PtrInit(),
			ir.NewUnaryExpr(base.Pos, ir.OSPTR, s),
			nn,
			ir.NewUnaryExpr(base.Pos, ir.OCAP, s),
			l2)),
	}

	nifnz.Body = append(nifnz.Body, nif)

	// hp := &s[len(s)-l2] — start of the appended section,
	// as unsafe.Pointer.
	ix := ir.NewIndexExpr(base.Pos, s, ir.NewBinaryExpr(base.Pos, ir.OSUB, ir.NewUnaryExpr(base.Pos, ir.OLEN, s), l2))
	ix.SetBounded(true)
	hp := typecheck.ConvNop(typecheck.NodAddr(ix), types.Types[types.TUNSAFEPTR])

	// hn := l2 * sizeof(elemtype), in bytes.
	hn := typecheck.Conv(ir.NewBinaryExpr(base.Pos, ir.OMUL, l2, ir.NewInt(base.Pos, elemtype.Size())), types.Types[types.TUINTPTR])

	clrname := "memclrNoHeapPointers"
	hasPointers := elemtype.HasPointers()
	if hasPointers {
		clrname = "memclrHasPointers"
		ir.CurFunc.SetWBPos(n.Pos())
	}

	var clr ir.Nodes
	clrfn := mkcall(clrname, nil, &clr, hp, hn)
	clr.Append(clrfn)
	if hasPointers {
		// For pointer elements, clear only in the in-place (no-grow)
		// branch — placed in nif.Body; the grow branch is left to
		// produce cleared memory itself.
		nif.Body = append(nif.Body, clr...)
	} else {
		// Otherwise clear unconditionally whenever l2 != 0.
		nifnz.Body = append(nifnz.Body, clr...)
	}

	typecheck.Stmts(nodes)
	walkStmtList(nodes)
	init.Append(nodes...)
	return s
}
744