1
2
3
4
5 package ssa
6
7 import (
8 "cmd/compile/internal/abi"
9 "cmd/compile/internal/base"
10 "cmd/compile/internal/ir"
11 "cmd/compile/internal/types"
12 "cmd/internal/src"
13 "fmt"
14 )
15
// postExpandCallsDecompose runs the user-type and builtin-type
// decomposition passes after calls have been expanded.
func postExpandCallsDecompose(f *Func) {
	decomposeUser(f)
	decomposeBuiltin(f)
}
20
// expandCalls rewrites late-expanded (LE) calls, OpArg references, and
// multi-value results into their ABI-specific register/memory forms.
// It proceeds in phases: scan and collect interesting values, rewrite
// Args, rewrite result selections (SelectN), rewrite the calls
// themselves, and finally rewrite the function's own multi-value exit
// blocks.
func expandCalls(f *Func) {
	// SP is needed to address stack-passed arguments and results.
	sp, _ := f.spSb()

	x := &expandState{
		f:               f,
		debug:           f.pass.debug,
		regSize:         f.Config.RegSize,
		sp:              sp,
		typs:            &f.Config.Types,
		wideSelects:     make(map[*Value]*Value),
		commonArgs:      make(map[selKey]*Value),
		commonSelectors: make(map[selKey]*Value),
		memForCall:      make(map[ID]*Value),
	}

	// For targets where a 64-bit integer must be split into two words,
	// pick which half ("first" = lower memory address) comes first.
	if f.Config.BigEndian {
		x.firstOp = OpInt64Hi
		x.secondOp = OpInt64Lo
		x.firstType = x.typs.Int32
		x.secondType = x.typs.UInt32
	} else {
		x.firstOp = OpInt64Lo
		x.secondOp = OpInt64Hi
		x.firstType = x.typs.UInt32
		x.secondType = x.typs.Int32
	}

	// Values collected during the scan phase.
	var selects []*Value
	var calls []*Value
	var args []*Value
	var exitBlocks []*Block

	var m0 *Value // the function's initial memory (OpInitMem)

	// Phase 1: scan all values, collecting the ones to rewrite and
	// doing the cheap rewrites (SelectNAddr) immediately.
	for _, b := range f.Blocks {
		for _, v := range b.Values {
			switch v.Op {
			case OpInitMem:
				m0 = v

			case OpClosureLECall, OpInterLECall, OpStaticLECall, OpTailLECall, OpTailLECallInter:
				calls = append(calls, v)

			case OpArg:
				args = append(args, v)

			case OpStore:
				// Record a store of a wide (not-SSA-able) SelectN; it
				// is handled specially in the selects phase below.
				if a := v.Args[1]; a.Op == OpSelectN && !CanSSA(a.Type) {
					if a.Uses > 1 {
						panic(fmt.Errorf("Saw double use of wide SelectN %s operand of Store %s",
							a.LongString(), v.LongString()))
					}
					x.wideSelects[a] = v
				}

			case OpSelectN:
				if v.Type == types.TypeMem {
					// The memory projection of a call; there must be
					// exactly one. Renumber it to follow the call's
					// register results.
					call := v.Args[0]
					aux := call.Aux.(*AuxCall)
					mem := x.memForCall[call.ID]
					if mem == nil {
						v.AuxInt = int64(aux.abiInfo.OutRegistersUsed())
						x.memForCall[call.ID] = v
					} else {
						panic(fmt.Errorf("Saw two memories for call %v, %v and %v", call, mem, v))
					}
				} else {
					selects = append(selects, v)
				}

			case OpSelectNAddr:
				// Address of a call result: rewrite now to SP+offset.
				call := v.Args[0]
				which := v.AuxInt
				aux := call.Aux.(*AuxCall)
				pt := v.Type
				off := x.offsetFrom(x.f.Entry, x.sp, aux.OffsetOfResult(which), pt)
				v.copyOf(off)
			}
		}

		// Blocks returning multiple values (controlled by OpMakeResult)
		// are rewritten in the final phase.
		if isBlockMultiValueExit(b) {
			exitBlocks = append(exitBlocks, b)
		}
	}

	// Phase 2: rewrite each incoming parameter (OpArg) according to
	// its ABI assignment (registers and/or a stack slot).
	for _, v := range args {
		var rc registerCursor
		a := x.prAssignForArg(v)
		aux := x.f.OwnAux
		regs := a.Registers
		var offset int64
		if len(regs) == 0 {
			offset = a.FrameOffset(aux.abiInfo)
		}
		auxBase := x.offsetFrom(x.f.Entry, x.sp, offset, types.NewPtr(v.Type))
		rc.init(regs, aux.abiInfo, nil, auxBase, 0)
		x.rewriteSelectOrArg(f.Entry.Pos, f.Entry, v, v, m0, v.Type, rc)
	}

	// Phase 3: rewrite non-memory result selections.
	for _, v := range selects {
		if v.Op == OpInvalid { // may have been invalidated by an earlier rewrite
			continue
		}

		call := v.Args[0]
		aux := call.Aux.(*AuxCall)
		mem := x.memForCall[call.ID]
		if mem == nil {
			// Synthesize the call's memory projection if none existed.
			mem = call.Block.NewValue1I(call.Pos, OpSelectN, types.TypeMem, int64(aux.abiInfo.OutRegistersUsed()), call)
			x.memForCall[call.ID] = mem
		}

		i := v.AuxInt
		regs := aux.RegsOfResult(i)

		// Wide selects (too large for SSA) that feed a Store are
		// turned into per-register stores or a single Move.
		if store := x.wideSelects[v]; store != nil {
			storeAddr := store.Args[0]
			mem := store.Args[2]
			if len(regs) > 0 {
				// Result arrives in registers; store it piecewise.
				var rc registerCursor
				rc.init(regs, aux.abiInfo, nil, storeAddr, 0)
				mem = x.rewriteWideSelectToStores(call.Pos, call.Block, v, mem, v.Type, rc)
				store.copyOf(mem)
			} else {
				// Result is in the call's stack result area; copy it
				// to the store's destination with a Move.
				offset := aux.OffsetOfResult(i)
				auxBase := x.offsetFrom(x.f.Entry, x.sp, offset, types.NewPtr(v.Type))

				move := store.Block.NewValue3A(store.Pos, OpMove, types.TypeMem, v.Type, storeAddr, auxBase, mem)
				move.AuxInt = v.Type.Size()
				store.copyOf(move)
			}
			continue
		}

		var auxBase *Value
		if len(regs) == 0 {
			offset := aux.OffsetOfResult(i)
			auxBase = x.offsetFrom(x.f.Entry, x.sp, offset, types.NewPtr(v.Type))
		}
		var rc registerCursor
		rc.init(regs, aux.abiInfo, nil, auxBase, 0)
		x.rewriteSelectOrArg(call.Pos, call.Block, v, v, mem, v.Type, rc)
	}

	// rewriteCall expands v's arguments, converts the LE op to its
	// final op, and gives the call its Results type (register result
	// types plus memory).
	rewriteCall := func(v *Value, newOp Op, argStart int) {
		x.rewriteCallArgs(v, argStart)
		v.Op = newOp
		rts := abi.RegisterTypes(v.Aux.(*AuxCall).abiInfo.OutParams())
		v.Type = types.NewResults(append(rts, types.TypeMem))
	}

	// Phase 4: rewrite the calls. argStart skips leading operands that
	// are not ordinary arguments (closure/interface operands).
	for _, v := range calls {
		switch v.Op {
		case OpStaticLECall:
			rewriteCall(v, OpStaticCall, 0)
		case OpTailLECall:
			rewriteCall(v, OpTailCall, 0)
		case OpTailLECallInter:
			rewriteCall(v, OpTailCallInter, 1)
		case OpClosureLECall:
			rewriteCall(v, OpClosureCall, 2)
		case OpInterLECall:
			rewriteCall(v, OpInterCall, 1)
		}
	}

	// Phase 5: rewrite this function's own returns (OpMakeResult
	// controls of multi-value exit blocks).
	for _, b := range exitBlocks {
		v := b.Controls[0]
		x.rewriteFuncResults(v, b, f.OwnAux)
		b.SetControl(v)
	}

}
219
220 func (x *expandState) rewriteFuncResults(v *Value, b *Block, aux *AuxCall) {
221
222
223
224
225
226 m0 := v.MemoryArg()
227 mem := m0
228
229 allResults := []*Value{}
230 var oldArgs []*Value
231 argsWithoutMem := v.Args[:len(v.Args)-1]
232
233 for j, a := range argsWithoutMem {
234 oldArgs = append(oldArgs, a)
235 i := int64(j)
236 auxType := aux.TypeOfResult(i)
237 auxBase := b.NewValue2A(v.Pos, OpLocalAddr, types.NewPtr(auxType), aux.NameOfResult(i), x.sp, mem)
238 auxOffset := int64(0)
239 aRegs := aux.RegsOfResult(int64(j))
240 if a.Op == OpDereference {
241 a.Op = OpLoad
242 }
243 var rc registerCursor
244 var result *[]*Value
245 if len(aRegs) > 0 {
246 result = &allResults
247 } else {
248 if a.Op == OpLoad && a.Args[0].Op == OpLocalAddr && a.Args[0].Aux == aux.NameOfResult(i) {
249 continue
250 }
251 }
252 rc.init(aRegs, aux.abiInfo, result, auxBase, auxOffset)
253 mem = x.decomposeAsNecessary(v.Pos, b, a, mem, rc)
254 }
255 v.resetArgs()
256 v.AddArgs(allResults...)
257 v.AddArg(mem)
258 for _, a := range oldArgs {
259 if a.Uses == 0 {
260 if x.debug > 1 {
261 x.Printf("...marking %v unused\n", a.LongString())
262 }
263 x.invalidateRecursively(a)
264 }
265 }
266 v.Type = types.NewResults(append(abi.RegisterTypes(aux.abiInfo.OutParams()), types.TypeMem))
267 return
268 }
269
270 func (x *expandState) rewriteCallArgs(v *Value, firstArg int) {
271 if x.debug > 1 {
272 x.indent(3)
273 defer x.indent(-3)
274 x.Printf("rewriteCallArgs(%s; %d)\n", v.LongString(), firstArg)
275 }
276
277 aux := v.Aux.(*AuxCall)
278 m0 := v.MemoryArg()
279 mem := m0
280 allResults := []*Value{}
281 oldArgs := []*Value{}
282 argsWithoutMem := v.Args[firstArg : len(v.Args)-1]
283
284 sp := x.sp
285 if v.Op == OpTailLECall || v.Op == OpTailLECallInter {
286
287
288 sp = v.Block.NewValue1(src.NoXPos, OpGetCallerSP, x.typs.Uintptr, mem)
289 }
290
291 for i, a := range argsWithoutMem {
292 oldArgs = append(oldArgs, a)
293 auxI := int64(i)
294 aRegs := aux.RegsOfArg(auxI)
295 aType := aux.TypeOfArg(auxI)
296
297 if a.Op == OpDereference {
298 a.Op = OpLoad
299 }
300 var rc registerCursor
301 var result *[]*Value
302 var aOffset int64
303 if len(aRegs) > 0 {
304 result = &allResults
305 } else {
306 aOffset = aux.OffsetOfArg(auxI)
307 }
308 if v.Op == OpTailLECall && a.Op == OpArg && a.AuxInt == 0 {
309
310
311 n := a.Aux.(*ir.Name)
312 if n.Class == ir.PPARAM && n.FrameOffset()+x.f.Config.ctxt.Arch.FixedFrameSize == aOffset {
313 continue
314 }
315 }
316 if x.debug > 1 {
317 x.Printf("...storeArg %s, %v, %d\n", a.LongString(), aType, aOffset)
318 }
319
320 rc.init(aRegs, aux.abiInfo, result, sp, aOffset)
321 mem = x.decomposeAsNecessary(v.Pos, v.Block, a, mem, rc)
322 }
323 var preArgStore [2]*Value
324 preArgs := append(preArgStore[:0], v.Args[0:firstArg]...)
325 v.resetArgs()
326 v.AddArgs(preArgs...)
327 v.AddArgs(allResults...)
328 v.AddArg(mem)
329 for _, a := range oldArgs {
330 if a.Uses == 0 {
331 x.invalidateRecursively(a)
332 }
333 }
334
335 return
336 }
337
338 func (x *expandState) decomposePair(pos src.XPos, b *Block, a, mem *Value, t0, t1 *types.Type, o0, o1 Op, rc *registerCursor) *Value {
339 e := b.NewValue1(pos, o0, t0, a)
340 pos = pos.WithNotStmt()
341 mem = x.decomposeAsNecessary(pos, b, e, mem, rc.next(t0))
342 e = b.NewValue1(pos, o1, t1, a)
343 mem = x.decomposeAsNecessary(pos, b, e, mem, rc.next(t1))
344 return mem
345 }
346
347 func (x *expandState) decomposeOne(pos src.XPos, b *Block, a, mem *Value, t0 *types.Type, o0 Op, rc *registerCursor) *Value {
348 e := b.NewValue1(pos, o0, t0, a)
349 pos = pos.WithNotStmt()
350 mem = x.decomposeAsNecessary(pos, b, e, mem, rc.next(t0))
351 return mem
352 }
353
354
355
356
357
358
359
360
361
// decomposeAsNecessary decomposes a (an argument or result value) into
// ABI-sized pieces, recursing through aggregate types. Register-bound
// pieces are appended to rc's register-value list; memory-bound pieces
// are stored at rc's destination, threading memory through from m0.
// It returns the updated memory.
func (x *expandState) decomposeAsNecessary(pos src.XPos, b *Block, a, m0 *Value, rc registerCursor) *Value {
	if x.debug > 1 {
		x.indent(3)
		defer x.indent(-3)
	}
	at := a.Type
	if at.Size() == 0 {
		// Zero-size values contribute nothing.
		return m0
	}
	if a.Op == OpDereference {
		// A Dereference is treated as a Load for this rewrite.
		a.Op = OpLoad
	}

	// A memory-destined value too large for SSA is copied wholesale
	// with a Move from the load's source address.
	if !rc.hasRegs() && !CanSSA(at) {
		dst := x.offsetFrom(b, rc.storeDest, rc.storeOffset, types.NewPtr(at))
		if x.debug > 1 {
			x.Printf("...recur store %s at %s\n", a.LongString(), dst.LongString())
		}
		if a.Op == OpLoad {
			m0 = b.NewValue3A(pos, OpMove, types.TypeMem, at, dst, a.Args[0], m0)
			m0.AuxInt = at.Size()
			return m0
		} else {
			panic(fmt.Errorf("Store of not a load"))
		}
	}

	mem := m0
	switch at.Kind() {
	case types.TARRAY:
		// Decompose element by element.
		et := at.Elem()
		for i := int64(0); i < at.NumElem(); i++ {
			e := b.NewValue1I(pos, OpArraySelect, et, i, a)
			pos = pos.WithNotStmt()
			mem = x.decomposeAsNecessary(pos, b, e, mem, rc.next(et))
		}
		return mem

	case types.TSTRUCT:
		if at.IsSIMD() {
			break // SIMD structs are handled as scalars below
		}
		// Decompose field by field.
		for i := 0; i < at.NumFields(); i++ {
			et := at.Field(i).Type
			e := b.NewValue1I(pos, OpStructSelect, et, int64(i), a)
			pos = pos.WithNotStmt()
			if x.debug > 1 {
				x.Printf("...recur decompose %s, %v\n", e.LongString(), et)
			}
			mem = x.decomposeAsNecessary(pos, b, e, mem, rc.next(et))
		}
		return mem

	case types.TSLICE:
		// Slice = pointer, length, capacity.
		mem = x.decomposeOne(pos, b, a, mem, at.Elem().PtrTo(), OpSlicePtr, &rc)
		pos = pos.WithNotStmt()
		mem = x.decomposeOne(pos, b, a, mem, x.typs.Int, OpSliceLen, &rc)
		return x.decomposeOne(pos, b, a, mem, x.typs.Int, OpSliceCap, &rc)

	case types.TSTRING:
		// String = pointer, length.
		return x.decomposePair(pos, b, a, mem, x.typs.BytePtr, x.typs.Int, OpStringPtr, OpStringLen, &rc)

	case types.TINTER:
		// Interface = itab/type word, data word.
		mem = x.decomposeOne(pos, b, a, mem, x.typs.Uintptr, OpITab, &rc)
		pos = pos.WithNotStmt()

		if a.Op == OpIMake {
			data := a.Args[1]
			// Unwrap single-element struct/array wrappers to find the
			// sole nonzero-size payload of the data word.
			for data.Op == OpStructMake || data.Op == OpArrayMake1 {
				for _, a := range data.Args {
					if a.Type.Size() > 0 {
						data = a
						break
					}
				}
			}
			return x.decomposeAsNecessary(pos, b, data, mem, rc.next(data.Type))
		}
		return x.decomposeOne(pos, b, a, mem, x.typs.BytePtr, OpIData, &rc)

	case types.TCOMPLEX64:
		return x.decomposePair(pos, b, a, mem, x.typs.Float32, x.typs.Float32, OpComplexReal, OpComplexImag, &rc)

	case types.TCOMPLEX128:
		return x.decomposePair(pos, b, a, mem, x.typs.Float64, x.typs.Float64, OpComplexReal, OpComplexImag, &rc)

	case types.TINT64:
		// Split a 64-bit int into two words on 32-bit targets, in
		// endian-dependent order (x.firstOp/x.secondOp).
		if at.Size() > x.regSize {
			return x.decomposePair(pos, b, a, mem, x.firstType, x.secondType, x.firstOp, x.secondOp, &rc)
		}
	case types.TUINT64:
		if at.Size() > x.regSize {
			return x.decomposePair(pos, b, a, mem, x.typs.UInt32, x.typs.UInt32, x.firstOp, x.secondOp, &rc)
		}
	}

	// Base case: a is an ABI-sized leaf; either collect it for a
	// register or store it at the cursor's memory destination.
	if rc.hasRegs() {
		if x.debug > 1 {
			x.Printf("...recur addArg %s\n", a.LongString())
		}
		rc.addArg(a)
	} else {
		dst := x.offsetFrom(b, rc.storeDest, rc.storeOffset, types.NewPtr(at))
		if x.debug > 1 {
			x.Printf("...recur store %s at %s\n", a.LongString(), dst.LongString())
		}
		mem = b.NewValue3A(pos, OpStore, types.TypeMem, at, dst, a, mem)
	}

	return mem
}
477
478
479
480
481
482
483
484
485
486
487
488
489
490
// rewriteSelectOrArg rewrites a reference to a piece of container —
// which must be an OpArg (incoming parameter) or OpSelectN (call
// result) — into its ABI form. Aggregate types are rebuilt from their
// recursively rewritten components (OpStructMake, OpSliceMake, etc.);
// leaves become register references (OpArgIntReg/OpArgFloatReg or a
// renumbered OpSelectN) or loads from the value's memory location.
// If a is non-nil it is rewritten in place; otherwise a new value is
// created. Common selectors are deduplicated via x.commonSelectors.
// Returns the rewritten value.
func (x *expandState) rewriteSelectOrArg(pos src.XPos, b *Block, container, a, m0 *Value, at *types.Type, rc registerCursor) *Value {

	if at == types.TypeMem {
		// Memory "pieces" just become the current memory.
		a.copyOf(m0)
		return a
	}

	// makeOf either builds a fresh value (a == nil) or repurposes a
	// in place (clearing its aux/args and resetting op/type/pos).
	makeOf := func(a *Value, op Op, args []*Value) *Value {
		if a == nil {
			a = b.NewValue0(pos, op, at)
			a.AddArgs(args...)
		} else {
			a.resetArgs()
			a.Aux, a.AuxInt = nil, 0
			a.Pos, a.Op, a.Type = pos, op, at
			a.AddArgs(args...)
		}
		return a
	}

	if at.Size() == 0 {
		// Zero-size pieces become OpEmpty.
		return makeOf(a, OpEmpty, nil)
	}

	// Deduplicate: reuse an identical selector if already built.
	sk := selKey{from: container, size: 0, offsetOrIndex: rc.storeOffset, typ: at}
	dupe := x.commonSelectors[sk]
	if dupe != nil {
		if a == nil {
			return dupe
		}
		a.copyOf(dupe)
		return a
	}

	var argStore [10]*Value
	args := argStore[:0]

	addArg := func(a0 *Value) {
		if a0 == nil {
			as := "<nil>"
			if a != nil {
				as = a.LongString()
			}
			panic(fmt.Errorf("a0 should not be nil, a=%v, container=%v, at=%v", as, container.LongString(), at))
		}
		args = append(args, a0)
	}

	// Rebuild aggregates from recursively rewritten components; each
	// case records its result in commonSelectors before returning.
	switch at.Kind() {
	case types.TARRAY:
		et := at.Elem()
		for i := int64(0); i < at.NumElem(); i++ {
			e := x.rewriteSelectOrArg(pos, b, container, nil, m0, et, rc.next(et))
			addArg(e)
		}
		a = makeOf(a, OpArrayMake1, args)
		x.commonSelectors[sk] = a
		return a

	case types.TSTRUCT:
		if at.IsSIMD() {
			break // SIMD structs are leaves; fall through to the leaf path
		}
		for i := 0; i < at.NumFields(); i++ {
			et := at.Field(i).Type
			e := x.rewriteSelectOrArg(pos, b, container, nil, m0, et, rc.next(et))
			if e == nil {
				panic(fmt.Errorf("nil e, et=%v, et.Size()=%d, i=%d", et, et.Size(), i))
			}
			addArg(e)
			pos = pos.WithNotStmt()
		}
		if at.NumFields() > MaxStruct && !types.IsDirectIface(at) {
			panic(fmt.Errorf("Too many fields (%d, %d bytes), container=%s", at.NumFields(), at.Size(), container.LongString()))
		}
		a = makeOf(a, OpStructMake, args)
		x.commonSelectors[sk] = a
		return a

	case types.TSLICE:
		// Slice = pointer, length, capacity.
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, at.Elem().PtrTo(), rc.next(x.typs.BytePtr)))
		pos = pos.WithNotStmt()
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.Int, rc.next(x.typs.Int)))
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.Int, rc.next(x.typs.Int)))
		a = makeOf(a, OpSliceMake, args)
		x.commonSelectors[sk] = a
		return a

	case types.TSTRING:
		// String = pointer, length.
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.BytePtr, rc.next(x.typs.BytePtr)))
		pos = pos.WithNotStmt()
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.Int, rc.next(x.typs.Int)))
		a = makeOf(a, OpStringMake, args)
		x.commonSelectors[sk] = a
		return a

	case types.TINTER:
		// Interface = type/itab word, data word.
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.Uintptr, rc.next(x.typs.Uintptr)))
		pos = pos.WithNotStmt()
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.BytePtr, rc.next(x.typs.BytePtr)))
		a = makeOf(a, OpIMake, args)
		x.commonSelectors[sk] = a
		return a

	case types.TCOMPLEX64:
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.Float32, rc.next(x.typs.Float32)))
		pos = pos.WithNotStmt()
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.Float32, rc.next(x.typs.Float32)))
		a = makeOf(a, OpComplexMake, args)
		x.commonSelectors[sk] = a
		return a

	case types.TCOMPLEX128:
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.Float64, rc.next(x.typs.Float64)))
		pos = pos.WithNotStmt()
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.Float64, rc.next(x.typs.Float64)))
		a = makeOf(a, OpComplexMake, args)
		x.commonSelectors[sk] = a
		return a

	case types.TINT64:
		// 64-bit ints split across two words on 32-bit targets;
		// OpInt64Make takes (hi, lo), so swap for little-endian.
		if at.Size() > x.regSize {
			addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.firstType, rc.next(x.firstType)))
			pos = pos.WithNotStmt()
			addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.secondType, rc.next(x.secondType)))
			if !x.f.Config.BigEndian {
				args[0], args[1] = args[1], args[0]
			}
			a = makeOf(a, OpInt64Make, args)
			x.commonSelectors[sk] = a
			return a
		}
	case types.TUINT64:
		if at.Size() > x.regSize {
			addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.UInt32, rc.next(x.typs.UInt32)))
			pos = pos.WithNotStmt()
			addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.UInt32, rc.next(x.typs.UInt32)))
			if !x.f.Config.BigEndian {
				args[0], args[1] = args[1], args[0]
			}
			a = makeOf(a, OpInt64Make, args)
			x.commonSelectors[sk] = a
			return a
		}
	}

	// Leaf case: at is an ABI-sized scalar piece of container.
	if container.Op == OpArg {
		if rc.hasRegs() {
			// Register-passed parameter piece: OpArg{Int,Float}Reg.
			op, i := rc.ArgOpAndRegisterFor()
			name := container.Aux.(*ir.Name)
			a = makeOf(a, op, nil)
			a.AuxInt = i
			a.Aux = &AuxNameOffset{name, rc.storeOffset}
		} else {
			// Stack-passed parameter piece: an OpArg at an adjusted
			// offset, deduplicated through x.commonArgs.
			key := selKey{container, rc.storeOffset, at.Size(), at}
			w := x.commonArgs[key]
			if w != nil && w.Uses != 0 {
				if a == nil {
					a = w
				} else {
					a.copyOf(w)
				}
			} else {
				if a == nil {
					aux := container.Aux
					auxInt := container.AuxInt + rc.storeOffset
					a = container.Block.NewValue0IA(container.Pos, OpArg, at, auxInt, aux)
				} else {
					// a already exists (presumably the Arg itself being
					// rewritten); it becomes the canonical entry below.
				}
				x.commonArgs[key] = a
			}
		}
	} else if container.Op == OpSelectN {
		call := container.Args[0]
		aux := call.Aux.(*AuxCall)
		which := container.AuxInt

		if at == types.TypeMem {
			// Defensive check; TypeMem is normally handled at entry.
			if a != m0 || a != x.memForCall[call.ID] {
				panic(fmt.Errorf("Memories %s, %s, and %s should all be equal after %s", a.LongString(), m0.LongString(), x.memForCall[call.ID], call.LongString()))
			}
		} else if rc.hasRegs() {
			// Register result piece: renumber to a flat register index
			// counting all registers of the preceding results.
			firstReg := uint32(0)
			for i := 0; i < int(which); i++ {
				firstReg += uint32(len(aux.abiInfo.OutParam(i).Registers))
			}
			reg := int64(rc.nextSlice + Abi1RO(firstReg))
			a = makeOf(a, OpSelectN, []*Value{call})
			a.AuxInt = reg
		} else {
			// Stack result piece: load from its slot in the result area.
			off := x.offsetFrom(x.f.Entry, x.sp, rc.storeOffset+aux.OffsetOfResult(which), types.NewPtr(at))
			a = makeOf(a, OpLoad, []*Value{off, m0})
		}

	} else {
		panic(fmt.Errorf("Expected container OpArg or OpSelectN, saw %v instead", container.LongString()))
	}

	x.commonSelectors[sk] = a
	return a
}
701
702
703
704
705
// rewriteWideSelectToStores handles a wide (not-SSA-able) result of
// type at, selected from container (an OpSelectN of a call), that
// arrives in registers: it recurses through the type and stores each
// register-sized piece to the cursor's memory destination, threading
// memory through from m0. Returns the updated memory.
func (x *expandState) rewriteWideSelectToStores(pos src.XPos, b *Block, container, m0 *Value, at *types.Type, rc registerCursor) *Value {

	if at.Size() == 0 {
		// Zero-size pieces store nothing.
		return m0
	}

	// Recurse through aggregate types, advancing rc per component.
	switch at.Kind() {
	case types.TARRAY:
		et := at.Elem()
		for i := int64(0); i < at.NumElem(); i++ {
			m0 = x.rewriteWideSelectToStores(pos, b, container, m0, et, rc.next(et))
		}
		return m0

	case types.TSTRUCT:
		if at.IsSIMD() {
			break // SIMD structs are stored as leaves below
		}
		for i := 0; i < at.NumFields(); i++ {
			et := at.Field(i).Type
			m0 = x.rewriteWideSelectToStores(pos, b, container, m0, et, rc.next(et))
			pos = pos.WithNotStmt()
		}
		return m0

	case types.TSLICE:
		// Slice = pointer, length, capacity.
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, at.Elem().PtrTo(), rc.next(x.typs.BytePtr))
		pos = pos.WithNotStmt()
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.Int, rc.next(x.typs.Int))
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.Int, rc.next(x.typs.Int))
		return m0

	case types.TSTRING:
		// String = pointer, length.
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.BytePtr, rc.next(x.typs.BytePtr))
		pos = pos.WithNotStmt()
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.Int, rc.next(x.typs.Int))
		return m0

	case types.TINTER:
		// Interface = type/itab word, data word.
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.Uintptr, rc.next(x.typs.Uintptr))
		pos = pos.WithNotStmt()
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.BytePtr, rc.next(x.typs.BytePtr))
		return m0

	case types.TCOMPLEX64:
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.Float32, rc.next(x.typs.Float32))
		pos = pos.WithNotStmt()
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.Float32, rc.next(x.typs.Float32))
		return m0

	case types.TCOMPLEX128:
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.Float64, rc.next(x.typs.Float64))
		pos = pos.WithNotStmt()
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.Float64, rc.next(x.typs.Float64))
		return m0

	case types.TINT64:
		// 64-bit ints split across two words on 32-bit targets, in
		// endian-dependent order (x.firstType/x.secondType).
		if at.Size() > x.regSize {
			m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.firstType, rc.next(x.firstType))
			pos = pos.WithNotStmt()
			m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.secondType, rc.next(x.secondType))
			return m0
		}
	case types.TUINT64:
		if at.Size() > x.regSize {
			m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.UInt32, rc.next(x.typs.UInt32))
			pos = pos.WithNotStmt()
			m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.UInt32, rc.next(x.typs.UInt32))
			return m0
		}
	}

	// Leaf case: select the register piece from the call and store it.
	if container.Op == OpSelectN {
		call := container.Args[0]
		aux := call.Aux.(*AuxCall)
		which := container.AuxInt

		if rc.hasRegs() {
			// Flat register index counts all registers of the
			// preceding results of the same call.
			firstReg := uint32(0)
			for i := 0; i < int(which); i++ {
				firstReg += uint32(len(aux.abiInfo.OutParam(i).Registers))
			}
			reg := int64(rc.nextSlice + Abi1RO(firstReg))
			a := b.NewValue1I(pos, OpSelectN, at, reg, call)
			dst := x.offsetFrom(b, rc.storeDest, rc.storeOffset, types.NewPtr(at))
			m0 = b.NewValue3A(pos, OpStore, types.TypeMem, at, dst, a, m0)
		} else {
			panic(fmt.Errorf("Expected rc to have registers"))
		}
	} else {
		panic(fmt.Errorf("Expected container OpSelectN, saw %v instead", container.LongString()))
	}
	return m0
}
802
803 func isBlockMultiValueExit(b *Block) bool {
804 return (b.Kind == BlockRet || b.Kind == BlockRetJmp) && b.Controls[0] != nil && b.Controls[0].Op == OpMakeResult
805 }
806
807 type Abi1RO uint8
808
809
// registerCursor tracks progress while decomposing a value into
// registers and/or memory.
type registerCursor struct {
	storeDest   *Value         // base address for memory stores (nil if value is all-register)
	storeOffset int64          // current byte offset from storeDest
	regs        []abi.RegIndex // registers assigned to this value; empty means memory-only
	nextSlice   Abi1RO         // index of the next register-sized piece within regs
	config      *abi.ABIConfig // used to compute per-type register widths
	regValues   *[]*Value      // accumulates values destined for registers
}
818
819 func (c *registerCursor) String() string {
820 dest := "<none>"
821 if c.storeDest != nil {
822 dest = fmt.Sprintf("%s+%d", c.storeDest.String(), c.storeOffset)
823 }
824 regs := "<none>"
825 if c.regValues != nil {
826 regs = ""
827 for i, x := range *c.regValues {
828 if i > 0 {
829 regs = regs + "; "
830 }
831 regs = regs + x.LongString()
832 }
833 }
834
835
836 return fmt.Sprintf("RCSR{storeDest=%v, regsLen=%d, nextSlice=%d, regValues=[%s]}", dest, len(c.regs), c.nextSlice, regs)
837 }
838
839
840
// next returns a snapshot of the cursor positioned (aligned) for a
// value of type t, then advances the receiver past that value: the
// store offset moves by t's aligned size, and the register index moves
// by the number of registers t occupies (only while registers remain).
func (c *registerCursor) next(t *types.Type) registerCursor {
	c.storeOffset = types.RoundUp(c.storeOffset, t.Alignment()) // align before snapshotting
	rc := *c
	c.storeOffset = types.RoundUp(c.storeOffset+t.Size(), t.Alignment())
	if int(c.nextSlice) < len(c.regs) {
		w := c.config.NumParamRegs(t)
		c.nextSlice += Abi1RO(w)
	}
	return rc
}
851
852
853 func (c *registerCursor) plus(regWidth Abi1RO) registerCursor {
854 rc := *c
855 rc.nextSlice += regWidth
856 return rc
857 }
858
859 func (c *registerCursor) init(regs []abi.RegIndex, info *abi.ABIParamResultInfo, result *[]*Value, storeDest *Value, storeOffset int64) {
860 c.regs = regs
861 c.nextSlice = 0
862 c.storeOffset = storeOffset
863 c.storeDest = storeDest
864 c.config = info.Config()
865 c.regValues = result
866 }
867
// addArg appends v to the cursor's list of register-bound values.
func (c *registerCursor) addArg(v *Value) {
	*c.regValues = append(*c.regValues, v)
}
871
// hasRegs reports whether this cursor assigns (part of) the value to
// registers.
func (c *registerCursor) hasRegs() bool {
	return len(c.regs) > 0
}
875
// ArgOpAndRegisterFor returns the OpArg{Float,Int}Reg op and register
// number for the register at the cursor's current position.
func (c *registerCursor) ArgOpAndRegisterFor() (Op, int64) {
	r := c.regs[c.nextSlice]
	return ArgOpAndRegisterFor(r, c.config)
}
880
881
882
883 func ArgOpAndRegisterFor(r abi.RegIndex, abiConfig *abi.ABIConfig) (Op, int64) {
884 i := abiConfig.FloatIndexFor(r)
885 if i >= 0 {
886 return OpArgFloatReg, i
887 }
888 return OpArgIntReg, int64(r)
889 }
890
// selKey identifies a particular piece selected out of a containing
// value (an Arg or SelectN); it keys the deduplication maps
// commonSelectors and commonArgs.
type selKey struct {
	from          *Value // the containing value being selected from
	offsetOrIndex int64  // byte offset (or index) of the piece within from
	size          int64
	typ           *types.Type // type of the selected piece
}
897
// expandState bundles the state shared by the expand-calls rewrites.
type expandState struct {
	f       *Func
	debug   int    // >0 enables logging; >1 enables verbose logging
	regSize int64  // size in bytes of a machine register
	sp      *Value // the function's SP value, for addressing stack args/results
	typs    *Types

	// Ops and types used to split 64-bit integers into two words on
	// 32-bit targets; "first" is the half at the lower memory address.
	firstOp    Op
	secondOp   Op
	firstType  *types.Type
	secondType *types.Type

	wideSelects     map[*Value]*Value // wide (non-SSA-able) SelectN -> the Store that consumes it
	commonSelectors map[selKey]*Value // deduplicated selector values
	commonArgs      map[selKey]*Value // deduplicated Arg pieces
	memForCall      map[ID]*Value     // memory projection (SelectN) for each call, by call ID
	indentLevel     int               // current indentation for debug output
}
916
917
918 func (x *expandState) offsetFrom(b *Block, from *Value, offset int64, pt *types.Type) *Value {
919 ft := from.Type
920 if offset == 0 {
921 if ft == pt {
922 return from
923 }
924
925 if (ft.IsPtr() || ft.IsUnsafePtr()) && pt.IsPtr() {
926 return from
927 }
928 }
929
930 for from.Op == OpOffPtr {
931 offset += from.AuxInt
932 from = from.Args[0]
933 }
934 if from == x.sp {
935 return x.f.ConstOffPtrSP(pt, offset, x.sp)
936 }
937 return b.NewValue1I(from.Pos.WithNotStmt(), OpOffPtr, pt, offset, from)
938 }
939
940
// prAssignForArg returns the ABI parameter assignment for v, which
// must be an OpArg.
func (x *expandState) prAssignForArg(v *Value) *abi.ABIParamAssignment {
	if v.Op != OpArg {
		panic(fmt.Errorf("Wanted OpArg, instead saw %s", v.LongString()))
	}
	return ParamAssignmentForArgName(x.f, v.Aux.(*ir.Name))
}
947
948
949 func ParamAssignmentForArgName(f *Func, name *ir.Name) *abi.ABIParamAssignment {
950 abiInfo := f.OwnAux.abiInfo
951 ip := abiInfo.InParams()
952 for i, a := range ip {
953 if a.Name == name {
954 return &ip[i]
955 }
956 }
957 panic(fmt.Errorf("Did not match param %v in prInfo %+v", name, abiInfo.InParams()))
958 }
959
960
// indent adjusts the debug-output indentation level by n (which may be
// negative to outdent).
func (x *expandState) indent(n int) {
	x.indentLevel += n
}
964
965
// Printf prints to standard output, prefixed by the current debug
// indentation.
func (x *expandState) Printf(format string, a ...any) (n int, err error) {
	if x.indentLevel > 0 {
		fmt.Printf("%[1]*s", x.indentLevel, "")
	}
	return fmt.Printf(format, a...)
}
972
// invalidateRecursively invalidates a and, transitively, any of its
// inputs that become unused, logging at higher debug levels and
// reporting when a statement-boundary position marker is lost.
func (x *expandState) invalidateRecursively(a *Value) {
	var s string
	if x.debug > 0 {
		plus := " "
		if a.Pos.IsStmt() == src.PosIsStmt {
			plus = " +" // mark values that carry a statement boundary
		}
		s = a.String() + plus + a.Pos.LineNumber() + " " + a.LongString()
		if x.debug > 1 {
			x.Printf("...marking %v unused\n", s)
		}
	}
	lost := a.invalidateRecursively()
	if x.debug&1 != 0 && lost { // report lost statement markers
		x.Printf("Lost statement marker in %s on former %s\n", base.Ctxt.Pkgpath+"."+x.f.Name, s)
	}
}
990
View as plain text