5 package riscv64
6
7 import (
8 "cmd/compile/internal/base"
9 "cmd/compile/internal/ir"
10 "cmd/compile/internal/logopt"
11 "cmd/compile/internal/objw"
12 "cmd/compile/internal/ssa"
13 "cmd/compile/internal/ssagen"
14 "cmd/compile/internal/types"
15 "cmd/internal/obj"
16 "cmd/internal/obj/riscv"
17 "internal/abi"
18 )
19
20
// ssaRegToReg maps ssa register numbers to obj register numbers.
// The index is the SSA-allocator register number; the value is the
// corresponding riscv obj register constant.
var ssaRegToReg = []int16{
	riscv.REG_X0,
	// X1 (LR) is not a general-purpose register for the allocator,
	// so it has no entry here.
	riscv.REG_X2,
	riscv.REG_X3,
	riscv.REG_X4,
	riscv.REG_X5,
	riscv.REG_X6,
	riscv.REG_X7,
	riscv.REG_X8,
	riscv.REG_X9,
	riscv.REG_X10,
	riscv.REG_X11,
	riscv.REG_X12,
	riscv.REG_X13,
	riscv.REG_X14,
	riscv.REG_X15,
	riscv.REG_X16,
	riscv.REG_X17,
	riscv.REG_X18,
	riscv.REG_X19,
	riscv.REG_X20,
	riscv.REG_X21,
	riscv.REG_X22,
	riscv.REG_X23,
	riscv.REG_X24,
	riscv.REG_X25,
	riscv.REG_X26,
	riscv.REG_X27,
	riscv.REG_X28,
	riscv.REG_X29,
	riscv.REG_X30,
	riscv.REG_X31,
	riscv.REG_F0,
	riscv.REG_F1,
	riscv.REG_F2,
	riscv.REG_F3,
	riscv.REG_F4,
	riscv.REG_F5,
	riscv.REG_F6,
	riscv.REG_F7,
	riscv.REG_F8,
	riscv.REG_F9,
	riscv.REG_F10,
	riscv.REG_F11,
	riscv.REG_F12,
	riscv.REG_F13,
	riscv.REG_F14,
	riscv.REG_F15,
	riscv.REG_F16,
	riscv.REG_F17,
	riscv.REG_F18,
	riscv.REG_F19,
	riscv.REG_F20,
	riscv.REG_F21,
	riscv.REG_F22,
	riscv.REG_F23,
	riscv.REG_F24,
	riscv.REG_F25,
	riscv.REG_F26,
	riscv.REG_F27,
	riscv.REG_F28,
	riscv.REG_F29,
	riscv.REG_F30,
	riscv.REG_F31,
	0, // SB is not a real register; 0 stands in for the SB pseudo-register.
}
88
89 func loadByType(t *types.Type) obj.As {
90 width := t.Size()
91
92 if t.IsFloat() {
93 switch width {
94 case 4:
95 return riscv.AMOVF
96 case 8:
97 return riscv.AMOVD
98 default:
99 base.Fatalf("unknown float width for load %d in type %v", width, t)
100 return 0
101 }
102 }
103
104 switch width {
105 case 1:
106 if t.IsSigned() {
107 return riscv.AMOVB
108 } else {
109 return riscv.AMOVBU
110 }
111 case 2:
112 if t.IsSigned() {
113 return riscv.AMOVH
114 } else {
115 return riscv.AMOVHU
116 }
117 case 4:
118 if t.IsSigned() {
119 return riscv.AMOVW
120 } else {
121 return riscv.AMOVWU
122 }
123 case 8:
124 return riscv.AMOV
125 default:
126 base.Fatalf("unknown width for load %d in type %v", width, t)
127 return 0
128 }
129 }
130
131
132 func storeByType(t *types.Type) obj.As {
133 width := t.Size()
134
135 if t.IsFloat() {
136 switch width {
137 case 4:
138 return riscv.AMOVF
139 case 8:
140 return riscv.AMOVD
141 default:
142 base.Fatalf("unknown float width for store %d in type %v", width, t)
143 return 0
144 }
145 }
146
147 switch width {
148 case 1:
149 return riscv.AMOVB
150 case 2:
151 return riscv.AMOVH
152 case 4:
153 return riscv.AMOVW
154 case 8:
155 return riscv.AMOV
156 default:
157 base.Fatalf("unknown width for store %d in type %v", width, t)
158 return 0
159 }
160 }
161
162
163
164
165
166
167
168
169
170
171 func largestMove(alignment int64) (obj.As, int64) {
172 switch {
173 case alignment%8 == 0:
174 return riscv.AMOV, 8
175 case alignment%4 == 0:
176 return riscv.AMOVW, 4
177 case alignment%2 == 0:
178 return riscv.AMOVH, 2
179 default:
180 return riscv.AMOVB, 1
181 }
182 }
183
184 var fracMovOps = []obj.As{riscv.AMOVB, riscv.AMOVH, riscv.AMOVW, riscv.AMOV}
185
186
187
188 func ssaMarkMoves(s *ssagen.State, b *ssa.Block) {}
189
// ssaGenValue emits the machine instructions for a single SSA value v.
func ssaGenValue(s *ssagen.State, v *ssa.Value) {
	s.SetPos(v.Pos)

	switch v.Op {
	case ssa.OpInitMem:
		// memory arg needs no code
	case ssa.OpArg:
		// input args need no code
	case ssa.OpPhi:
		ssagen.CheckLoweredPhi(v)
	case ssa.OpCopy, ssa.OpRISCV64MOVDreg:
		if v.Type.IsMemory() {
			return
		}
		rs := v.Args[0].Reg()
		rd := v.Reg()
		if rs == rd {
			// No-op move; emit nothing.
			return
		}
		as := riscv.AMOV
		if v.Type.IsFloat() {
			as = riscv.AMOVD
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = rs
		p.To.Type = obj.TYPE_REG
		p.To.Reg = rd
	case ssa.OpRISCV64MOVDnop:
		// nothing to do
	case ssa.OpLoadReg:
		if v.Type.IsFlags() {
			v.Fatalf("load flags not implemented: %v", v.LongString())
			return
		}
		p := s.Prog(loadByType(v.Type))
		ssagen.AddrAuto(&p.From, v.Args[0])
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpStoreReg:
		if v.Type.IsFlags() {
			v.Fatalf("store flags not implemented: %v", v.LongString())
			return
		}
		p := s.Prog(storeByType(v.Type))
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpArgIntReg, ssa.OpArgFloatReg:
		// The assembler needs to wrap the entry safepoint/stack growth code with
		// spills/unspills of the register args. Pass the spill info along.
		// The loop runs at most once per function: RegArgs is cleared below.
		for _, a := range v.Block.Func.RegArgs {
			// Spill slot address is relative to SP, past the fixed frame area
			// (saved LR etc.).
			addr := ssagen.SpillSlotAddr(a, riscv.REG_SP, base.Ctxt.Arch.FixedFrameSize)
			s.FuncInfo().AddSpill(
				obj.RegSpill{Reg: a.Reg, Addr: addr, Unspill: loadByType(a.Type), Spill: storeByType(a.Type)})
		}
		v.Block.Func.RegArgs = nil
		// Sanity-check that the arg is in the expected register.
		ssagen.CheckArgReg(v)
	case ssa.OpSP, ssa.OpSB, ssa.OpGetG:
		// nothing to do
	case ssa.OpRISCV64MOVBreg, ssa.OpRISCV64MOVHreg, ssa.OpRISCV64MOVWreg,
		ssa.OpRISCV64MOVBUreg, ssa.OpRISCV64MOVHUreg, ssa.OpRISCV64MOVWUreg:
		// Sign/zero extension. Look through register-to-register copies to the
		// underlying value to see if the extension can be elided.
		a := v.Args[0]
		for a.Op == ssa.OpCopy || a.Op == ssa.OpRISCV64MOVDreg {
			a = a.Args[0]
		}
		as := v.Op.Asm()
		rs := v.Args[0].Reg()
		rd := v.Reg()
		if a.Op == ssa.OpLoadReg {
			t := a.Type
			switch {
			case v.Op == ssa.OpRISCV64MOVBreg && t.Size() == 1 && t.IsSigned(),
				v.Op == ssa.OpRISCV64MOVHreg && t.Size() == 2 && t.IsSigned(),
				v.Op == ssa.OpRISCV64MOVWreg && t.Size() == 4 && t.IsSigned(),
				v.Op == ssa.OpRISCV64MOVBUreg && t.Size() == 1 && !t.IsSigned(),
				v.Op == ssa.OpRISCV64MOVHUreg && t.Size() == 2 && !t.IsSigned(),
				v.Op == ssa.OpRISCV64MOVWUreg && t.Size() == 4 && !t.IsSigned():
				// arg is a proper-typed load that is already sign/zero
				// extended; don't extend again, just move (or do nothing).
				if rs == rd {
					return
				}
				as = riscv.AMOV
			default:
			}
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = rs
		p.To.Type = obj.TYPE_REG
		p.To.Reg = rd
	case ssa.OpRISCV64ADD, ssa.OpRISCV64SUB, ssa.OpRISCV64SUBW, ssa.OpRISCV64XNOR, ssa.OpRISCV64XOR,
		ssa.OpRISCV64OR, ssa.OpRISCV64ORN, ssa.OpRISCV64AND, ssa.OpRISCV64ANDN,
		ssa.OpRISCV64SLL, ssa.OpRISCV64SLLW, ssa.OpRISCV64SRA, ssa.OpRISCV64SRAW, ssa.OpRISCV64SRL, ssa.OpRISCV64SRLW,
		ssa.OpRISCV64SLT, ssa.OpRISCV64SLTU, ssa.OpRISCV64MUL, ssa.OpRISCV64MULW, ssa.OpRISCV64MULH,
		ssa.OpRISCV64MULHU, ssa.OpRISCV64DIV, ssa.OpRISCV64DIVU, ssa.OpRISCV64DIVW,
		ssa.OpRISCV64DIVUW, ssa.OpRISCV64REM, ssa.OpRISCV64REMU, ssa.OpRISCV64REMW,
		ssa.OpRISCV64REMUW,
		ssa.OpRISCV64ROL, ssa.OpRISCV64ROLW, ssa.OpRISCV64ROR, ssa.OpRISCV64RORW,
		ssa.OpRISCV64FADDS, ssa.OpRISCV64FSUBS, ssa.OpRISCV64FMULS, ssa.OpRISCV64FDIVS,
		ssa.OpRISCV64FEQS, ssa.OpRISCV64FNES, ssa.OpRISCV64FLTS, ssa.OpRISCV64FLES,
		ssa.OpRISCV64FADDD, ssa.OpRISCV64FSUBD, ssa.OpRISCV64FMULD, ssa.OpRISCV64FDIVD,
		ssa.OpRISCV64FEQD, ssa.OpRISCV64FNED, ssa.OpRISCV64FLTD, ssa.OpRISCV64FLED, ssa.OpRISCV64FSGNJD,
		ssa.OpRISCV64MIN, ssa.OpRISCV64MAX, ssa.OpRISCV64MINU, ssa.OpRISCV64MAXU,
		ssa.OpRISCV64SH1ADD, ssa.OpRISCV64SH2ADD, ssa.OpRISCV64SH3ADD,
		ssa.OpRISCV64CZEROEQZ, ssa.OpRISCV64CZERONEZ:
		// Generic three-register op: OP arg1, arg0, out.
		r := v.Reg()
		r1 := v.Args[0].Reg()
		r2 := v.Args[1].Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r2
		p.Reg = r1
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r

	case ssa.OpRISCV64LoweredFMAXD, ssa.OpRISCV64LoweredFMIND, ssa.OpRISCV64LoweredFMAXS, ssa.OpRISCV64LoweredFMINS:
		// FMIN/FMAX need explicit NaN handling before the hardware
		// instruction can be used (the hardware behavior for NaN operands
		// does not match Go's required NaN propagation — NOTE(review):
		// inferred from the generated sequence below; confirm against the
		// RISC-V F/D extension spec). The emitted sequence is:
		//
		//	FADD  Rarg1, Rarg0, Rout  // FADD propagates a NaN to the result
		//	FEQ   Rarg0, Rarg0, Rtmp  // Rtmp = 0 iff Rarg0 is NaN
		//	BEQZ  Rtmp, end
		//	FEQ   Rarg1, Rarg1, Rtmp  // Rtmp = 0 iff Rarg1 is NaN
		//	BEQZ  Rtmp, end
		//	F(MIN|MAX) Rarg1, Rarg0, Rout
		// end:
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg()
		add, feq := riscv.AFADDD, riscv.AFEQD
		if v.Op == ssa.OpRISCV64LoweredFMAXS || v.Op == ssa.OpRISCV64LoweredFMINS {
			add = riscv.AFADDS
			feq = riscv.AFEQS
		}

		p1 := s.Prog(add)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r0
		p1.Reg = r1
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = out

		p2 := s.Prog(feq)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = r0
		p2.Reg = r0
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = riscv.REG_TMP

		p3 := s.Prog(riscv.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = riscv.REG_ZERO
		p3.Reg = riscv.REG_TMP
		p3.To.Type = obj.TYPE_BRANCH

		p4 := s.Prog(feq)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = r1
		p4.Reg = r1
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = riscv.REG_TMP

		p5 := s.Prog(riscv.ABEQ)
		p5.From.Type = obj.TYPE_REG
		p5.From.Reg = riscv.REG_ZERO
		p5.Reg = riscv.REG_TMP
		p5.To.Type = obj.TYPE_BRANCH

		p6 := s.Prog(v.Op.Asm())
		p6.From.Type = obj.TYPE_REG
		p6.From.Reg = r1
		p6.Reg = r0
		p6.To.Type = obj.TYPE_REG
		p6.To.Reg = out

		// Both NaN branches skip straight past the FMIN/FMAX.
		nop := s.Prog(obj.ANOP)
		p3.To.SetTarget(nop)
		p5.To.SetTarget(nop)

	case ssa.OpRISCV64LoweredMuluhilo:
		// 128-bit multiply: MULHU produces the high word (Reg0),
		// MUL the low word (Reg1).
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		p := s.Prog(riscv.AMULHU)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r1
		p.Reg = r0
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		p1 := s.Prog(riscv.AMUL)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r1
		p1.Reg = r0
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = v.Reg1()
	case ssa.OpRISCV64LoweredMuluover:
		// Multiply with overflow check: Reg0 = low word, Reg1 = 1 if the
		// high word (i.e. overflow) is nonzero, else 0.
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		p := s.Prog(riscv.AMULHU)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r1
		p.Reg = r0
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg1()
		p1 := s.Prog(riscv.AMUL)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r1
		p1.Reg = r0
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = v.Reg0()
		p2 := s.Prog(riscv.ASNEZ)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = v.Reg1()
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = v.Reg1()
	case ssa.OpRISCV64FMADDD, ssa.OpRISCV64FMSUBD, ssa.OpRISCV64FNMADDD, ssa.OpRISCV64FNMSUBD,
		ssa.OpRISCV64FMADDS, ssa.OpRISCV64FMSUBS, ssa.OpRISCV64FNMADDS, ssa.OpRISCV64FNMSUBS:
		// Fused multiply-add family: three source registers, one destination.
		r := v.Reg()
		r1 := v.Args[0].Reg()
		r2 := v.Args[1].Reg()
		r3 := v.Args[2].Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r2
		p.Reg = r1
		p.AddRestSource(obj.Addr{Type: obj.TYPE_REG, Reg: r3})
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
	case ssa.OpRISCV64FSQRTS, ssa.OpRISCV64FSQRTD,
		ssa.OpRISCV64FNEGS, ssa.OpRISCV64FNEGD,
		ssa.OpRISCV64FABSS, ssa.OpRISCV64FABSD,
		ssa.OpRISCV64FMVSX, ssa.OpRISCV64FMVXS, ssa.OpRISCV64FMVDX, ssa.OpRISCV64FMVXD,
		ssa.OpRISCV64FCVTSW, ssa.OpRISCV64FCVTSL, ssa.OpRISCV64FCVTWS, ssa.OpRISCV64FCVTLS,
		ssa.OpRISCV64FCVTDW, ssa.OpRISCV64FCVTDL, ssa.OpRISCV64FCVTWD, ssa.OpRISCV64FCVTLD, ssa.OpRISCV64FCVTDS, ssa.OpRISCV64FCVTSD,
		ssa.OpRISCV64FCLASSS, ssa.OpRISCV64FCLASSD,
		ssa.OpRISCV64NOT, ssa.OpRISCV64NEG, ssa.OpRISCV64NEGW, ssa.OpRISCV64CLZ, ssa.OpRISCV64CLZW, ssa.OpRISCV64CTZ, ssa.OpRISCV64CTZW,
		ssa.OpRISCV64REV8, ssa.OpRISCV64CPOP, ssa.OpRISCV64CPOPW:
		// Generic unary op: OP arg0, out.
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpRISCV64ADDI, ssa.OpRISCV64ADDIW, ssa.OpRISCV64XORI, ssa.OpRISCV64ORI, ssa.OpRISCV64ANDI,
		ssa.OpRISCV64SLLI, ssa.OpRISCV64SLLIW, ssa.OpRISCV64SRAI, ssa.OpRISCV64SRAIW,
		ssa.OpRISCV64SRLI, ssa.OpRISCV64SRLIW, ssa.OpRISCV64SLTI, ssa.OpRISCV64SLTIU,
		ssa.OpRISCV64RORI, ssa.OpRISCV64RORIW:
		// Register-immediate op: OP $imm, arg0, out.
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpRISCV64MOVDconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpRISCV64FMOVDconst, ssa.OpRISCV64FMOVFconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_FCONST
		p.From.Val = v.AuxFloat()
		p.From.Name = obj.NAME_NONE
		p.From.Reg = obj.REG_NONE
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpRISCV64MOVaddr:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_ADDR
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()

		var wantreg string
		// MOVW $sym+off(base), R
		switch v.Aux.(type) {
		default:
			v.Fatalf("aux is of unknown type %T", v.Aux)
		case *obj.LSym:
			wantreg = "SB"
			ssagen.AddAux(&p.From, v)
		case *ir.Name:
			wantreg = "SP"
			ssagen.AddAux(&p.From, v)
		case nil:
			// No sym, just MOVW $off(SP), R.
			wantreg = "SP"
			p.From.Reg = riscv.REG_SP
			p.From.Offset = v.AuxInt
		}
		if reg := v.Args[0].RegName(); reg != wantreg {
			v.Fatalf("bad reg %s for symbol type %T, want %s", reg, v.Aux, wantreg)
		}
	case ssa.OpRISCV64MOVBload, ssa.OpRISCV64MOVHload, ssa.OpRISCV64MOVWload, ssa.OpRISCV64MOVDload,
		ssa.OpRISCV64MOVBUload, ssa.OpRISCV64MOVHUload, ssa.OpRISCV64MOVWUload,
		ssa.OpRISCV64FMOVWload, ssa.OpRISCV64FMOVDload:
		// Memory load: MOVx (arg0)+aux, out.
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpRISCV64MOVBstore, ssa.OpRISCV64MOVHstore, ssa.OpRISCV64MOVWstore, ssa.OpRISCV64MOVDstore,
		ssa.OpRISCV64FMOVWstore, ssa.OpRISCV64FMOVDstore:
		// Memory store: MOVx arg1, (arg0)+aux.
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpRISCV64MOVBstorezero, ssa.OpRISCV64MOVHstorezero, ssa.OpRISCV64MOVWstorezero, ssa.OpRISCV64MOVDstorezero:
		// Store of the zero register: MOVx ZERO, (arg0)+aux.
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = riscv.REG_ZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpRISCV64SEQZ, ssa.OpRISCV64SNEZ:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpRISCV64CALLstatic, ssa.OpRISCV64CALLclosure, ssa.OpRISCV64CALLinter:
		s.Call(v)
	case ssa.OpRISCV64CALLtail, ssa.OpRISCV64CALLtailinter:
		s.TailCall(v)
	case ssa.OpRISCV64LoweredWB:
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		// AuxInt encodes how many write-barrier buffer entries we need.
		p.To.Sym = ir.Syms.GCWriteBarrier[v.AuxInt-1]

	case ssa.OpRISCV64LoweredPanicBoundsRR, ssa.OpRISCV64LoweredPanicBoundsRC, ssa.OpRISCV64LoweredPanicBoundsCR, ssa.OpRISCV64LoweredPanicBoundsCC:
		// Compute the constant we put in the PCData entry for this call.
		// Register operands are encoded as offsets from X5; constants too
		// large for the encoding are materialized into X5/X6 instead.
		code, signed := ssa.BoundsKind(v.AuxInt).Code()
		xIsReg := false
		yIsReg := false
		xVal := 0
		yVal := 0
		switch v.Op {
		case ssa.OpRISCV64LoweredPanicBoundsRR:
			xIsReg = true
			xVal = int(v.Args[0].Reg() - riscv.REG_X5)
			yIsReg = true
			yVal = int(v.Args[1].Reg() - riscv.REG_X5)
		case ssa.OpRISCV64LoweredPanicBoundsRC:
			xIsReg = true
			xVal = int(v.Args[0].Reg() - riscv.REG_X5)
			c := v.Aux.(ssa.PanicBoundsC).C
			if c >= 0 && c <= abi.BoundsMaxConst {
				yVal = int(c)
			} else {
				// Move constant to a register, avoiding the one holding x.
				yIsReg = true
				if yVal == xVal {
					yVal = 1
				}
				p := s.Prog(riscv.AMOV)
				p.From.Type = obj.TYPE_CONST
				p.From.Offset = c
				p.To.Type = obj.TYPE_REG
				p.To.Reg = riscv.REG_X5 + int16(yVal)
			}
		case ssa.OpRISCV64LoweredPanicBoundsCR:
			yIsReg = true
			yVal = int(v.Args[0].Reg() - riscv.REG_X5)
			c := v.Aux.(ssa.PanicBoundsC).C
			if c >= 0 && c <= abi.BoundsMaxConst {
				xVal = int(c)
			} else {
				// Move constant to a register, avoiding the one holding y.
				xIsReg = true
				if xVal == yVal {
					xVal = 1
				}
				p := s.Prog(riscv.AMOV)
				p.From.Type = obj.TYPE_CONST
				p.From.Offset = c
				p.To.Type = obj.TYPE_REG
				p.To.Reg = riscv.REG_X5 + int16(xVal)
			}
		case ssa.OpRISCV64LoweredPanicBoundsCC:
			c := v.Aux.(ssa.PanicBoundsCC).Cx
			if c >= 0 && c <= abi.BoundsMaxConst {
				xVal = int(c)
			} else {
				// Move constant to a register (X5).
				xIsReg = true
				p := s.Prog(riscv.AMOV)
				p.From.Type = obj.TYPE_CONST
				p.From.Offset = c
				p.To.Type = obj.TYPE_REG
				p.To.Reg = riscv.REG_X5 + int16(xVal)
			}
			c = v.Aux.(ssa.PanicBoundsCC).Cy
			if c >= 0 && c <= abi.BoundsMaxConst {
				yVal = int(c)
			} else {
				// Move constant to a register (X6).
				yIsReg = true
				yVal = 1
				p := s.Prog(riscv.AMOV)
				p.From.Type = obj.TYPE_CONST
				p.From.Offset = c
				p.To.Type = obj.TYPE_REG
				p.To.Reg = riscv.REG_X5 + int16(yVal)
			}
		}
		c := abi.BoundsEncode(code, signed, xIsReg, yIsReg, xVal, yVal)

		p := s.Prog(obj.APCDATA)
		p.From.SetConst(abi.PCDATA_PanicBounds)
		p.To.SetConst(int64(c))
		p = s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.PanicBounds

	case ssa.OpRISCV64LoweredAtomicLoad8:
		// Byte-sized atomic load, bracketed by fences for ordering.
		s.Prog(riscv.AFENCE)
		p := s.Prog(riscv.AMOVBU)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		s.Prog(riscv.AFENCE)

	case ssa.OpRISCV64LoweredAtomicLoad32, ssa.OpRISCV64LoweredAtomicLoad64:
		// Load-reserved provides the atomic load.
		as := riscv.ALRW
		if v.Op == ssa.OpRISCV64LoweredAtomicLoad64 {
			as = riscv.ALRD
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()

	case ssa.OpRISCV64LoweredAtomicStore8:
		// Byte-sized atomic store, bracketed by fences for ordering.
		s.Prog(riscv.AFENCE)
		p := s.Prog(riscv.AMOVB)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		s.Prog(riscv.AFENCE)

	case ssa.OpRISCV64LoweredAtomicStore32, ssa.OpRISCV64LoweredAtomicStore64:
		// AMOSWAP with the old value discarded into ZERO acts as an
		// atomic store.
		as := riscv.AAMOSWAPW
		if v.Op == ssa.OpRISCV64LoweredAtomicStore64 {
			as = riscv.AAMOSWAPD
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		p.RegTo2 = riscv.REG_ZERO

	case ssa.OpRISCV64LoweredAtomicAdd32, ssa.OpRISCV64LoweredAtomicAdd64:
		// AMOADD returns the old value (into TMP); the SSA op must return
		// the new value, so add the delta again afterwards.
		as := riscv.AAMOADDW
		if v.Op == ssa.OpRISCV64LoweredAtomicAdd64 {
			as = riscv.AAMOADDD
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		p.RegTo2 = riscv.REG_TMP

		p2 := s.Prog(riscv.AADD)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = riscv.REG_TMP
		p2.Reg = v.Args[1].Reg()
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = v.Reg0()

	case ssa.OpRISCV64LoweredAtomicExchange32, ssa.OpRISCV64LoweredAtomicExchange64:
		as := riscv.AAMOSWAPW
		if v.Op == ssa.OpRISCV64LoweredAtomicExchange64 {
			as = riscv.AAMOSWAPD
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		p.RegTo2 = v.Reg0()

	case ssa.OpRISCV64LoweredAtomicCas32, ssa.OpRISCV64LoweredAtomicCas64:
		// Compare-and-swap via an LR/SC loop:
		//	MOV  ZERO, Rout
		//	LR   (Rarg0), Rtmp
		//	BNE  Rtmp, Rarg1, end
		//	SC   Rarg2, (Rarg0), Rtmp
		//	BNE  Rtmp, ZERO, loop  // SC failed, retry
		//	MOV  $1, Rout
		// end:
		lr := riscv.ALRW
		sc := riscv.ASCW
		if v.Op == ssa.OpRISCV64LoweredAtomicCas64 {
			lr = riscv.ALRD
			sc = riscv.ASCD
		}

		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		r2 := v.Args[2].Reg()
		out := v.Reg0()

		p := s.Prog(riscv.AMOV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = riscv.REG_ZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = out

		p1 := s.Prog(lr)
		p1.From.Type = obj.TYPE_MEM
		p1.From.Reg = r0
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = riscv.REG_TMP

		p2 := s.Prog(riscv.ABNE)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = r1
		p2.Reg = riscv.REG_TMP
		p2.To.Type = obj.TYPE_BRANCH

		p3 := s.Prog(sc)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = r2
		p3.To.Type = obj.TYPE_MEM
		p3.To.Reg = r0
		p3.RegTo2 = riscv.REG_TMP

		p4 := s.Prog(riscv.ABNE)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = riscv.REG_TMP
		p4.Reg = riscv.REG_ZERO
		p4.To.Type = obj.TYPE_BRANCH
		p4.To.SetTarget(p1)

		p5 := s.Prog(riscv.AMOV)
		p5.From.Type = obj.TYPE_CONST
		p5.From.Offset = 1
		p5.To.Type = obj.TYPE_REG
		p5.To.Reg = out

		p6 := s.Prog(obj.ANOP)
		p2.To.SetTarget(p6)

	case ssa.OpRISCV64LoweredAtomicAnd32, ssa.OpRISCV64LoweredAtomicOr32:
		// AMOAND/AMOOR with the old value discarded into ZERO.
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		p.RegTo2 = riscv.REG_ZERO

	case ssa.OpRISCV64LoweredZero:
		// Unrolled zeroing. AuxValAndOff carries the byte count (Val) and
		// the alignment (Off), which picks the widest store usable.
		ptr := v.Args[0].Reg()
		sc := v.AuxValAndOff()
		n := sc.Val64()

		mov, sz := largestMove(sc.Off64())

		// Emit full-width zero stores while they fit.
		var off int64
		for n >= sz {
			zeroOp(s, mov, ptr, off)
			off += sz
			n -= sz
		}
		// Zero the remaining tail with progressively narrower stores.
		for i := len(fracMovOps) - 1; i >= 0; i-- {
			tsz := int64(1 << i)
			if n < tsz {
				continue
			}
			zeroOp(s, fracMovOps[i], ptr, off)
			off += tsz
			n -= tsz
		}

	case ssa.OpRISCV64LoweredZeroLoop:
		// Zeroing loop for larger sizes: a loop of 8 full-width stores per
		// iteration, followed by an unrolled tail.
		ptr := v.Args[0].Reg()
		sc := v.AuxValAndOff()
		n := sc.Val64()
		mov, sz := largestMove(sc.Off64())
		chunk := 8 * sz

		if n <= 3*chunk {
			v.Fatalf("ZeroLoop too small:%d, expect:%d", n, 3*chunk)
		}

		tmp := v.RegTmp()

		// tmp = ptr + (loop bytes): the address at which the loop stops.
		p := s.Prog(riscv.AADD)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = n - n%chunk
		p.Reg = ptr
		p.To.Type = obj.TYPE_REG
		p.To.Reg = tmp

		// Loop body: 8 stores, then advance ptr.
		for i := int64(0); i < 8; i++ {
			zeroOp(s, mov, ptr, sz*i)
		}

		p2 := s.Prog(riscv.AADD)
		p2.From.Type = obj.TYPE_CONST
		p2.From.Offset = chunk
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = ptr

		// Branch back to the first store (the instruction after p).
		p3 := s.Prog(riscv.ABNE)
		p3.From.Reg = tmp
		p3.From.Type = obj.TYPE_REG
		p3.Reg = ptr
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p.Link)

		n %= chunk

		// Unrolled tail, as in LoweredZero.
		var off int64
		for n >= sz {
			zeroOp(s, mov, ptr, off)
			off += sz
			n -= sz
		}
		for i := len(fracMovOps) - 1; i >= 0; i-- {
			tsz := int64(1 << i)
			if n < tsz {
				continue
			}
			zeroOp(s, fracMovOps[i], ptr, off)
			off += tsz
			n -= tsz
		}

	case ssa.OpRISCV64LoweredMove:
		// Unrolled memmove through a temporary register.
		dst := v.Args[0].Reg()
		src := v.Args[1].Reg()
		if dst == src {
			break
		}

		sa := v.AuxValAndOff()
		n := sa.Val64()
		mov, sz := largestMove(sa.Off64())

		var off int64
		tmp := int16(riscv.REG_X5)
		for n >= sz {
			moveOp(s, mov, dst, src, tmp, off)
			off += sz
			n -= sz
		}
		// Copy the remaining tail with progressively narrower moves.
		for i := len(fracMovOps) - 1; i >= 0; i-- {
			tsz := int64(1 << i)
			if n < tsz {
				continue
			}
			moveOp(s, fracMovOps[i], dst, src, tmp, off)
			off += tsz
			n -= tsz
		}

	case ssa.OpRISCV64LoweredMoveLoop:
		// Move loop for larger sizes: 8 full-width copies per iteration,
		// X5 as the data scratch register and X6 as the end-of-loop marker,
		// followed by an unrolled tail.
		dst := v.Args[0].Reg()
		src := v.Args[1].Reg()
		if dst == src {
			break
		}

		sc := v.AuxValAndOff()
		n := sc.Val64()
		mov, sz := largestMove(sc.Off64())
		chunk := 8 * sz

		if n <= 3*chunk {
			v.Fatalf("MoveLoop too small:%d, expect:%d", n, 3*chunk)
		}
		tmp := int16(riscv.REG_X5)

		// X6 = src + (loop bytes): the address at which the loop stops.
		p := s.Prog(riscv.AADD)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = n - n%chunk
		p.Reg = src
		p.To.Type = obj.TYPE_REG
		p.To.Reg = riscv.REG_X6

		for i := int64(0); i < 8; i++ {
			moveOp(s, mov, dst, src, tmp, sz*i)
		}

		p1 := s.Prog(riscv.AADD)
		p1.From.Type = obj.TYPE_CONST
		p1.From.Offset = chunk
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = src

		p2 := s.Prog(riscv.AADD)
		p2.From.Type = obj.TYPE_CONST
		p2.From.Offset = chunk
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = dst

		// Branch back to the first copy (the instruction after p).
		p3 := s.Prog(riscv.ABNE)
		p3.From.Reg = riscv.REG_X6
		p3.From.Type = obj.TYPE_REG
		p3.Reg = src
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p.Link)

		n %= chunk

		// Unrolled tail, as in LoweredMove.
		var off int64
		for n >= sz {
			moveOp(s, mov, dst, src, tmp, off)
			off += sz
			n -= sz
		}
		for i := len(fracMovOps) - 1; i >= 0; i-- {
			tsz := int64(1 << i)
			if n < tsz {
				continue
			}
			moveOp(s, fracMovOps[i], dst, src, tmp, off)
			off += tsz
			n -= tsz
		}

	case ssa.OpRISCV64LoweredNilCheck:
		// Issue a load which will fault if arg is nil.
		p := s.Prog(riscv.AMOVB)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = riscv.REG_ZERO
		if logopt.Enabled() {
			logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
		}
		if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos == 1 in generated wrappers
			base.WarnfAt(v.Pos, "generated nil check")
		}

	case ssa.OpRISCV64LoweredGetClosurePtr:
		// Closure pointer is in the context register; just verify it.
		ssagen.CheckLoweredGetClosurePtr(v)

	case ssa.OpRISCV64LoweredGetCallerSP:
		// caller's SP is FixedFrameSize below the address of the first arg
		p := s.Prog(riscv.AMOV)
		p.From.Type = obj.TYPE_ADDR
		p.From.Offset = -base.Ctxt.Arch.FixedFrameSize
		p.From.Name = obj.NAME_PARAM
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()

	case ssa.OpRISCV64LoweredGetCallerPC:
		p := s.Prog(obj.AGETCALLERPC)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()

	case ssa.OpRISCV64LoweredPubBarrier:
		// FENCE
		s.Prog(v.Op.Asm())

	case ssa.OpRISCV64LoweredRound32F, ssa.OpRISCV64LoweredRound64F:
		// input is already rounded

	case ssa.OpClobber, ssa.OpClobberReg:
		// TODO: implement for clobberdead experiment. Nop is ok for now.

	default:
		v.Fatalf("Unhandled op %v", v.Op)
	}
}
977
// blockBranch maps an SSA conditional block kind to the corresponding
// RISC-V branch instruction.
var blockBranch = [...]obj.As{
	ssa.BlockRISCV64BEQ:  riscv.ABEQ,
	ssa.BlockRISCV64BEQZ: riscv.ABEQZ,
	ssa.BlockRISCV64BGE:  riscv.ABGE,
	ssa.BlockRISCV64BGEU: riscv.ABGEU,
	ssa.BlockRISCV64BGEZ: riscv.ABGEZ,
	ssa.BlockRISCV64BGTZ: riscv.ABGTZ,
	ssa.BlockRISCV64BLEZ: riscv.ABLEZ,
	ssa.BlockRISCV64BLT:  riscv.ABLT,
	ssa.BlockRISCV64BLTU: riscv.ABLTU,
	ssa.BlockRISCV64BLTZ: riscv.ABLTZ,
	ssa.BlockRISCV64BNE:  riscv.ABNE,
	ssa.BlockRISCV64BNEZ: riscv.ABNEZ,
}
992
// ssaGenBlock emits the control-flow instructions that end block b.
// next is the block that will be laid out immediately after b, allowing
// fall-through instead of an explicit jump where possible.
func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
	s.SetPos(b.Pos)

	switch b.Kind {
	case ssa.BlockPlain, ssa.BlockDefer:
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockExit, ssa.BlockRetJmp:
	case ssa.BlockRet:
		s.Prog(obj.ARET)
	case ssa.BlockRISCV64BEQ, ssa.BlockRISCV64BEQZ, ssa.BlockRISCV64BNE, ssa.BlockRISCV64BNEZ,
		ssa.BlockRISCV64BLT, ssa.BlockRISCV64BLEZ, ssa.BlockRISCV64BGE, ssa.BlockRISCV64BGEZ,
		ssa.BlockRISCV64BLTZ, ssa.BlockRISCV64BGTZ, ssa.BlockRISCV64BLTU, ssa.BlockRISCV64BGEU:

		as := blockBranch[b.Kind]
		invAs := riscv.InvertBranch(as)

		var p *obj.Prog
		switch next {
		case b.Succs[0].Block():
			// Succs[0] falls through; branch to Succs[1] on the
			// inverted condition.
			p = s.Br(invAs, b.Succs[1].Block())
		case b.Succs[1].Block():
			// Succs[1] falls through; branch to Succs[0] directly.
			p = s.Br(as, b.Succs[0].Block())
		default:
			// Neither successor falls through: conditional branch to one,
			// unconditional jump to the other, putting the likely
			// successor on the conditional branch.
			if b.Likely != ssa.BranchUnlikely {
				p = s.Br(as, b.Succs[0].Block())
				s.Br(obj.AJMP, b.Succs[1].Block())
			} else {
				p = s.Br(invAs, b.Succs[1].Block())
				s.Br(obj.AJMP, b.Succs[0].Block())
			}
		}

		// Fill in the branch operands from the block's control values.
		p.From.Type = obj.TYPE_REG
		switch b.Kind {
		case ssa.BlockRISCV64BEQ, ssa.BlockRISCV64BNE, ssa.BlockRISCV64BLT, ssa.BlockRISCV64BGE, ssa.BlockRISCV64BLTU, ssa.BlockRISCV64BGEU:
			if b.NumControls() != 2 {
				b.Fatalf("Unexpected number of controls (%d != 2): %s", b.NumControls(), b.LongString())
			}
			p.From.Reg = b.Controls[0].Reg()
			p.Reg = b.Controls[1].Reg()

		case ssa.BlockRISCV64BEQZ, ssa.BlockRISCV64BNEZ, ssa.BlockRISCV64BGEZ, ssa.BlockRISCV64BLEZ, ssa.BlockRISCV64BLTZ, ssa.BlockRISCV64BGTZ:
			if b.NumControls() != 1 {
				b.Fatalf("Unexpected number of controls (%d != 1): %s", b.NumControls(), b.LongString())
			}
			p.From.Reg = b.Controls[0].Reg()
		}

	default:
		b.Fatalf("Unhandled block: %s", b.LongString())
	}
}
1049
1050 func loadRegResult(s *ssagen.State, f *ssa.Func, t *types.Type, reg int16, n *ir.Name, off int64) *obj.Prog {
1051 p := s.Prog(loadByType(t))
1052 p.From.Type = obj.TYPE_MEM
1053 p.From.Name = obj.NAME_AUTO
1054 p.From.Sym = n.Linksym()
1055 p.From.Offset = n.FrameOffset() + off
1056 p.To.Type = obj.TYPE_REG
1057 p.To.Reg = reg
1058 return p
1059 }
1060
1061 func spillArgReg(pp *objw.Progs, p *obj.Prog, f *ssa.Func, t *types.Type, reg int16, n *ir.Name, off int64) *obj.Prog {
1062 p = pp.Append(p, storeByType(t), obj.TYPE_REG, reg, 0, obj.TYPE_MEM, 0, n.FrameOffset()+off)
1063 p.To.Name = obj.NAME_PARAM
1064 p.To.Sym = n.Linksym()
1065 p.Pos = p.Pos.WithNotStmt()
1066 return p
1067 }
1068
1069 func zeroOp(s *ssagen.State, mov obj.As, reg int16, off int64) {
1070 p := s.Prog(mov)
1071 p.From.Type = obj.TYPE_REG
1072 p.From.Reg = riscv.REG_ZERO
1073 p.To.Type = obj.TYPE_MEM
1074 p.To.Reg = reg
1075 p.To.Offset = off
1076 return
1077 }
1078
1079 func moveOp(s *ssagen.State, mov obj.As, dst int16, src int16, tmp int16, off int64) {
1080 p := s.Prog(mov)
1081 p.From.Type = obj.TYPE_MEM
1082 p.From.Reg = src
1083 p.From.Offset = off
1084 p.To.Type = obj.TYPE_REG
1085 p.To.Reg = tmp
1086
1087 p1 := s.Prog(mov)
1088 p1.From.Type = obj.TYPE_REG
1089 p1.From.Reg = tmp
1090 p1.To.Type = obj.TYPE_MEM
1091 p1.To.Reg = dst
1092 p1.To.Offset = off
1093
1094 return
1095 }
1096