1
2
3
4
5 package ssagen
6
7 import (
8 "bufio"
9 "bytes"
10 "cmp"
11 "fmt"
12 "go/constant"
13 "html"
14 "internal/buildcfg"
15 "internal/goexperiment"
16 "internal/runtime/gc"
17 "os"
18 "path/filepath"
19 "slices"
20 "strings"
21
22 "cmd/compile/internal/abi"
23 "cmd/compile/internal/base"
24 "cmd/compile/internal/ir"
25 "cmd/compile/internal/liveness"
26 "cmd/compile/internal/objw"
27 "cmd/compile/internal/reflectdata"
28 "cmd/compile/internal/rttype"
29 "cmd/compile/internal/ssa"
30 "cmd/compile/internal/staticdata"
31 "cmd/compile/internal/typecheck"
32 "cmd/compile/internal/types"
33 "cmd/internal/obj"
34 "cmd/internal/objabi"
35 "cmd/internal/src"
36 "cmd/internal/sys"
37
38 rtabi "internal/abi"
39 )
40
// Shared SSA backend configuration, created once by InitConfig.
var ssaConfig *ssa.Config

// Per-worker scratch caches, sized by -c (base.Flag.LowerC); buildssa
// indexes this by its worker argument.
var ssaCaches []ssa.Cache

var ssaDump string     // early copy of $GOSSAFUNC; the function name to dump
var ssaDir string      // optional destination directory for ssa.html ($GOSSADIR)
var ssaDumpStdout bool // whether to dump to stdout ("+" suffix on GOSSAFUNC)
var ssaDumpCFG string  // generate CFGs for these phases ("name:phases" syntax in GOSSAFUNC)
const ssaDumpFile = "ssa.html"

// ssaDumpInlined holds functions inlined into the function named by ssaDump,
// collected by DumpInline so their sources can be shown alongside it.
var ssaDumpInlined []*ir.Func

// maxAggregatedHeapAllocation bounds the total size (bytes) of small
// pointer-free heap allocations that newHeapaddr will merge into a single
// allocation; see newHeapaddr and flushPendingHeapAllocations.
const maxAggregatedHeapAllocation = 16
57
58 func DumpInline(fn *ir.Func) {
59 if ssaDump != "" && ssaDump == ir.FuncName(fn) {
60 ssaDumpInlined = append(ssaDumpInlined, fn)
61 }
62 }
63
64 func InitEnv() {
65 ssaDump = os.Getenv("GOSSAFUNC")
66 ssaDir = os.Getenv("GOSSADIR")
67 if ssaDump != "" {
68 if strings.HasSuffix(ssaDump, "+") {
69 ssaDump = ssaDump[:len(ssaDump)-1]
70 ssaDumpStdout = true
71 }
72 spl := strings.Split(ssaDump, ":")
73 if len(spl) > 1 {
74 ssaDump = spl[0]
75 ssaDumpCFG = spl[1]
76 }
77 }
78 }
79
// InitConfig builds the shared SSA configuration and caches, pre-creates
// pointer types needed later (after which the pointer-type cache is frozen),
// and resolves the runtime function/variable symbols used by code generation.
func InitConfig() {
	types_ := ssa.NewTypes()

	if Arch.SoftFloat {
		softfloatInit()
	}

	// Generate a few pointer types that are uncommon in the frontend but
	// common in the backend, before disabling the pointer-type cache below.
	_ = types.NewPtr(types.Types[types.TINTER])                             // *interface{}
	_ = types.NewPtr(types.NewPtr(types.Types[types.TSTRING]))              // **string
	_ = types.NewPtr(types.NewSlice(types.Types[types.TINTER]))             // *[]interface{}
	_ = types.NewPtr(types.NewPtr(types.ByteType))                          // **byte
	_ = types.NewPtr(types.NewSlice(types.ByteType))                        // *[]byte
	_ = types.NewPtr(types.NewSlice(types.Types[types.TSTRING]))            // *[]string
	_ = types.NewPtr(types.NewPtr(types.NewPtr(types.Types[types.TUINT8]))) // ***uint8
	_ = types.NewPtr(types.Types[types.TINT16])                             // *int16
	_ = types.NewPtr(types.Types[types.TINT64])                             // *int64
	_ = types.NewPtr(types.ErrorType)                                       // *error
	_ = types.NewPtr(reflectdata.MapType())                                 // *internal/runtime/maps.Map
	_ = types.NewPtr(deferstruct())                                         // *runtime._defer
	// Freeze the pointer-type cache: new pointer types created after this
	// point are not cached (the backend runs concurrently).
	types.NewPtrCacheEnabled = false
	ssaConfig = ssa.NewConfig(base.Ctxt.Arch.Name, *types_, base.Ctxt, base.Flag.N == 0, Arch.SoftFloat)
	ssaConfig.Race = base.Flag.Race
	ssaCaches = make([]ssa.Cache, base.Flag.LowerC)

	// Set up some runtime functions and variables we'll need to call.
	ir.Syms.AssertE2I = typecheck.LookupRuntimeFunc("assertE2I")
	ir.Syms.AssertE2I2 = typecheck.LookupRuntimeFunc("assertE2I2")
	ir.Syms.CgoCheckMemmove = typecheck.LookupRuntimeFunc("cgoCheckMemmove")
	ir.Syms.CgoCheckPtrWrite = typecheck.LookupRuntimeFunc("cgoCheckPtrWrite")
	ir.Syms.CheckPtrAlignment = typecheck.LookupRuntimeFunc("checkptrAlignment")
	ir.Syms.Deferproc = typecheck.LookupRuntimeFunc("deferproc")
	ir.Syms.Deferprocat = typecheck.LookupRuntimeFunc("deferprocat")
	ir.Syms.DeferprocStack = typecheck.LookupRuntimeFunc("deferprocStack")
	ir.Syms.Deferreturn = typecheck.LookupRuntimeFunc("deferreturn")
	ir.Syms.Duffcopy = typecheck.LookupRuntimeFunc("duffcopy")
	ir.Syms.Duffzero = typecheck.LookupRuntimeFunc("duffzero")
	// gcWriteBarrier1..8 map to indices 0..7.
	ir.Syms.GCWriteBarrier[0] = typecheck.LookupRuntimeFunc("gcWriteBarrier1")
	ir.Syms.GCWriteBarrier[1] = typecheck.LookupRuntimeFunc("gcWriteBarrier2")
	ir.Syms.GCWriteBarrier[2] = typecheck.LookupRuntimeFunc("gcWriteBarrier3")
	ir.Syms.GCWriteBarrier[3] = typecheck.LookupRuntimeFunc("gcWriteBarrier4")
	ir.Syms.GCWriteBarrier[4] = typecheck.LookupRuntimeFunc("gcWriteBarrier5")
	ir.Syms.GCWriteBarrier[5] = typecheck.LookupRuntimeFunc("gcWriteBarrier6")
	ir.Syms.GCWriteBarrier[6] = typecheck.LookupRuntimeFunc("gcWriteBarrier7")
	ir.Syms.GCWriteBarrier[7] = typecheck.LookupRuntimeFunc("gcWriteBarrier8")
	ir.Syms.Goschedguarded = typecheck.LookupRuntimeFunc("goschedguarded")
	ir.Syms.Growslice = typecheck.LookupRuntimeFunc("growslice")
	ir.Syms.GrowsliceBuf = typecheck.LookupRuntimeFunc("growsliceBuf")
	ir.Syms.GrowsliceBufNoAlias = typecheck.LookupRuntimeFunc("growsliceBufNoAlias")
	ir.Syms.GrowsliceNoAlias = typecheck.LookupRuntimeFunc("growsliceNoAlias")
	ir.Syms.MoveSlice = typecheck.LookupRuntimeFunc("moveSlice")
	ir.Syms.MoveSliceNoScan = typecheck.LookupRuntimeFunc("moveSliceNoScan")
	ir.Syms.MoveSliceNoCap = typecheck.LookupRuntimeFunc("moveSliceNoCap")
	ir.Syms.MoveSliceNoCapNoScan = typecheck.LookupRuntimeFunc("moveSliceNoCapNoScan")
	ir.Syms.InterfaceSwitch = typecheck.LookupRuntimeFunc("interfaceSwitch")
	// Size-class-specialized malloc entry points; index 0 is unused
	// (the loops start at 1, matching the runtime's SC numbering).
	for i := 1; i < len(ir.Syms.MallocGCSmallNoScan); i++ {
		ir.Syms.MallocGCSmallNoScan[i] = typecheck.LookupRuntimeFunc(fmt.Sprintf("mallocgcSmallNoScanSC%d", i))
	}
	for i := 1; i < len(ir.Syms.MallocGCSmallScanNoHeader); i++ {
		ir.Syms.MallocGCSmallScanNoHeader[i] = typecheck.LookupRuntimeFunc(fmt.Sprintf("mallocgcSmallScanNoHeaderSC%d", i))
	}
	for i := 1; i < len(ir.Syms.MallocGCTiny); i++ {
		ir.Syms.MallocGCTiny[i] = typecheck.LookupRuntimeFunc(fmt.Sprintf("mallocgcTinySize%d", i))
	}
	ir.Syms.MallocGC = typecheck.LookupRuntimeFunc("mallocgc")
	ir.Syms.Memmove = typecheck.LookupRuntimeFunc("memmove")
	ir.Syms.Memequal = typecheck.LookupRuntimeFunc("memequal")
	ir.Syms.Msanread = typecheck.LookupRuntimeFunc("msanread")
	ir.Syms.Msanwrite = typecheck.LookupRuntimeFunc("msanwrite")
	ir.Syms.Msanmove = typecheck.LookupRuntimeFunc("msanmove")
	ir.Syms.Asanread = typecheck.LookupRuntimeFunc("asanread")
	ir.Syms.Asanwrite = typecheck.LookupRuntimeFunc("asanwrite")
	ir.Syms.Newobject = typecheck.LookupRuntimeFunc("newobject")
	ir.Syms.Newproc = typecheck.LookupRuntimeFunc("newproc")
	ir.Syms.PanicBounds = typecheck.LookupRuntimeFunc("panicBounds")
	ir.Syms.PanicExtend = typecheck.LookupRuntimeFunc("panicExtend")
	ir.Syms.Panicdivide = typecheck.LookupRuntimeFunc("panicdivide")
	ir.Syms.PanicdottypeE = typecheck.LookupRuntimeFunc("panicdottypeE")
	ir.Syms.PanicdottypeI = typecheck.LookupRuntimeFunc("panicdottypeI")
	ir.Syms.Panicnildottype = typecheck.LookupRuntimeFunc("panicnildottype")
	ir.Syms.Panicoverflow = typecheck.LookupRuntimeFunc("panicoverflow")
	ir.Syms.Panicshift = typecheck.LookupRuntimeFunc("panicshift")
	ir.Syms.PanicSimdImm = typecheck.LookupRuntimeFunc("panicSimdImm")
	ir.Syms.Racefuncenter = typecheck.LookupRuntimeFunc("racefuncenter")
	ir.Syms.Racefuncexit = typecheck.LookupRuntimeFunc("racefuncexit")
	ir.Syms.Raceread = typecheck.LookupRuntimeFunc("raceread")
	ir.Syms.Racereadrange = typecheck.LookupRuntimeFunc("racereadrange")
	ir.Syms.Racewrite = typecheck.LookupRuntimeFunc("racewrite")
	ir.Syms.Racewriterange = typecheck.LookupRuntimeFunc("racewriterange")
	ir.Syms.TypeAssert = typecheck.LookupRuntimeFunc("typeAssert")
	ir.Syms.WBZero = typecheck.LookupRuntimeFunc("wbZero")
	ir.Syms.WBMove = typecheck.LookupRuntimeFunc("wbMove")
	// Architecture feature flags exported by the runtime.
	ir.Syms.X86HasAVX = typecheck.LookupRuntimeVar("x86HasAVX")
	ir.Syms.X86HasFMA = typecheck.LookupRuntimeVar("x86HasFMA")
	ir.Syms.X86HasPOPCNT = typecheck.LookupRuntimeVar("x86HasPOPCNT")
	ir.Syms.X86HasSSE41 = typecheck.LookupRuntimeVar("x86HasSSE41")
	ir.Syms.ARMHasVFPv4 = typecheck.LookupRuntimeVar("armHasVFPv4")
	ir.Syms.ARM64HasATOMICS = typecheck.LookupRuntimeVar("arm64HasATOMICS")
	ir.Syms.Loong64HasLAMCAS = typecheck.LookupRuntimeVar("loong64HasLAMCAS")
	ir.Syms.Loong64HasLAM_BH = typecheck.LookupRuntimeVar("loong64HasLAM_BH")
	ir.Syms.Loong64HasLSX = typecheck.LookupRuntimeVar("loong64HasLSX")
	ir.Syms.RISCV64HasZbb = typecheck.LookupRuntimeVar("riscv64HasZbb")
	ir.Syms.Staticuint64s = typecheck.LookupRuntimeVar("staticuint64s")
	ir.Syms.Typedmemmove = typecheck.LookupRuntimeFunc("typedmemmove")
	ir.Syms.Udiv = typecheck.LookupRuntimeVar("udiv")
	ir.Syms.WriteBarrier = typecheck.LookupRuntimeVar("writeBarrier")
	ir.Syms.Zerobase = typecheck.LookupRuntimeVar("zerobase")
	ir.Syms.ZeroVal = typecheck.LookupRuntimeVar("zeroVal")

	// Wasm uses Go-implemented bounds-check panic functions instead of
	// the assembly panicBounds/panicExtend path.
	if Arch.LinkArch.Family == sys.Wasm {
		BoundsCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeFunc("goPanicIndex")
		BoundsCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeFunc("goPanicIndexU")
		BoundsCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeFunc("goPanicSliceAlen")
		BoundsCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeFunc("goPanicSliceAlenU")
		BoundsCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeFunc("goPanicSliceAcap")
		BoundsCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeFunc("goPanicSliceAcapU")
		BoundsCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeFunc("goPanicSliceB")
		BoundsCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeFunc("goPanicSliceBU")
		BoundsCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeFunc("goPanicSlice3Alen")
		BoundsCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeFunc("goPanicSlice3AlenU")
		BoundsCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeFunc("goPanicSlice3Acap")
		BoundsCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeFunc("goPanicSlice3AcapU")
		BoundsCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeFunc("goPanicSlice3B")
		BoundsCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeFunc("goPanicSlice3BU")
		BoundsCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeFunc("goPanicSlice3C")
		BoundsCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeFunc("goPanicSlice3CU")
		BoundsCheckFunc[ssa.BoundsConvert] = typecheck.LookupRuntimeFunc("goPanicSliceConvert")
	}

	// Wasm helper symbols (looked up unconditionally).
	ir.Syms.WasmDiv = typecheck.LookupRuntimeVar("wasmDiv")
	ir.Syms.WasmTruncS = typecheck.LookupRuntimeVar("wasmTruncS")
	ir.Syms.WasmTruncU = typecheck.LookupRuntimeVar("wasmTruncU")
	ir.Syms.SigPanic = typecheck.LookupRuntimeFunc("sigpanic")
}
216
// InitTables initializes the intrinsics lookup tables
// (see initIntrinsics, defined elsewhere in this package).
func InitTables() {
	initIntrinsics(nil)
}
220
221
222
223
224
225
226
227
// AbiForBodylessFuncStackMap returns the ABI config used to build the stack
// map for a bodyless (body-elsewhere) function. It currently always returns a
// fresh copy of ABI0, regardless of fn.
func AbiForBodylessFuncStackMap(fn *ir.Func) *abi.ABIConfig {
	return ssaConfig.ABI0.Copy()
}
231
232
233
234 func abiForFunc(fn *ir.Func, abi0, abi1 *abi.ABIConfig) *abi.ABIConfig {
235 if buildcfg.Experiment.RegabiArgs {
236
237 if fn == nil {
238 return abi1
239 }
240 switch fn.ABI {
241 case obj.ABI0:
242 return abi0
243 case obj.ABIInternal:
244
245
246 return abi1
247 }
248 base.Fatalf("function %v has unknown ABI %v", fn, fn.ABI)
249 panic("not reachable")
250 }
251
252 a := abi0
253 if fn != nil {
254 if fn.Pragma&ir.RegisterParams != 0 {
255 a = abi1
256 }
257 }
258 return a
259 }
260
261
262
263
264
265
266
267
268
269
270
271
// emitOpenDeferInfo emits a per-function symbol describing the open-coded
// defers: the frame offset of the deferBits temporary and the frame offset of
// the first defer-closure slot. The closure slots are required to be laid out
// contiguously (one pointer apart), so only the first offset is recorded.
func (s *state) emitOpenDeferInfo() {
	firstOffset := s.openDefers[0].closureNode.FrameOffset()

	// Verify that the stack-frame layout placed the closure slots
	// consecutively, in defer order.
	for i, r := range s.openDefers {
		have := r.closureNode.FrameOffset()
		want := firstOffset + int64(i)*int64(types.PtrSize)
		if have != want {
			base.FatalfAt(s.curfn.Pos(), "unexpected frame offset for open-coded defer slot #%v: have %v, want %v", i, have, want)
		}
	}

	// The info is a content-addressable byte-aligned data symbol attached
	// to the function.
	x := base.Ctxt.Lookup(s.curfn.LSym.Name + ".opendefer")
	x.Set(obj.AttrContentAddressable, true)
	x.Align = 1
	s.curfn.LSym.Func().OpenCodedDeferInfo = x

	// Frame offsets are negative here, so they are written negated as uvarints.
	off := 0
	off = objw.Uvarint(x, off, uint64(-s.deferBitsTemp.FrameOffset()))
	off = objw.Uvarint(x, off, uint64(-firstOffset))
}
293
294
295
// buildssa builds an SSA function for fn.
// worker indicates which backend worker is doing the processing (it selects
// the scratch cache); isPgoHot marks the function as hot per PGO.
func buildssa(fn *ir.Func, worker int, isPgoHot bool) *ssa.Func {
	name := ir.FuncName(fn)

	abiSelf := abiForFunc(fn, ssaConfig.ABI0, ssaConfig.ABI1)

	printssa := false
	// GOSSAFUNC can match the plain name, "pkg.name", a path-suffixed
	// "…/pkg.name", and may carry an ABI wrapper marker ("<…>" rewritten to
	// a "," form) that forces ABI-qualified matching.
	if strings.Contains(ssaDump, name) {
		nameOptABI := name
		if l := len(ssaDump); l > 1 && ssaDump[l-2] == ',' {
			nameOptABI = ssa.FuncNameABI(name, abiSelf.Which())
		} else if strings.HasSuffix(ssaDump, ">") {
			l := len(ssaDump)
			if l >= 3 && ssaDump[l-3] == '<' {
				nameOptABI = ssa.FuncNameABI(name, abiSelf.Which())
				ssaDump = ssaDump[:l-3] + "," + ssaDump[l-2:l-1]
			}
		}
		pkgDotName := base.Ctxt.Pkgpath + "." + nameOptABI
		printssa = nameOptABI == ssaDump ||
			pkgDotName == ssaDump ||
			strings.HasSuffix(pkgDotName, ssaDump) && strings.HasSuffix(pkgDotName, "/"+ssaDump)
	}

	var astBuf *bytes.Buffer
	if printssa {
		// Capture the IR body dump so it can be attached to the HTML
		// output (and echoed to stdout when requested).
		astBuf = &bytes.Buffer{}
		ir.FDumpList(astBuf, "buildssa-body", fn.Body)
		if ssaDumpStdout {
			fmt.Println("generating SSA for", name)
			fmt.Print(astBuf.String())
		}
	}

	var s state
	s.pushLine(fn.Pos())
	defer s.popLine()

	s.hasdefer = fn.HasDefer()
	if fn.Pragma&ir.CgoUnsafeArgs != 0 {
		s.cgoUnsafeArgs = true
	}
	s.checkPtrEnabled = ir.ShouldCheckPtr(fn, 1)

	// Decide race/msan/asan instrumentation: skipped for norace pragma,
	// ABI wrappers, and packages marked NoRaceFunc under -race.
	if base.Flag.Cfg.Instrumenting && fn.Pragma&ir.Norace == 0 && !fn.Linksym().ABIWrapper() {
		if !base.Flag.Race || !objabi.LookupPkgSpecial(fn.Sym().Pkg.Path).NoRaceFunc {
			s.instrumentMemory = true
			if base.Flag.Race {
				s.instrumentEnterExit = true
			}
		}
	}

	fe := ssafn{
		curfn: fn,
		log:   printssa && ssaDumpStdout,
	}
	s.curfn = fn

	cache := &ssaCaches[worker]
	cache.Reset()

	s.f = ssaConfig.NewFunc(&fe, cache)
	s.config = ssaConfig
	s.f.Type = fn.Type()
	s.f.Name = name
	s.f.PrintOrHtmlSSA = printssa
	if fn.Pragma&ir.Nosplit != 0 {
		s.f.NoSplit = true
	}
	s.f.ABI0 = ssaConfig.ABI0
	s.f.ABI1 = ssaConfig.ABI1
	s.f.ABIDefault = abiForFunc(nil, ssaConfig.ABI0, ssaConfig.ABI1)
	s.f.ABISelf = abiSelf

	s.panics = map[funcLine]*ssa.Block{}
	s.softFloat = s.config.SoftFloat

	// Allocate starting block.
	s.f.Entry = s.f.NewBlock(ssa.BlockPlain)
	s.f.Entry.Pos = fn.Pos()
	s.f.IsPgoHot = isPgoHot

	if printssa {
		ssaDF := ssaDumpFile
		if ssaDir != "" {
			ssaDF = filepath.Join(ssaDir, base.Ctxt.Pkgpath+"."+s.f.NameABI()+".html")
			ssaD := filepath.Dir(ssaDF)
			os.MkdirAll(ssaD, 0755)
		}
		s.f.HTMLWriter = ssa.NewHTMLWriter(ssaDF, s.f, ssaDumpCFG)
		// The sources column shows this function plus any functions
		// recorded by DumpInline.
		dumpSourcesColumn(s.f.HTMLWriter, fn)
		s.f.HTMLWriter.WriteAST("AST", astBuf)
	}

	// Allocate starting values.
	s.labels = map[string]*ssaLabel{}
	s.fwdVars = map[ir.Node]*ssa.Value{}
	s.startmem = s.entryNewValue0(ssa.OpInitMem, types.TypeMem)

	// Decide whether defers in this function can be open-coded.
	s.hasOpenDefers = base.Flag.N == 0 && s.hasdefer && !s.curfn.OpenCodedDeferDisallowed()
	switch {
	case base.Debug.NoOpenDefer != 0:
		s.hasOpenDefers = false
	case s.hasOpenDefers && (base.Ctxt.Flag_shared || base.Ctxt.Flag_dynlink) && base.Ctxt.Arch.Name == "386":
		// Disable open-coded defers for shared/dynlink 386 builds.
		s.hasOpenDefers = false
	}
	if s.hasOpenDefers && s.instrumentEnterExit {
		// Racefuncexit sequencing conflicts with open-coded defer exits;
		// fall back to the runtime defer path.
		s.hasOpenDefers = false
	}
	if s.hasOpenDefers {
		// All results must live on the stack for open-coded defers.
		for _, f := range s.curfn.Type().Results() {
			if !f.Nname.(*ir.Name).OnStack() {
				s.hasOpenDefers = false
				break
			}
		}
	}
	if s.hasOpenDefers &&
		s.curfn.NumReturns*s.curfn.NumDefers > 15 {
		// Each defer is expanded at every return; cap the code-size blowup.
		s.hasOpenDefers = false
	}

	s.sp = s.entryNewValue0(ssa.OpSP, types.Types[types.TUINTPTR])
	s.sb = s.entryNewValue0(ssa.OpSB, types.Types[types.TUINTPTR])

	s.startBlock(s.f.Entry)
	s.vars[memVar] = s.startmem
	if s.hasOpenDefers {
		// Create the deferBits temporary: an address-taken uint8 tracking
		// which defers have been armed, initialized to zero and kept live.
		deferBitsTemp := typecheck.TempAt(src.NoXPos, s.curfn, types.Types[types.TUINT8])
		deferBitsTemp.SetAddrtaken(true)
		s.deferBitsTemp = deferBitsTemp
		// For this value, AuxInt is initialized to zero by default.
		startDeferBits := s.entryNewValue0(ssa.OpConst8, types.Types[types.TUINT8])
		s.vars[deferBitsVar] = startDeferBits
		s.deferBitsAddr = s.addr(deferBitsTemp)
		s.store(types.Types[types.TUINT8], s.deferBitsAddr, startDeferBits)
		// Mark the temp live so the zeroing store above is not eliminated;
		// the runtime reads it during panic handling.
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, deferBitsTemp, s.mem(), false)
	}

	var params *abi.ABIParamResultInfo
	params = s.f.ABISelf.ABIAnalyze(fn.Type(), true)

	// Record output parameters assigned to registers; the debug-info
	// generator needs them.
	var debugInfo ssa.FuncDebug
	for _, n := range fn.Dcl {
		if n.Class == ir.PPARAMOUT && n.IsOutputParamInRegisters() {
			debugInfo.RegOutputParams = append(debugInfo.RegOutputParams, n)
		}
	}
	fn.DebugInfo = &debugInfo

	// Generate addresses of local declarations.
	s.decladdrs = map[*ir.Name]*ssa.Value{}
	for _, n := range fn.Dcl {
		switch n.Class {
		case ir.PPARAM:
			s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
		case ir.PPARAMOUT:
			s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
		case ir.PAUTO:
			// PAUTO addresses are computed lazily at each use.
		default:
			s.Fatalf("local variable with class %v unimplemented", n.Class)
		}
	}

	s.f.OwnAux = ssa.OwnAuxCall(fn.LSym, params)

	// Populate SSAable arguments.
	for _, n := range fn.Dcl {
		if n.Class == ir.PPARAM {
			if s.canSSA(n) {
				v := s.newValue0A(ssa.OpArg, n.Type(), n)
				s.vars[n] = v
				s.addNamedValue(n, v)
			} else {
				paramAssignment := ssa.ParamAssignmentForArgName(s.f, n)
				if len(paramAssignment.Registers) > 0 {
					if ssa.CanSSA(n.Type()) {
						// Spill the whole register-passed value to its slot.
						v := s.newValue0A(ssa.OpArg, n.Type(), n)
						s.store(n.Type(), s.decladdrs[n], v)
					} else {
						// Spill register pieces individually.
						s.storeParameterRegsToStack(s.f.ABISelf, paramAssignment, n, s.decladdrs[n], false)
					}
				}
			}
		}
	}

	// Populate closure variables.
	if fn.Needctxt() {
		clo := s.entryNewValue0(ssa.OpGetClosurePtr, s.f.Config.Types.BytePtr)
		if fn.RangeParent != nil && base.Flag.N != 0 {
			// With optimization off, spill the closure pointer to a named,
			// address-taken stack slot (.closureptr) and keep it live so
			// the store survives; s.f.CloSlot records the slot.
			sym := &types.Sym{Name: ".closureptr", Pkg: types.LocalPkg}
			cloSlot := s.curfn.NewLocal(src.NoXPos, sym, s.f.Config.Types.BytePtr)
			cloSlot.SetUsed(true)
			cloSlot.SetEsc(ir.EscNever)
			cloSlot.SetAddrtaken(true)
			s.f.CloSlot = cloSlot
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, cloSlot, s.mem(), false)
			addr := s.addr(cloSlot)
			s.store(s.f.Config.Types.BytePtr, addr, clo)
			// Keep it from being dead-store eliminated.
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, cloSlot, s.mem(), false)
		}
		csiter := typecheck.NewClosureStructIter(fn.ClosureVars)
		for {
			n, typ, offset := csiter.Next()
			if n == nil {
				break
			}

			ptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(typ), offset, clo)

			// If a captured-by-value variable is never addressed and is
			// SSA-able, promote it to a PAUTO local initialized from the
			// closure struct.
			if n.Byval() && !n.Addrtaken() && ssa.CanSSA(n.Type()) {
				n.Class = ir.PAUTO
				fn.Dcl = append(fn.Dcl, n)
				s.assign(n, s.load(n.Type(), ptr), false, 0)
				continue
			}

			// By-reference captures store a pointer in the closure struct;
			// load it to get the variable's heap address.
			if !n.Byval() {
				ptr = s.load(typ, ptr)
			}
			s.setHeapaddr(fn.Pos(), n, ptr)
		}
	}

	// Convert the AST-based IR to the SSA-based IR.
	if s.instrumentEnterExit {
		s.rtcall(ir.Syms.Racefuncenter, true, nil, s.newValue0(ssa.OpGetCallerPC, types.Types[types.TUINTPTR]))
	}
	s.zeroResults()
	s.paramsToHeap()
	s.stmtList(fn.Body)

	// Fallthrough to exit if the body didn't end in an explicit return/panic.
	if s.curBlock != nil {
		s.pushLine(fn.Endlineno)
		s.exit()
		s.popLine()
	}

	for _, b := range s.f.Blocks {
		if b.Pos != src.NoXPos {
			s.updateUnsetPredPos(b)
		}
	}

	s.f.HTMLWriter.WritePhase("before insert phis", "before insert phis")

	s.insertPhis()

	// Main call to the ssa package to compile the function.
	ssa.Compile(s.f)

	fe.AllocFrame(s.f)

	if len(s.openDefers) != 0 {
		s.emitOpenDeferInfo()
	}

	// Record spill locations for register-passed input parameters: one
	// ssa.Spill per register piece, at the piece's frame offset.
	for _, p := range params.InParams() {
		typs, offs := p.RegisterTypesAndOffsets()
		if len(offs) < len(typs) {
			s.Fatalf("len(offs)=%d < len(typs)=%d, params=\n%s", len(offs), len(typs), params)
		}
		for i, t := range typs {
			o := offs[i]
			fo := p.FrameOffset(params)
			reg := ssa.ObjRegForAbiReg(p.Registers[i], s.f.Config)
			s.f.RegArgs = append(s.f.RegArgs, ssa.Spill{Reg: reg, Offset: fo + o, Type: t})
		}
	}

	return s.f
}
623
// storeParameterRegsToStack stores the register pieces of parameter n into
// its stack slot at addr, one store per register. If pointersOnly is set,
// only pointer-shaped pieces are stored and the rest are skipped.
func (s *state) storeParameterRegsToStack(abi *abi.ABIConfig, paramAssignment *abi.ABIParamAssignment, n *ir.Name, addr *ssa.Value, pointersOnly bool) {
	typs, offs := paramAssignment.RegisterTypesAndOffsets()
	for i, t := range typs {
		if pointersOnly && !t.IsPtrShaped() {
			continue
		}
		r := paramAssignment.Registers[i]
		o := offs[i]
		op, reg := ssa.ArgOpAndRegisterFor(r, abi)
		// Aux ties the register value back to the named parameter piece.
		aux := &ssa.AuxNameOffset{Name: n, Offset: o}
		v := s.newValue0I(op, t, reg)
		v.Aux = aux
		p := s.newValue1I(ssa.OpOffPtr, types.NewPtr(t), o, addr)
		s.store(t, p, v)
	}
}
640
641
642
643
644
645
646
// zeroResults zeros the stack-resident return values at function entry.
// This must run early: a recovered panic can expose result values as they
// existed at the time of the panic.
func (s *state) zeroResults() {
	for _, f := range s.curfn.Type().Results() {
		n := f.Nname.(*ir.Name)
		if !n.OnStack() {
			// Heap-escaped result: presumably zeroed by its heap
			// allocation (see paramsToHeap/newHeapaddr) — nothing to
			// do for the stack slot here.
			continue
		}
		// Zero the stack location containing f.
		if typ := n.Type(); ssa.CanSSA(typ) {
			s.assign(n, s.zeroVal(typ), false, 0)
		} else {
			// Non-SSAable: zero the memory directly. VarDef first when
			// liveness cares about this slot (pointers or merge candidates).
			if typ.HasPointers() || ssa.IsMergeCandidate(n) {
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			}
			s.zero(n.Type(), s.decladdrs[n])
		}
	}
}
667
668
669
// paramsToHeap produces code to allocate memory for heap-escaped parameters
// and, for input parameters (not results), to copy their stack values into
// the allocation.
func (s *state) paramsToHeap() {
	do := func(params []*types.Field) {
		for _, f := range params {
			if f.Nname == nil {
				continue // anonymous or blank parameter
			}
			n := f.Nname.(*ir.Name)
			if ir.IsBlank(n) || n.OnStack() {
				continue
			}
			s.newHeapaddr(n)
			if n.Class == ir.PPARAM {
				// Copy the incoming stack value into the heap copy.
				s.move(n.Type(), s.expr(n.Heapaddr), s.decladdrs[n])
			}
		}
	}

	typ := s.curfn.Type()
	do(typ.Recvs())
	do(typ.Params())
	do(typ.Results())
}
692
693
694
695
696 func allocSizeAndAlign(t *types.Type) (int64, int64) {
697 size, align := t.Size(), t.Alignment()
698 if types.PtrSize == 4 && align == 4 && size >= 8 {
699
700 size = types.RoundUp(size, 8)
701 align = 8
702 }
703 return size, align
704 }
705 func allocSize(t *types.Type) int64 {
706 size, _ := allocSizeAndAlign(t)
707 return size
708 }
709 func allocAlign(t *types.Type) int64 {
710 _, align := allocSizeAndAlign(t)
711 return align
712 }
713
714
// newHeapaddr allocates heap storage for n and records its address in
// n.Heapaddr. Small pointer-free allocations are batched: they share a single
// underlying allocation (up to maxAggregatedHeapAllocation bytes total),
// tracked in s.pendingHeapAllocations and finalized by
// flushPendingHeapAllocations.
func (s *state) newHeapaddr(n *ir.Name) {
	size := allocSize(n.Type())
	// Pointerful, large, or zero-sized allocations are not batched.
	if n.Type().HasPointers() || size >= maxAggregatedHeapAllocation || size == 0 {
		s.setHeapaddr(n.Pos(), n, s.newObject(n.Type()))
		return
	}

	// If this allocation would push the current batch over the limit,
	// flush the batch first.
	var used int64
	for _, v := range s.pendingHeapAllocations {
		used += allocSize(v.Type.Elem())
	}
	if used+size > maxAggregatedHeapAllocation {
		s.flushPendingHeapAllocations()
	}

	var allocCall *ssa.Value
	if len(s.pendingHeapAllocations) == 0 {
		// Start a new batch with a placeholder allocation call; its size
		// argument and callee are rewritten in flushPendingHeapAllocations
		// once the batch's total size is known.
		allocCall = s.newObjectNonSpecialized(n.Type(), nil)
	} else {
		// Join the existing batch: share its underlying allocation call.
		allocCall = s.pendingHeapAllocations[0].Args[0]
	}

	// v = offset into the shared allocation; AuxInt (the offset) is
	// assigned when the batch is flushed.
	v := s.newValue1I(ssa.OpOffPtr, n.Type().PtrTo(), 0, allocCall)

	s.pendingHeapAllocations = append(s.pendingHeapAllocations, v)

	s.setHeapaddr(n.Pos(), n, v)
}
750
// flushPendingHeapAllocations finalizes the current batch of small
// pointer-free heap allocations: it assigns each OpOffPtr its offset within a
// single shared allocation and rewrites the shared call in place to allocate
// the combined size via mallocgc (or a size-specialized variant).
func (s *state) flushPendingHeapAllocations() {
	pending := s.pendingHeapAllocations
	if len(pending) == 0 {
		return
	}
	s.pendingHeapAllocations = nil
	ptr := pending[0].Args[0]  // The SelectN[0] post-call value.
	call := ptr.Args[0]        // The underlying allocation call.

	if len(pending) == 1 {
		// Single allocation in the batch: keep the original newobject
		// call and turn the zero-offset OpOffPtr into a plain copy.
		v := pending[0]
		v.Op = ssa.OpCopy
		return
	}

	// Sort by decreasing alignment so each element's offset is naturally
	// aligned (stable, to keep a deterministic layout).
	slices.SortStableFunc(pending, func(x, y *ssa.Value) int {
		return cmp.Compare(allocAlign(y.Type.Elem()), allocAlign(x.Type.Elem()))
	})

	// Assign offsets and accumulate the total size.
	var size int64
	for _, v := range pending {
		v.AuxInt = size
		size += allocSize(v.Type.Elem())
	}
	// Round the total up to the (largest) alignment in the batch.
	align := allocAlign(pending[0].Type.Elem())
	size = types.RoundUp(size, align)

	// Rewrite the existing call in place into
	// mallocgc(size, nil, true) — nil type descriptor, needzero=true —
	// preserving the original memory argument (call.Args[1]).
	args := []*ssa.Value{
		s.constInt(types.Types[types.TUINTPTR], size),
		s.constNil(call.Args[0].Type),
		s.constBool(true),
		call.Args[1],
	}
	mallocSym := ir.Syms.MallocGC
	if specialMallocSym := s.specializedMallocSym(size, false); specialMallocSym != nil {
		mallocSym = specialMallocSym
	}
	call.Aux = ssa.StaticAuxCall(mallocSym, s.f.ABIDefault.ABIAnalyzeTypes(
		[]*types.Type{args[0].Type, args[1].Type, args[2].Type},
		[]*types.Type{types.Types[types.TUNSAFEPTR]},
	))
	call.AuxInt = 4 * s.config.PtrSize // arg+result stack space
	call.SetArgs4(args[0], args[1], args[2], args[3])

	// Retype the call result: the shared allocation is unsafe.Pointer,
	// not any particular element pointer type.
	call.Type = types.NewTuple(types.Types[types.TUNSAFEPTR], types.TypeMem)
	ptr.Type = types.Types[types.TUNSAFEPTR]
}
805
// specializedMallocSym returns the symbol of a size-specialized malloc
// runtime entry point for an allocation of the given size and pointer-ness,
// or nil if the generic allocation path must be used.
func (s *state) specializedMallocSym(size int64, hasPointers bool) *obj.LSym {
	if !s.sizeSpecializedMallocEnabled() {
		return nil
	}
	// Specialized entry points only exist for sizes whose heap bits fit in
	// the span (no malloc header).
	ptrSize := s.config.PtrSize
	ptrBits := ptrSize * 8
	minSizeForMallocHeader := ptrSize * ptrBits
	heapBitsInSpan := size <= minSizeForMallocHeader
	if !heapBitsInSpan {
		return nil
	}
	divRoundUp := func(n, a uintptr) uintptr { return (n + a - 1) / a }
	sizeClass := gc.SizeToSizeClass8[divRoundUp(uintptr(size), gc.SmallSizeDiv)]
	if hasPointers {
		return ir.Syms.MallocGCSmallScanNoHeader[sizeClass]
	}
	if size < gc.TinySize {
		// Tiny allocations are specialized per byte size, not size class.
		return ir.Syms.MallocGCTiny[size]
	}
	return ir.Syms.MallocGCSmallNoScan[sizeClass]
}
827
828 func (s *state) sizeSpecializedMallocEnabled() bool {
829 if base.Flag.CompilingRuntime {
830
831
832
833
834
835
836
837 return false
838 }
839
840 return buildcfg.Experiment.SizeSpecializedMalloc && !base.Flag.Cfg.Instrumenting
841 }
842
843
844
// setHeapaddr creates a new local "&x"-style pointer variable holding ptr
// (the heap address of n) and records it as n.Heapaddr. ptr must be a
// pointer to n's type.
func (s *state) setHeapaddr(pos src.XPos, n *ir.Name, ptr *ssa.Value) {
	if !ptr.Type.IsPtr() || !types.Identical(n.Type(), ptr.Type.Elem()) {
		base.FatalfAt(n.Pos(), "setHeapaddr %L with type %v", n, ptr.Type)
	}

	// Declare the "&x" pseudo-variable.
	sym := &types.Sym{Name: "&" + n.Sym().Name, Pkg: types.LocalPkg}
	addr := s.curfn.NewLocal(pos, sym, types.NewPtr(n.Type()))
	addr.SetUsed(true)
	types.CalcSize(addr.Type())

	if n.Class == ir.PPARAMOUT {
		addr.SetIsOutputParamHeapAddr(true)
	}

	n.Heapaddr = addr
	s.assign(addr, ptr, false, 0)
}
863
864
// newObject returns an SSA value denoting new(typ). Zero-sized types yield
// a pointer to the runtime zerobase symbol; otherwise a runtime allocation
// call is emitted (size-specialized when available, newobject otherwise).
func (s *state) newObject(typ *types.Type) *ssa.Value {
	if typ.Size() == 0 {
		return s.newValue1A(ssa.OpAddr, types.NewPtr(typ), ir.Syms.Zerobase, s.sb)
	}
	rtype := s.reflectType(typ)
	if specialMallocSym := s.specializedMallocSym(typ.Size(), typ.HasPointers()); specialMallocSym != nil {
		// Specialized entry points take (size, type, needzero).
		return s.rtcall(specialMallocSym, true, []*types.Type{types.NewPtr(typ)},
			s.constInt(types.Types[types.TUINTPTR], typ.Size()),
			rtype,
			s.constBool(true),
		)[0]
	}
	return s.rtcall(ir.Syms.Newobject, true, []*types.Type{types.NewPtr(typ)}, rtype)[0]
}
879
880
881
882 func (s *state) newObjectNonSpecialized(typ *types.Type, rtype *ssa.Value) *ssa.Value {
883 if typ.Size() == 0 {
884 return s.newValue1A(ssa.OpAddr, types.NewPtr(typ), ir.Syms.Zerobase, s.sb)
885 }
886 if rtype == nil {
887 rtype = s.reflectType(typ)
888 }
889 return s.rtcall(ir.Syms.Newobject, true, []*types.Type{types.NewPtr(typ)}, rtype)[0]
890 }
891
// checkPtrAlignment inserts a call to runtime.checkptrAlignment for the
// unsafe.Pointer conversion n, checking value v. count, if non-nil, is the
// number of array elements being addressed (n's element type must then be an
// array); nil means a single element.
func (s *state) checkPtrAlignment(n *ir.ConvExpr, v *ssa.Value, count *ssa.Value) {
	if !n.Type().IsPtr() {
		s.Fatalf("expected pointer type: %v", n.Type())
	}
	elem, rtypeExpr := n.Type().Elem(), n.ElemRType
	if count != nil {
		if !elem.IsArray() {
			s.Fatalf("expected array type: %v", elem)
		}
		// Check against the array's element type, not the array itself.
		elem, rtypeExpr = elem.Elem(), n.ElemElemRType
	}
	size := elem.Size()
	// Fast path: alignment 1 with no possible overflow needs no check.
	if elem.Alignment() == 1 && (size == 0 || size == 1 || count == nil) {
		return
	}
	if count == nil {
		count = s.constInt(types.Types[types.TUINTPTR], 1)
	}
	if count.Type.Size() != s.config.PtrSize {
		s.Fatalf("expected count fit to a uintptr size, have: %d, want: %d", count.Type.Size(), s.config.PtrSize)
	}
	// Use the precomputed runtime-type expression when available.
	var rtype *ssa.Value
	if rtypeExpr != nil {
		rtype = s.expr(rtypeExpr)
	} else {
		rtype = s.reflectType(elem)
	}
	s.rtcall(ir.Syms.CheckPtrAlignment, true, nil, v, rtype, count)
}
922
923
924
// reflectType returns an SSA value (a *uint8 materialized in the entry
// block) pointing at the linker symbol for typ's runtime type descriptor.
func (s *state) reflectType(typ *types.Type) *ssa.Value {
	lsym := reflectdata.TypeLinksym(typ)
	return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(types.Types[types.TUINT8]), lsym, s.sb)
}
931
// dumpSourcesColumn writes the "sources" column of the ssa.html output:
// fn's own source lines followed by those of every inlined function recorded
// in ssaDumpInlined, sorted topologically.
func dumpSourcesColumn(writer *ssa.HTMLWriter, fn *ir.Func) {
	// Read the target function's own source lines.
	fname := base.Ctxt.PosTable.Pos(fn.Pos()).Filename()
	targetFn, err := readFuncLines(fname, fn.Pos().Line(), fn.Endlineno.Line())
	if err != nil {
		// Non-fatal: the dump simply omits the sources.
		writer.Logf("cannot read sources for function %v: %v", fn, err)
	}

	// Read the sources of each inlined function.
	var inlFns []*ssa.FuncLines
	for _, fi := range ssaDumpInlined {
		elno := fi.Endlineno
		fname := base.Ctxt.PosTable.Pos(fi.Pos()).Filename()
		fnLines, err := readFuncLines(fname, fi.Pos().Line(), elno.Line())
		if err != nil {
			writer.Logf("cannot read sources for inlined function %v: %v", fi, err)
			continue
		}
		inlFns = append(inlFns, fnLines)
	}

	slices.SortFunc(inlFns, ssa.ByTopoCmp)
	if targetFn != nil {
		// The target function always comes first.
		inlFns = append([]*ssa.FuncLines{targetFn}, inlFns...)
	}

	writer.WriteSources("sources", inlFns)
}
960
961 func readFuncLines(file string, start, end uint) (*ssa.FuncLines, error) {
962 f, err := os.Open(os.ExpandEnv(file))
963 if err != nil {
964 return nil, err
965 }
966 defer f.Close()
967 var lines []string
968 ln := uint(1)
969 scanner := bufio.NewScanner(f)
970 for scanner.Scan() && ln <= end {
971 if ln >= start {
972 lines = append(lines, scanner.Text())
973 }
974 ln++
975 }
976 return &ssa.FuncLines{Filename: file, StartLineno: start, Lines: lines}, nil
977 }
978
979
980
981
// updateUnsetPredPos propagates a position to b's predecessors that lack
// one, recursively. Each position-lacking predecessor gets b's position, or
// the position of b's first value that has a real one.
func (s *state) updateUnsetPredPos(b *ssa.Block) {
	if b.Pos == src.NoXPos {
		s.Fatalf("Block %s should have a position", b)
	}
	bestPos := src.NoXPos // computed lazily, only if some predecessor needs it
	for _, e := range b.Preds {
		p := e.Block()
		if !p.LackingPos() {
			continue
		}
		if bestPos == src.NoXPos {
			bestPos = b.Pos
			// Prefer the position of b's earliest value that carries one.
			for _, v := range b.Values {
				if v.LackingPos() {
					continue
				}
				if v.Pos != src.NoXPos {
					bestPos = v.Pos
					break
				}
			}
		}
		p.Pos = bestPos
		// p now has a position, so its own predecessors can be updated.
		s.updateUnsetPredPos(p)
	}
}
1010
1011
// openDeferInfo contains information about a single open-coded defer in the
// current function (see s.openDefers and emitOpenDeferInfo).
type openDeferInfo struct {
	// n is the defer call expression.
	n *ir.CallExpr

	// closure holds the closure value for the deferred call, when one is
	// needed (set where the defer is processed; may be nil).
	closure *ssa.Value

	// closureNode is the stack slot holding the closure; its frame offset
	// is recorded by emitOpenDeferInfo (slots are laid out consecutively).
	closureNode *ir.Name
}
1023
// state holds all the per-function working state used while converting a
// function's IR to SSA form in buildssa.
type state struct {
	// configuration (arch) information
	config *ssa.Config

	// function we're building
	f *ssa.Func

	// Node for function
	curfn *ir.Func

	// labels in f
	labels map[string]*ssaLabel

	// unlabeled break and continue statement targets
	breakTo    *ssa.Block
	continueTo *ssa.Block

	// current location where we're interpreting the AST
	curBlock *ssa.Block

	// variable assignments in the current block (map from variable symbol
	// to ssa value); reset at every startBlock
	vars map[ir.Node]*ssa.Value

	// fwdVars are variables that are used before they are defined in the
	// current block; cleared (not reallocated) at every startBlock
	fwdVars map[ir.Node]*ssa.Value

	// all defined variables at the end of each block, indexed by block ID;
	// snapshot taken in endBlock, consumed by insertPhis
	defvars []map[ir.Node]*ssa.Value

	// addresses of PPARAM and PPARAMOUT variables (computed in the entry block)
	decladdrs map[*ir.Name]*ssa.Value

	// starting values; entry-block memory, stack pointer, and static base
	startmem *ssa.Value
	sp       *ssa.Value
	sb       *ssa.Value

	// address of and temp holding the deferBits byte for open-coded defers
	deferBitsAddr *ssa.Value
	deferBitsTemp *ir.Name

	// line number stack (pushLine/popLine/peekPos)
	line []src.XPos

	// the last line number processed; it may have been popped
	lastPos src.XPos

	// list of panic calls by function name and line number,
	// so we can deduplicate panic blocks
	panics map[funcLine]*ssa.Block

	cgoUnsafeArgs       bool
	hasdefer            bool // whether the function contains a defer statement
	softFloat           bool
	hasOpenDefers       bool // whether we are doing open-coded defers
	checkPtrEnabled     bool // whether to insert checkptr instrumentation
	instrumentEnterExit bool // whether to instrument function enter/exit (race)
	instrumentMemory    bool // whether to instrument memory operations

	// If doing open-coded defers, list of info about the defer calls in
	// the function in source order (validated/emitted by emitOpenDeferInfo)
	openDefers []*openDeferInfo

	// cached state for the shared deferreturn exit path
	lastDeferExit       *ssa.Block
	lastDeferFinalBlock *ssa.Block
	lastDeferCount      int

	prevCall *ssa.Value // the most recently emitted call value

	// pendingHeapAllocations is the current batch of small pointer-free
	// heap allocations awaiting aggregation (see newHeapaddr and
	// flushPendingHeapAllocations; flushed at endBlock).
	pendingHeapAllocations []*ssa.Value

	// appendTargets marks nodes already handled as append targets
	appendTargets map[ir.Node]bool

	// starting position of each block, indexed by block ID
	// (maintained by startBlock/pushLine/endBlock)
	blockStarts []src.XPos

	// backing-store bookkeeping, per target node
	backingStores map[ir.Node]*backingStoreInfo
}
1114
// backingStoreInfo records per-node state for a stack backing store
// (tracked in state.backingStores).
type backingStoreInfo struct {
	// K is the backing store's capacity/size parameter.
	K int64

	// store is the local holding the backing storage.
	store *ir.Name

	// used tracks whether the backing store is in use.
	used *ir.Name

	// usedStatic indicates the "used" state is known statically,
	// so no runtime flag is needed. (NOTE(review): inferred from the
	// field names; confirm against the code that populates this struct.)
	usedStatic bool
}
1127
// funcLine identifies a panic call site — target function symbol plus source
// position (base and line) — used as the key for deduplicating panic blocks
// in state.panics.
type funcLine struct {
	f    *obj.LSym
	base *src.PosBase
	line uint
}
1133
// ssaLabel holds the blocks associated with a Go label: the label's own
// target plus the break/continue targets of a labeled loop or switch.
type ssaLabel struct {
	target         *ssa.Block // block identified by this label
	breakTarget    *ssa.Block // block to break to in control flow node identified by this label
	continueTarget *ssa.Block // block to continue to in control flow node identified by this label
}
1139
1140
1141 func (s *state) label(sym *types.Sym) *ssaLabel {
1142 lab := s.labels[sym.Name]
1143 if lab == nil {
1144 lab = new(ssaLabel)
1145 s.labels[sym.Name] = lab
1146 }
1147 return lab
1148 }
1149
// Logging and error-reporting helpers, delegating to the ssa.Func and its
// frontend. Fatalf reports at the current position (peekPos).
func (s *state) Logf(msg string, args ...any) { s.f.Logf(msg, args...) }
func (s *state) Log() bool                    { return s.f.Log() }
func (s *state) Fatalf(msg string, args ...any) {
	s.f.Frontend().Fatalf(s.peekPos(), msg, args...)
}
func (s *state) Warnl(pos src.XPos, msg string, args ...any) { s.f.Warnl(pos, msg, args...) }
func (s *state) Debug_checknil() bool                        { return s.f.Frontend().Debug_checknil() }
1157
// ssaMarker returns a synthetic ir.Name used purely as a key for special
// pseudo-variables (memory, ptr, len, ...) in the s.vars map.
func ssaMarker(name string) *ir.Name {
	return ir.NewNameAt(base.Pos, &types.Sym{Name: name}, nil)
}
1161
var (
	// marker node for the memory variable: tracks the current memory
	// state through s.vars/s.defvars.
	memVar = ssaMarker("mem")

	// marker nodes for temporary variables used while building
	// compound values (slices, interfaces, defers, ...).
	ptrVar       = ssaMarker("ptr")
	lenVar       = ssaMarker("len")
	capVar       = ssaMarker("cap")
	typVar       = ssaMarker("typ")
	okVar        = ssaMarker("ok")
	deferBitsVar = ssaMarker("deferBits")
	hashVar      = ssaMarker("hash")
)
1175
1176
// startBlock sets the current block we're generating code in to b.
// It is a fatal error to start a block while another is still open.
func (s *state) startBlock(b *ssa.Block) {
	if s.curBlock != nil {
		s.Fatalf("starting block %v when block %v has not ended", b, s.curBlock)
	}
	s.curBlock = b
	s.vars = map[ir.Node]*ssa.Value{}
	// Forward references from the previous block must not leak in.
	clear(s.fwdVars)
	// Make sure blockStarts can record the start position of b.
	for len(s.blockStarts) <= int(b.ID) {
		s.blockStarts = append(s.blockStarts, src.NoXPos)
	}
}
1188
1189
1190
1191
// endBlock marks the end of generating code for the current block.
// It returns the (formerly) current block, or nil if there is no
// current block (i.e. no code flows to the current execution point).
func (s *state) endBlock() *ssa.Block {
	b := s.curBlock
	if b == nil {
		return nil
	}

	s.flushPendingHeapAllocations()

	// Record this block's variable definitions for later phi insertion.
	for len(s.defvars) <= int(b.ID) {
		s.defvars = append(s.defvars, nil)
	}
	s.defvars[b.ID] = s.vars
	s.curBlock = nil
	s.vars = nil
	if b.LackingPos() {
		// Empty plain blocks get the line of their successor (handled
		// later), except for the entry block and blocks ending in a
		// control statement.
		b.Pos = src.NoXPos
	} else {
		b.Pos = s.lastPos
		if s.blockStarts[b.ID] == src.NoXPos {
			s.blockStarts[b.ID] = s.lastPos
		}
	}
	return b
}
1219
1220
// pushLine pushes a line number on the line number stack.
func (s *state) pushLine(line src.XPos) {
	if !line.IsKnown() {
		// The frontend may emit nodes with no position; use the
		// parent (currently-pushed) position in that case.
		line = s.peekPos()
		if base.Flag.K != 0 {
			base.Warn("buildssa: unknown position (line 0)")
		}
	} else {
		s.lastPos = line
	}

	// Record the start position of the current block, if not yet known.
	if b := s.curBlock; b != nil && s.blockStarts[b.ID] == src.NoXPos {
		s.blockStarts[b.ID] = line
	}

	s.line = append(s.line, line)
}
1240
1241
1242 func (s *state) popLine() {
1243 s.line = s.line[:len(s.line)-1]
1244 }
1245
1246
1247 func (s *state) peekPos() src.XPos {
1248 return s.line[len(s.line)-1]
1249 }
1250
1251
// newValue0 adds a new value with no arguments to the current block.
func (s *state) newValue0(op ssa.Op, t *types.Type) *ssa.Value {
	return s.curBlock.NewValue0(s.peekPos(), op, t)
}

// newValue0A adds a new value with no arguments and an aux value to the current block.
func (s *state) newValue0A(op ssa.Op, t *types.Type, aux ssa.Aux) *ssa.Value {
	return s.curBlock.NewValue0A(s.peekPos(), op, t, aux)
}

// newValue0I adds a new value with no arguments and an auxint value to the current block.
func (s *state) newValue0I(op ssa.Op, t *types.Type, auxint int64) *ssa.Value {
	return s.curBlock.NewValue0I(s.peekPos(), op, t, auxint)
}
1265
1266
// newValue1 adds a new value with one argument to the current block.
func (s *state) newValue1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1(s.peekPos(), op, t, arg)
}

// newValue1A adds a new value with one argument and an aux value to the current block.
func (s *state) newValue1A(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1A(s.peekPos(), op, t, aux, arg)
}

// newValue1Apos adds a new value with one argument and an aux value to the current block.
// isStmt determines whether the created value may serve as a statement
// boundary (true) or is marked NotStmt (false).
func (s *state) newValue1Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue1A(s.peekPos(), op, t, aux, arg)
	}
	return s.curBlock.NewValue1A(s.peekPos().WithNotStmt(), op, t, aux, arg)
}

// newValue1I adds a new value with one argument and an auxint value to the current block.
func (s *state) newValue1I(op ssa.Op, t *types.Type, aux int64, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1I(s.peekPos(), op, t, aux, arg)
}
1290
1291
// newValue2 adds a new value with two arguments to the current block.
func (s *state) newValue2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2(s.peekPos(), op, t, arg0, arg1)
}

// newValue2A adds a new value with two arguments and an aux value to the current block.
func (s *state) newValue2A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2A(s.peekPos(), op, t, aux, arg0, arg1)
}

// newValue2Apos adds a new value with two arguments and an aux value to the current block.
// isStmt determines whether the created value may serve as a statement
// boundary (true) or is marked NotStmt (false).
func (s *state) newValue2Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue2A(s.peekPos(), op, t, aux, arg0, arg1)
	}
	return s.curBlock.NewValue2A(s.peekPos().WithNotStmt(), op, t, aux, arg0, arg1)
}

// newValue2I adds a new value with two arguments and an auxint value to the current block.
func (s *state) newValue2I(op ssa.Op, t *types.Type, aux int64, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2I(s.peekPos(), op, t, aux, arg0, arg1)
}
1315
1316
// newValue3 adds a new value with three arguments to the current block.
func (s *state) newValue3(op ssa.Op, t *types.Type, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3(s.peekPos(), op, t, arg0, arg1, arg2)
}

// newValue3I adds a new value with three arguments and an auxint value to the current block.
func (s *state) newValue3I(op ssa.Op, t *types.Type, aux int64, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3I(s.peekPos(), op, t, aux, arg0, arg1, arg2)
}

// newValue3A adds a new value with three arguments and an aux value to the current block.
func (s *state) newValue3A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
}

// newValue3Apos adds a new value with three arguments and an aux value to the current block.
// isStmt determines whether the created value may serve as a statement
// boundary (true) or is marked NotStmt (false).
func (s *state) newValue3Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2 *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
	}
	return s.curBlock.NewValue3A(s.peekPos().WithNotStmt(), op, t, aux, arg0, arg1, arg2)
}
1340
1341
// newValue4 adds a new value with four arguments to the current block.
func (s *state) newValue4(op ssa.Op, t *types.Type, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4(s.peekPos(), op, t, arg0, arg1, arg2, arg3)
}

// newValue4A adds a new value with four arguments and an aux value to the current block.
func (s *state) newValue4A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4A(s.peekPos(), op, t, aux, arg0, arg1, arg2, arg3)
}

// newValue4I adds a new value with four arguments and an auxint value to the current block.
func (s *state) newValue4I(op ssa.Op, t *types.Type, aux int64, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4I(s.peekPos(), op, t, aux, arg0, arg1, arg2, arg3)
}
1355
// entryBlock returns the block to place "entry" values in — normally
// the function's entry block.
func (s *state) entryBlock() *ssa.Block {
	b := s.f.Entry
	if base.Flag.N > 0 && s.curBlock != nil {
		// With optimizations disabled, place the value in the current
		// block instead of the entry block.
		// NOTE(review): presumably this keeps values near their uses
		// when CSE/scheduling won't move them — confirm rationale.
		b = s.curBlock
	}
	return b
}
1367
1368
// entryNewValue0 adds a new value with no arguments to the entry block.
// Entry-block values carry no source position (src.NoXPos).
func (s *state) entryNewValue0(op ssa.Op, t *types.Type) *ssa.Value {
	return s.entryBlock().NewValue0(src.NoXPos, op, t)
}

// entryNewValue0A adds a new value with no arguments and an aux value to the entry block.
func (s *state) entryNewValue0A(op ssa.Op, t *types.Type, aux ssa.Aux) *ssa.Value {
	return s.entryBlock().NewValue0A(src.NoXPos, op, t, aux)
}

// entryNewValue1 adds a new value with one argument to the entry block.
func (s *state) entryNewValue1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1(src.NoXPos, op, t, arg)
}

// entryNewValue1I adds a new value with one argument and an auxint value to the entry block.
func (s *state) entryNewValue1I(op ssa.Op, t *types.Type, auxint int64, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1I(src.NoXPos, op, t, auxint, arg)
}

// entryNewValue1A adds a new value with one argument and an aux value to the entry block.
func (s *state) entryNewValue1A(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1A(src.NoXPos, op, t, aux, arg)
}

// entryNewValue2 adds a new value with two arguments to the entry block.
func (s *state) entryNewValue2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue2(src.NoXPos, op, t, arg0, arg1)
}

// entryNewValue2A adds a new value with two arguments and an aux value to the entry block.
func (s *state) entryNewValue2A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue2A(src.NoXPos, op, t, aux, arg0, arg1)
}
1402
1403
// const* routines add a new const value to the function, cached so
// identical constants are shared.
func (s *state) constSlice(t *types.Type) *ssa.Value {
	return s.f.ConstSlice(t)
}
func (s *state) constInterface(t *types.Type) *ssa.Value {
	return s.f.ConstInterface(t)
}
func (s *state) constNil(t *types.Type) *ssa.Value { return s.f.ConstNil(t) }
func (s *state) constEmptyString(t *types.Type) *ssa.Value {
	return s.f.ConstEmptyString(t)
}
func (s *state) constBool(c bool) *ssa.Value {
	return s.f.ConstBool(types.Types[types.TBOOL], c)
}
func (s *state) constInt8(t *types.Type, c int8) *ssa.Value {
	return s.f.ConstInt8(t, c)
}
func (s *state) constInt16(t *types.Type, c int16) *ssa.Value {
	return s.f.ConstInt16(t, c)
}
func (s *state) constInt32(t *types.Type, c int32) *ssa.Value {
	return s.f.ConstInt32(t, c)
}
func (s *state) constInt64(t *types.Type, c int64) *ssa.Value {
	return s.f.ConstInt64(t, c)
}
func (s *state) constFloat32(t *types.Type, c float64) *ssa.Value {
	return s.f.ConstFloat32(t, c)
}
func (s *state) constFloat64(t *types.Type, c float64) *ssa.Value {
	return s.f.ConstFloat64(t, c)
}

// constInt creates an int constant of the target's native int width.
// On 32-bit targets it is a fatal error if c does not fit in 32 bits.
func (s *state) constInt(t *types.Type, c int64) *ssa.Value {
	if s.config.PtrSize == 8 {
		return s.constInt64(t, c)
	}
	if int64(int32(c)) != c {
		s.Fatalf("integer constant too big %d", c)
	}
	return s.constInt32(t, int32(c))
}
1444
1445
1446
// newValueOrSfCall* are wrappers around newValue*, which may instead
// create a call to a soft-float runtime function when the target is
// compiling with soft-float (s.softFloat) and the op has a runtime
// replacement (s.sfcall).
func (s *state) newValueOrSfCall1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	if s.softFloat {
		if c, ok := s.sfcall(op, arg); ok {
			return c
		}
	}
	return s.newValue1(op, t, arg)
}
func (s *state) newValueOrSfCall2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	if s.softFloat {
		if c, ok := s.sfcall(op, arg0, arg1); ok {
			return c
		}
	}
	return s.newValue2(op, t, arg0, arg1)
}
1463
// instrumentKind distinguishes the flavors of memory access reported
// to the active sanitizer (msan/race/asan): plain reads, plain
// writes, and two-address moves.
type instrumentKind uint8

const (
	// Declare the constants with the instrumentKind type (rather than
	// leaving them as untyped ints) so misuse is a compile error.
	instrumentRead instrumentKind = iota
	instrumentWrite
	instrumentMove
)
1471
// instrument reports an access of the t.Size() bytes at addr to the
// active sanitizer, if any. It is instrument2 with no second address.
func (s *state) instrument(t *types.Type, addr *ssa.Value, kind instrumentKind) {
	s.instrument2(t, addr, nil, kind)
}
1475
1476
1477
1478
// instrumentFields instruments a read/write operation on addr.
// When instrumenting for msan or asan and t is a struct, it recursively
// instruments each (named) field individually instead of the whole
// struct; blank fields are skipped. Otherwise it falls back to a
// single whole-value instrument call.
func (s *state) instrumentFields(t *types.Type, addr *ssa.Value, kind instrumentKind) {
	if !(base.Flag.MSan || base.Flag.ASan) || !isStructNotSIMD(t) {
		s.instrument(t, addr, kind)
		return
	}
	for _, f := range t.Fields() {
		if f.Sym.IsBlank() {
			continue
		}
		offptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(f.Type), f.Offset, addr)
		s.instrumentFields(f.Type, offptr, kind)
	}
}
1492
// instrumentMove reports a move of t.Size() bytes from src to dst.
// msan has a dedicated two-address move hook; the other sanitizers
// see it as a read of src followed by a write of dst.
func (s *state) instrumentMove(t *types.Type, dst, src *ssa.Value) {
	if base.Flag.MSan {
		s.instrument2(t, dst, src, instrumentMove)
	} else {
		s.instrument(t, src, instrumentRead)
		s.instrument(t, dst, instrumentWrite)
	}
}
1501
// instrument2 emits a sanitizer runtime call for an access of
// w = t.Size() bytes at addr. For msan moves, addr2 is the source
// address; for all other kinds addr2 must be nil. Which runtime
// function is called (and whether the width is passed) depends on the
// active sanitizer flag (msan/race/asan).
func (s *state) instrument2(t *types.Type, addr, addr2 *ssa.Value, kind instrumentKind) {
	if !s.instrumentMemory {
		return
	}

	w := t.Size()
	if w == 0 {
		return // can't race on zero-sized things
	}

	if ssa.IsSanitizerSafeAddr(addr) {
		return
	}

	var fn *obj.LSym
	needWidth := false

	if addr2 != nil && kind != instrumentMove {
		panic("instrument2: non-nil addr2 for non-move instrumentation")
	}

	if base.Flag.MSan {
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Msanread
		case instrumentWrite:
			fn = ir.Syms.Msanwrite
		case instrumentMove:
			fn = ir.Syms.Msanmove
		default:
			panic("unreachable")
		}
		needWidth = true
	} else if base.Flag.Race && t.NumComponents(types.CountBlankFields) > 1 {
		// for composite objects we have to write every address
		// because a write might happen to any subobject.
		// composites with only one element don't have subobjects, though.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Racereadrange
		case instrumentWrite:
			fn = ir.Syms.Racewriterange
		default:
			panic("unreachable")
		}
		needWidth = true
	} else if base.Flag.Race {
		// for non-composite objects we can write just the start
		// address, as any write must write the first byte.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Raceread
		case instrumentWrite:
			fn = ir.Syms.Racewrite
		default:
			panic("unreachable")
		}
	} else if base.Flag.ASan {
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Asanread
		case instrumentWrite:
			fn = ir.Syms.Asanwrite
		default:
			panic("unreachable")
		}
		needWidth = true
	} else {
		panic("unreachable")
	}

	args := []*ssa.Value{addr}
	if addr2 != nil {
		args = append(args, addr2)
	}
	if needWidth {
		args = append(args, s.constInt(types.Types[types.TUINTPTR], w))
	}
	s.rtcall(fn, true, nil, args...)
}
1582
// load loads a value of type t from address src, with sanitizer
// instrumentation of the read.
func (s *state) load(t *types.Type, src *ssa.Value) *ssa.Value {
	s.instrumentFields(t, src, instrumentRead)
	return s.rawLoad(t, src)
}

// rawLoad is load without sanitizer instrumentation.
func (s *state) rawLoad(t *types.Type, src *ssa.Value) *ssa.Value {
	return s.newValue2(ssa.OpLoad, t, src, s.mem())
}

// store stores val (of type t) to address dst, threading the memory state.
func (s *state) store(t *types.Type, dst, val *ssa.Value) {
	s.vars[memVar] = s.newValue3A(ssa.OpStore, types.TypeMem, t, dst, val, s.mem())
}

// zero zeroes the t.Size() bytes at dst, with write instrumentation.
func (s *state) zero(t *types.Type, dst *ssa.Value) {
	s.instrument(t, dst, instrumentWrite)
	store := s.newValue2I(ssa.OpZero, types.TypeMem, t.Size(), dst, s.mem())
	store.Aux = t
	s.vars[memVar] = store
}
1602
// move copies t.Size() bytes from src to dst, assuming the two regions
// are either disjoint or identical (no partial overlap).
func (s *state) move(t *types.Type, dst, src *ssa.Value) {
	s.moveWhichMayOverlap(t, dst, src, false)
}

// moveWhichMayOverlap copies t.Size() bytes from src to dst. If
// mayOverlap is true, the source and destination may partially overlap.
func (s *state) moveWhichMayOverlap(t *types.Type, dst, src *ssa.Value, mayOverlap bool) {
	s.instrumentMove(t, dst, src)
	if mayOverlap && t.IsArray() && t.NumElem() > 1 && !ssa.IsInlinableMemmove(dst, src, t.Size(), s.f.Config) {
		// Normally, when moving Go values of type T from one location to
		// another, we don't need to worry about partial overlaps: the two
		// Ts are either in disjoint memory or at exactly the same address.
		// Partial overlap can be constructed, however, e.g. via
		// slice-to-array-pointer conversions:
		//   var a [3]int
		//   p := (*[2]int)(a[0:2])
		//   q := (*[2]int)(a[1:3])
		//   *p = *q
		// An OpMove does not handle partial overlap (it may copy in the
		// wrong direction), so for potentially-overlapping multi-element
		// array copies that would not lower to an inlinable memmove, we
		// must call into the runtime, which copies with memmove
		// semantics (correct direction for overlap).
		if t.HasPointers() {
			s.rtcall(ir.Syms.Typedmemmove, true, nil, s.reflectType(t), dst, src)
			// We would have otherwise implemented this move with
			// straightline code, including a write barrier. Pretend we
			// issue a write barrier here, so that the write-barrier
			// position bookkeeping stays consistent.
			s.curfn.SetWBPos(s.peekPos())
		} else {
			s.rtcall(ir.Syms.Memmove, true, nil, dst, src, s.constInt(types.Types[types.TUINTPTR], t.Size()))
		}
		ssa.LogLargeCopy(s.f.Name, s.peekPos(), t.Size())
		return
	}
	store := s.newValue3I(ssa.OpMove, types.TypeMem, t.Size(), dst, src, s.mem())
	store.Aux = t
	s.vars[memVar] = store
}
1649
1650
1651 func (s *state) stmtList(l ir.Nodes) {
1652 for _, n := range l {
1653 s.stmt(n)
1654 }
1655 }
1656
1657
// stmt converts the statement n to SSA and adds it to s.
func (s *state) stmt(n ir.Node) {
	s.pushLine(n.Pos())
	defer s.popLine()

	// If s.curBlock is nil and n isn't a label (which may be the target
	// of a jump into live code), then this statement is dead; emit nothing.
	if s.curBlock == nil && n.Op() != ir.OLABEL {
		return
	}

	s.stmtList(n.Init())
	switch n.Op() {

	case ir.OBLOCK:
		n := n.(*ir.BlockStmt)
		s.stmtList(n.List)

	case ir.OFALL: // no-op; fallthrough is handled when lowering the switch

	case ir.OCALLFUNC:
		n := n.(*ir.CallExpr)
		if ir.IsIntrinsicCall(n) {
			s.intrinsicCall(n)
			return
		}
		fallthrough

	case ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		s.callResult(n, callNormal)
		// Calls to runtime panic/abort routines never return; end the
		// block with a BlockExit so no spurious fallthrough edge exists.
		if n.Op() == ir.OCALLFUNC && n.Fun.Op() == ir.ONAME && n.Fun.(*ir.Name).Class == ir.PFUNC {
			if fn := n.Fun.Sym().Name; base.Flag.CompilingRuntime && fn == "throw" ||
				n.Fun.Sym().Pkg == ir.Pkgs.Runtime &&
					(fn == "throwinit" || fn == "gopanic" || fn == "panicwrap" || fn == "block" ||
						fn == "panicmakeslicelen" || fn == "panicmakeslicecap" || fn == "panicunsafeslicelen" ||
						fn == "panicunsafeslicenilptr" || fn == "panicunsafestringlen" || fn == "panicunsafestringnilptr" ||
						fn == "panicrangestate") {
				m := s.mem()
				b := s.endBlock()
				b.Kind = ssa.BlockExit
				b.SetControl(m)
				// TODO: never rewrite OPANIC to OCALLFUNC in the
				// first place. Need to wait until all backends
				// go through SSA.
			}
		}
	case ir.ODEFER:
		n := n.(*ir.GoDeferStmt)
		if base.Debug.Defer > 0 {
			var defertype string
			if s.hasOpenDefers {
				defertype = "open-coded"
			} else if n.Esc() == ir.EscNever {
				defertype = "stack-allocated"
			} else {
				defertype = "heap-allocated"
			}
			base.WarnfAt(n.Pos(), "%s defer", defertype)
		}
		if s.hasOpenDefers {
			s.openDeferRecord(n.Call.(*ir.CallExpr))
		} else {
			d := callDefer
			if n.Esc() == ir.EscNever && n.DeferAt == nil {
				d = callDeferStack
			}
			s.call(n.Call.(*ir.CallExpr), d, false, n.DeferAt)
		}
	case ir.OGO:
		n := n.(*ir.GoDeferStmt)
		s.callResult(n.Call.(*ir.CallExpr), callGo)

	case ir.OAS2DOTTYPE:
		n := n.(*ir.AssignListStmt)
		var res, resok *ssa.Value
		if n.Rhs[0].Op() == ir.ODOTTYPE2 {
			res, resok = s.dottype(n.Rhs[0].(*ir.TypeAssertExpr), true)
		} else {
			res, resok = s.dynamicDottype(n.Rhs[0].(*ir.DynamicTypeAssertExpr), true)
		}
		deref := false
		if !ssa.CanSSA(n.Rhs[0].Type()) {
			// Value lives in memory: assign by copying from the
			// address of the freshly emitted load.
			if res.Op != ssa.OpLoad {
				s.Fatalf("dottype of non-load")
			}
			mem := s.mem()
			if res.Args[1] != mem {
				s.Fatalf("memory no longer live from 2-result dottype load")
			}
			deref = true
			res = res.Args[0]
		}
		s.assign(n.Lhs[0], res, deref, 0)
		s.assign(n.Lhs[1], resok, false, 0)
		return

	case ir.OAS2FUNC:
		// We come here only when it is an intrinsic call returning two values.
		n := n.(*ir.AssignListStmt)
		call := n.Rhs[0].(*ir.CallExpr)
		if !ir.IsIntrinsicCall(call) {
			s.Fatalf("non-intrinsic AS2FUNC not expanded %v", call)
		}
		v := s.intrinsicCall(call)
		v1 := s.newValue1(ssa.OpSelect0, n.Lhs[0].Type(), v)
		v2 := s.newValue1(ssa.OpSelect1, n.Lhs[1].Type(), v)
		s.assign(n.Lhs[0], v1, false, 0)
		s.assign(n.Lhs[1], v2, false, 0)
		return

	case ir.ODCL:
		n := n.(*ir.Decl)
		if v := n.X; v.Esc() == ir.EscHeap {
			s.newHeapaddr(v)
		}

	case ir.OLABEL:
		n := n.(*ir.LabelStmt)
		sym := n.Label
		if sym.IsBlank() {
			// Nothing to do because the label isn't targetable. See issue 52278.
			break
		}
		lab := s.label(sym)

		// The label may already have a target block via a goto.
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		// Go through the label (being the jump target of a goto).
		// The label might be the target of multiple gotos;
		// if the current block is reachable, fall through into it.
		if s.curBlock != nil {
			b := s.endBlock()
			b.AddEdgeTo(lab.target)
		}
		s.startBlock(lab.target)

	case ir.OGOTO:
		n := n.(*ir.BranchStmt)
		sym := n.Label

		lab := s.label(sym)
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		b := s.endBlock()
		b.Pos = s.lastPos.WithIsStmt() // Do this even if b is an empty block.
		b.AddEdgeTo(lab.target)

	case ir.OAS:
		n := n.(*ir.AssignStmt)
		if n.X == n.Y && n.X.Op() == ir.ONAME {
			// An x=x assignment. No point in doing anything
			// here. In addition, skipping this assignment
			// prevents generating:
			//   VARDEF x
			//   COPY x -> x
			// which is bad because x is incorrectly considered
			// dead before the vardef. See issue #14904.
			return
		}

		// mayOverlap keeps track of whether the LHS and RHS might
		// refer to partially overlapping memory, which can only arise
		// when both sides are dereferences (see moveWhichMayOverlap).
		mayOverlap := n.X.Op() == ir.ODEREF && (n.Y != nil && n.Y.Op() == ir.ODEREF)
		if n.Y != nil && n.Y.Op() == ir.ODEREF {
			p := n.Y.(*ir.StarExpr).X
			for p.Op() == ir.OCONVNOP {
				p = p.(*ir.ConvExpr).X
			}
			if p.Op() == ir.OSPTR && p.(*ir.UnaryExpr).X.Type().IsString() {
				// Pointers into strings point to read-only memory;
				// a copy from there cannot overlap the writable LHS.
				mayOverlap = false
			}
		}

		// Evaluate RHS.
		rhs := n.Y
		if rhs != nil {
			switch rhs.Op() {
			case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT:
				// All literals with nonzero fields have already been
				// rewritten during walk. Any that remain are just T{}
				// or equivalents. Use the zero value.
				if !ir.IsZero(rhs) {
					s.Fatalf("literal with nonzero value in SSA: %v", rhs)
				}
				rhs = nil
			case ir.OAPPEND:
				rhs := rhs.(*ir.CallExpr)
				// Check whether we're writing the result of an append
				// back to the same slice. If so, we handle it specially
				// to avoid write barriers on the fast
				// (non-growth) path.
				if !ir.SameSafeExpr(n.X, rhs.Args[0]) || base.Flag.N != 0 {
					break
				}
				// If the slice can be SSA'd, it'll be on the stack,
				// so there will be no write barriers,
				// so there's no need to attempt to prevent them.
				if s.canSSA(n.X) {
					if base.Debug.Append > 0 { // replicating old diagnostic message
						base.WarnfAt(n.Pos(), "append: len-only update (in local slice)")
					}
					break
				}
				if base.Debug.Append > 0 {
					base.WarnfAt(n.Pos(), "append: len-only update")
				}
				s.append(rhs, true)
				return
			}
		}

		if ir.IsBlank(n.X) {
			// _ = rhs
			// Just evaluate rhs for side-effects.
			if rhs != nil {
				s.expr(rhs)
			}
			return
		}

		var t *types.Type
		if n.Y != nil {
			t = n.Y.Type()
		} else {
			t = n.X.Type()
		}

		var r *ssa.Value
		deref := !ssa.CanSSA(t)
		if deref {
			if rhs == nil {
				r = nil // Signal assign to use OpZero.
			} else {
				r = s.addr(rhs)
			}
		} else {
			if rhs == nil {
				r = s.zeroVal(t)
			} else {
				r = s.expr(rhs)
			}
		}

		var skip skipMask
		if rhs != nil && (rhs.Op() == ir.OSLICE || rhs.Op() == ir.OSLICE3 || rhs.Op() == ir.OSLICESTR) && ir.SameSafeExpr(rhs.(*ir.SliceExpr).X, n.X) {
			// We're assigning a slicing operation back to its source.
			// Don't write back fields we aren't changing. See issue #14855.
			rhs := rhs.(*ir.SliceExpr)
			i, j, k := rhs.Low, rhs.High, rhs.Max
			if i != nil && (i.Op() == ir.OLITERAL && i.Val().Kind() == constant.Int && ir.Int64Val(i) == 0) {
				// [0:...] is the same as [:...]
				i = nil
			}
			// TODO: detect defaults for len/cap also.
			// Currently doesn't really work because (*p)[:len(*p)] appears here as:
			//    tmp = len(*p)
			//    (*p)[:tmp]
			// if j != nil && (j.Op == OLEN && SameSafeExpr(j.X, n.X)) {
			//      j = nil
			// }
			// if k != nil && (k.Op == OCAP && SameSafeExpr(k.X, n.X)) {
			//      k = nil
			// }
			if i == nil {
				skip |= skipPtr
				if j == nil {
					skip |= skipLen
				}
				if k == nil {
					skip |= skipCap
				}
			}
		}

		s.assignWhichMayOverlap(n.X, r, deref, skip, mayOverlap)

	case ir.OIF:
		n := n.(*ir.IfStmt)
		if ir.IsConst(n.Cond, constant.Bool) {
			// Constant condition: emit only the taken branch.
			s.stmtList(n.Cond.Init())
			if ir.BoolVal(n.Cond) {
				s.stmtList(n.Body)
			} else {
				s.stmtList(n.Else)
			}
			break
		}

		bEnd := s.f.NewBlock(ssa.BlockPlain)
		var likely int8
		if n.Likely {
			likely = 1 // n.Likely marks the then-branch as likely
		}
		var bThen *ssa.Block
		if len(n.Body) != 0 {
			bThen = s.f.NewBlock(ssa.BlockPlain)
		} else {
			bThen = bEnd
		}
		var bElse *ssa.Block
		if len(n.Else) != 0 {
			bElse = s.f.NewBlock(ssa.BlockPlain)
		} else {
			bElse = bEnd
		}
		s.condBranch(n.Cond, bThen, bElse, likely)

		if len(n.Body) != 0 {
			s.startBlock(bThen)
			s.stmtList(n.Body)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		if len(n.Else) != 0 {
			s.startBlock(bElse)
			s.stmtList(n.Else)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		s.startBlock(bEnd)

	case ir.ORETURN:
		n := n.(*ir.ReturnStmt)
		s.stmtList(n.Results)
		b := s.exit()
		b.Pos = s.lastPos.WithIsStmt()

	case ir.OTAILCALL:
		n := n.(*ir.TailCallStmt)
		s.callResult(n.Call, callTail)
		call := s.mem()
		b := s.endBlock()
		b.Kind = ssa.BlockRetJmp // could use BlockExit. BlockRetJmp is mostly for clarity.
		b.SetControl(call)

	case ir.OCONTINUE, ir.OBREAK:
		n := n.(*ir.BranchStmt)
		var to *ssa.Block
		if n.Label == nil {
			// plain break/continue: target the innermost loop/switch
			switch n.Op() {
			case ir.OCONTINUE:
				to = s.continueTo
			case ir.OBREAK:
				to = s.breakTo
			}
		} else {
			// labeled break/continue; look up the target via the label
			sym := n.Label
			lab := s.label(sym)
			switch n.Op() {
			case ir.OCONTINUE:
				to = lab.continueTarget
			case ir.OBREAK:
				to = lab.breakTarget
			}
		}

		b := s.endBlock()
		b.Pos = s.lastPos.WithIsStmt() // Do this even if b is an empty block.
		b.AddEdgeTo(to)

	case ir.OFOR:
		// OFOR: for Ninit; Cond; Post { Body }
		// cond (Cond); body (Body); incr (Post)
		n := n.(*ir.ForStmt)
		base.Assert(!n.DistinctVars) // Should all be rewritten before escape analysis
		bCond := s.f.NewBlock(ssa.BlockPlain)
		bBody := s.f.NewBlock(ssa.BlockPlain)
		bIncr := s.f.NewBlock(ssa.BlockPlain)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		// ensure empty for loops have correct position; issue #30167
		bBody.Pos = n.Pos()

		// first, jump to condition test
		b := s.endBlock()
		b.AddEdgeTo(bCond)

		// generate code to test condition
		s.startBlock(bCond)
		if n.Cond != nil {
			s.condBranch(n.Cond, bBody, bEnd, 1)
		} else {
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			b.AddEdgeTo(bBody)
		}

		// set up for continue/break in body
		prevContinue := s.continueTo
		prevBreak := s.breakTo
		s.continueTo = bIncr
		s.breakTo = bEnd
		var lab *ssaLabel
		if sym := n.Label; sym != nil {
			// labeled for loop
			lab = s.label(sym)
			lab.continueTarget = bIncr
			lab.breakTarget = bEnd
		}

		// generate body
		s.startBlock(bBody)
		s.stmtList(n.Body)

		// tear down continue/break
		s.continueTo = prevContinue
		s.breakTo = prevBreak
		if lab != nil {
			lab.continueTarget = nil
			lab.breakTarget = nil
		}

		// done with body, goto incr
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bIncr)
		}

		// generate incr
		s.startBlock(bIncr)
		if n.Post != nil {
			s.stmt(n.Post)
		}
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bCond)
			// If the increment block has no position of its own,
			// inherit the condition's for sane debugging.
			if b.Pos == src.NoXPos {
				b.Pos = bCond.Pos
			}
		}

		s.startBlock(bEnd)

	case ir.OSWITCH, ir.OSELECT:
		// These have been mostly rewritten by the compiler into their
		// Compiled bodies; the main remaining task is to hook up breaks.
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		prevBreak := s.breakTo
		s.breakTo = bEnd
		var sym *types.Sym
		var body ir.Nodes
		if n.Op() == ir.OSWITCH {
			n := n.(*ir.SwitchStmt)
			sym = n.Label
			body = n.Compiled
		} else {
			n := n.(*ir.SelectStmt)
			sym = n.Label
			body = n.Compiled
		}

		var lab *ssaLabel
		if sym != nil {
			// labeled switch/select
			lab = s.label(sym)
			lab.breakTarget = bEnd
		}

		// generate body code
		s.stmtList(body)

		s.breakTo = prevBreak
		if lab != nil {
			lab.breakTarget = nil
		}

		// walk adds explicit OBREAK nodes to the end of all reachable
		// code paths. If we still have a current block here, it is
		// unreachable; terminate it.
		if s.curBlock != nil {
			m := s.mem()
			b := s.endBlock()
			b.Kind = ssa.BlockExit
			b.SetControl(m)
		}
		s.startBlock(bEnd)

	case ir.OJUMPTABLE:
		n := n.(*ir.JumpTableStmt)

		// Make blocks we'll need.
		jt := s.f.NewBlock(ssa.BlockJumpTable)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		// The only thing that needs evaluating is the index we're looking up.
		idx := s.expr(n.Idx)
		unsigned := idx.Type.IsUnsigned()

		// Extend so we can do everything in uintptr arithmetic.
		t := types.Types[types.TUINTPTR]
		idx = s.conv(nil, idx, idx.Type, t)

		// We check min <= idx <= max (as 0 <= idx-min <= max-min, so
		// idx-min can double as the jump table's control value) and jump
		// around the table if the index is out of range.
		// Cases are sorted, so min is Cases[0] and max is the last case.
		var min, max uint64
		if unsigned {
			min, _ = constant.Uint64Val(n.Cases[0])
			max, _ = constant.Uint64Val(n.Cases[len(n.Cases)-1])
		} else {
			mn, _ := constant.Int64Val(n.Cases[0])
			mx, _ := constant.Int64Val(n.Cases[len(n.Cases)-1])
			min = uint64(mn)
			max = uint64(mx)
		}
		// Compare idx-min with max-min, to see if we can use the jump table.
		idx = s.newValue2(s.ssaOp(ir.OSUB, t), t, idx, s.uintptrConstant(min))
		width := s.uintptrConstant(max - min)
		cmp := s.newValue2(s.ssaOp(ir.OLE, t), types.Types[types.TBOOL], idx, width)
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cmp)
		b.AddEdgeTo(jt)
		b.AddEdgeTo(bEnd)
		b.Likely = ssa.BranchLikely // TODO: assumes missing the table entirely is unlikely. True?

		// Build jump table block.
		s.startBlock(jt)
		jt.Pos = n.Pos()
		if base.Flag.Cfg.SpectreIndex {
			idx = s.newValue2(ssa.OpSpectreSliceIndex, t, idx, width)
		}
		jt.SetControl(idx)

		// Figure out where we should go for each index in the table.
		// Entries with no explicit case go to bEnd (the default).
		table := make([]*ssa.Block, max-min+1)
		for i := range table {
			table[i] = bEnd
		}
		for i := range n.Targets {
			c := n.Cases[i]
			lab := s.label(n.Targets[i])
			if lab.target == nil {
				lab.target = s.f.NewBlock(ssa.BlockPlain)
			}
			var val uint64
			if unsigned {
				val, _ = constant.Uint64Val(c)
			} else {
				vl, _ := constant.Int64Val(c)
				val = uint64(vl)
			}
			// Overwrite the default target.
			table[val-min] = lab.target
		}
		for _, t := range table {
			jt.AddEdgeTo(t)
		}
		s.endBlock()

		s.startBlock(bEnd)

	case ir.OINTERFACESWITCH:
		n := n.(*ir.InterfaceSwitchStmt)
		typs := s.f.Config.Types

		t := s.expr(n.RuntimeType)
		h := s.expr(n.Hash)
		d := s.newValue1A(ssa.OpAddr, typs.BytePtr, n.Descriptor, s.sb)

		// Check the interface-switch cache first, if supported.
		var merge *ssa.Block
		if base.Flag.N == 0 && rtabi.UseInterfaceSwitchCache(Arch.LinkArch.Family) {
			// Note: we can only use the cache if we have the right
			// atomic load instruction. Double-check that here.
			if intrinsics.lookup(Arch.LinkArch.Arch, "internal/runtime/atomic", "Loadp") == nil {
				s.Fatalf("atomic load not available")
			}
			merge = s.f.NewBlock(ssa.BlockPlain)
			cacheHit := s.f.NewBlock(ssa.BlockPlain)
			cacheMiss := s.f.NewBlock(ssa.BlockPlain)
			loopHead := s.f.NewBlock(ssa.BlockPlain)
			loopBody := s.f.NewBlock(ssa.BlockPlain)

			// Pick pointer-size arithmetic ops.
			var mul, and, add, zext ssa.Op
			if s.config.PtrSize == 4 {
				mul = ssa.OpMul32
				and = ssa.OpAnd32
				add = ssa.OpAdd32
				zext = ssa.OpCopy
			} else {
				mul = ssa.OpMul64
				and = ssa.OpAnd64
				add = ssa.OpAdd64
				zext = ssa.OpZeroExt32to64
			}

			// Load cache pointer out of the descriptor, with an atomic
			// load so we see a fully written cache.
			atomicLoad := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(typs.BytePtr, types.TypeMem), d, s.mem())
			cache := s.newValue1(ssa.OpSelect0, typs.BytePtr, atomicLoad)
			s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, atomicLoad)

			// Initialize hash probe variable.
			s.vars[hashVar] = s.newValue1(zext, typs.Uintptr, h)

			// Load mask from cache (first word).
			mask := s.newValue2(ssa.OpLoad, typs.Uintptr, cache, s.mem())
			// Jump to loop head.
			b := s.endBlock()
			b.AddEdgeTo(loopHead)

			// At loop head, get pointer to the cache entry:
			//   e := &cache.Entries[hash&mask]
			s.startBlock(loopHead)
			entries := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, cache, s.uintptrConstant(uint64(s.config.PtrSize)))
			idx := s.newValue2(and, typs.Uintptr, s.variable(hashVar, typs.Uintptr), mask)
			idx = s.newValue2(mul, typs.Uintptr, idx, s.uintptrConstant(uint64(3*s.config.PtrSize)))
			e := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, entries, idx)
			// Advance hash for the next probe (open addressing).
			s.vars[hashVar] = s.newValue2(add, typs.Uintptr, s.variable(hashVar, typs.Uintptr), s.uintptrConstant(1))

			// Look for a cache hit:
			//   if e.Typ == t { goto hit }
			eTyp := s.newValue2(ssa.OpLoad, typs.Uintptr, e, s.mem())
			cmp1 := s.newValue2(ssa.OpEqPtr, typs.Bool, t, eTyp)
			b = s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(cmp1)
			b.AddEdgeTo(cacheHit)
			b.AddEdgeTo(loopBody)

			// Look for an empty entry, the tombstone for this hash table:
			//   if e.Typ == nil { goto miss }
			s.startBlock(loopBody)
			cmp2 := s.newValue2(ssa.OpEqPtr, typs.Bool, eTyp, s.constNil(typs.BytePtr))
			b = s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(cmp2)
			b.AddEdgeTo(cacheMiss)
			b.AddEdgeTo(loopHead)

			// On a hit, load the data fields of the cache entry:
			//   Case = e.Case
			//   Itab = e.Itab
			s.startBlock(cacheHit)
			eCase := s.newValue2(ssa.OpLoad, typs.Int, s.newValue1I(ssa.OpOffPtr, typs.IntPtr, s.config.PtrSize, e), s.mem())
			eItab := s.newValue2(ssa.OpLoad, typs.BytePtr, s.newValue1I(ssa.OpOffPtr, typs.BytePtrPtr, 2*s.config.PtrSize, e), s.mem())
			s.assign(n.Case, eCase, false, 0)
			s.assign(n.Itab, eItab, false, 0)
			b = s.endBlock()
			b.AddEdgeTo(merge)

			// On a miss, call into the runtime to get the answer.
			s.startBlock(cacheMiss)
		}

		r := s.rtcall(ir.Syms.InterfaceSwitch, true, []*types.Type{typs.Int, typs.BytePtr}, d, t)
		s.assign(n.Case, r[0], false, 0)
		s.assign(n.Itab, r[1], false, 0)

		if merge != nil {
			// Cache hits merge in here.
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			b.AddEdgeTo(merge)
			s.startBlock(merge)
		}

	case ir.OCHECKNIL:
		n := n.(*ir.UnaryExpr)
		p := s.expr(n.X)
		_ = s.nilCheck(p)
		// TODO: check that throwing away the nilcheck result is ok.

	case ir.OINLMARK:
		n := n.(*ir.InlineMarkStmt)
		s.newValue1I(ssa.OpInlMark, types.TypeVoid, n.Index, s.mem())

	default:
		s.Fatalf("unhandled stmt %v", n.Op())
	}
}
2352
2353
2354
2355 const shareDeferExits = false
2356
2357
2358
2359
// exit processes any code that needs to be generated just before
// returning (deferred calls, storing results, race-exit hooks). It
// returns a BlockRet block that ends the control flow; its control
// value is the final OpMakeResult carrying results and memory state.
func (s *state) exit() *ssa.Block {
	if s.hasdefer {
		if s.hasOpenDefers {
			if shareDeferExits && s.lastDeferExit != nil && len(s.openDefers) == s.lastDeferCount {
				// Reuse the previously generated open-coded defer exit.
				if s.curBlock.Kind != ssa.BlockPlain {
					panic("Block for an exit should be BlockPlain")
				}
				s.curBlock.AddEdgeTo(s.lastDeferExit)
				s.endBlock()
				return s.lastDeferFinalBlock
			}
			s.openDeferExit()
		} else {
			// Run deferred functions via the runtime.
			// The call is made at the function's end position so the
			// defer executions are attributed there rather than to the
			// individual return statement.
			s.pushLine(s.curfn.Endlineno)
			s.rtcall(ir.Syms.Deferreturn, true, nil)
			s.popLine()
		}
	}

	// Do the actual return: collect one value per result, plus the
	// final memory state, into an OpMakeResult.
	resultFields := s.curfn.Type().Results()
	results := make([]*ssa.Value, len(resultFields)+1, len(resultFields)+1)
	for i, f := range resultFields {
		n := f.Nname.(*ir.Name)
		if s.canSSA(n) { // result is in some SSA variable
			if !n.IsOutputParamInRegisters() && n.Type().HasPointers() {
				// We are about to store to the result slot.
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			}
			results[i] = s.variable(n, n.Type())
		} else if !n.OnStack() { // result is actually heap allocated
			// We are about to copy the in-heap result to the result slot.
			if n.Type().HasPointers() {
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			}
			ha := s.expr(n.Heapaddr)
			s.instrumentFields(n.Type(), ha, instrumentRead)
			results[i] = s.newValue2(ssa.OpDereference, n.Type(), ha, s.mem())
		} else {
			// Result is on the stack but not SSA-able (e.g. too large
			// or address-taken); dereference it in place — effectively
			// a self-move into the result slot.
			results[i] = s.newValue2(ssa.OpDereference, n.Type(), s.addr(n), s.mem())
		}
	}

	// In -race mode, call racefuncexit.
	// Note: this has to happen after we load any heap-allocated
	// results, otherwise those loads' races would be attributed to
	// the caller instead.
	if s.instrumentEnterExit {
		s.rtcall(ir.Syms.Racefuncexit, true, nil)
	}

	results[len(results)-1] = s.mem()
	m := s.newValue0(ssa.OpMakeResult, s.f.OwnAux.LateExpansionResultType())
	m.AddArgs(results...)

	b := s.endBlock()
	b.Kind = ssa.BlockRet
	b.SetControl(m)
	if s.hasdefer && s.hasOpenDefers {
		s.lastDeferFinalBlock = b
	}
	return b
}
2435
// opAndType is the key type of the opToSSA table: a generic IR
// operation paired with the operand kind it applies to, mapping to
// the width- and sign-specific SSA op.
type opAndType struct {
	op    ir.Op
	etype types.Kind
}
2440
// opToSSA maps an IR operator applied to operands of a given concrete kind
// (see (*state).concreteEtype) to the corresponding generic SSA op.
// Lookups go through (*state).ssaOp, which fails fatally on a missing entry.
var opToSSA = map[opAndType]ssa.Op{
	// Addition: width-specific integer adds plus float adds.
	{ir.OADD, types.TINT8}: ssa.OpAdd8,
	{ir.OADD, types.TUINT8}: ssa.OpAdd8,
	{ir.OADD, types.TINT16}: ssa.OpAdd16,
	{ir.OADD, types.TUINT16}: ssa.OpAdd16,
	{ir.OADD, types.TINT32}: ssa.OpAdd32,
	{ir.OADD, types.TUINT32}: ssa.OpAdd32,
	{ir.OADD, types.TINT64}: ssa.OpAdd64,
	{ir.OADD, types.TUINT64}: ssa.OpAdd64,
	{ir.OADD, types.TFLOAT32}: ssa.OpAdd32F,
	{ir.OADD, types.TFLOAT64}: ssa.OpAdd64F,

	// Subtraction.
	{ir.OSUB, types.TINT8}: ssa.OpSub8,
	{ir.OSUB, types.TUINT8}: ssa.OpSub8,
	{ir.OSUB, types.TINT16}: ssa.OpSub16,
	{ir.OSUB, types.TUINT16}: ssa.OpSub16,
	{ir.OSUB, types.TINT32}: ssa.OpSub32,
	{ir.OSUB, types.TUINT32}: ssa.OpSub32,
	{ir.OSUB, types.TINT64}: ssa.OpSub64,
	{ir.OSUB, types.TUINT64}: ssa.OpSub64,
	{ir.OSUB, types.TFLOAT32}: ssa.OpSub32F,
	{ir.OSUB, types.TFLOAT64}: ssa.OpSub64F,

	// Logical negation.
	{ir.ONOT, types.TBOOL}: ssa.OpNot,

	// Arithmetic negation.
	{ir.ONEG, types.TINT8}: ssa.OpNeg8,
	{ir.ONEG, types.TUINT8}: ssa.OpNeg8,
	{ir.ONEG, types.TINT16}: ssa.OpNeg16,
	{ir.ONEG, types.TUINT16}: ssa.OpNeg16,
	{ir.ONEG, types.TINT32}: ssa.OpNeg32,
	{ir.ONEG, types.TUINT32}: ssa.OpNeg32,
	{ir.ONEG, types.TINT64}: ssa.OpNeg64,
	{ir.ONEG, types.TUINT64}: ssa.OpNeg64,
	{ir.ONEG, types.TFLOAT32}: ssa.OpNeg32F,
	{ir.ONEG, types.TFLOAT64}: ssa.OpNeg64F,

	// Bitwise complement (integers only).
	{ir.OBITNOT, types.TINT8}: ssa.OpCom8,
	{ir.OBITNOT, types.TUINT8}: ssa.OpCom8,
	{ir.OBITNOT, types.TINT16}: ssa.OpCom16,
	{ir.OBITNOT, types.TUINT16}: ssa.OpCom16,
	{ir.OBITNOT, types.TINT32}: ssa.OpCom32,
	{ir.OBITNOT, types.TUINT32}: ssa.OpCom32,
	{ir.OBITNOT, types.TINT64}: ssa.OpCom64,
	{ir.OBITNOT, types.TUINT64}: ssa.OpCom64,

	// Complex component extraction.
	{ir.OIMAG, types.TCOMPLEX64}: ssa.OpComplexImag,
	{ir.OIMAG, types.TCOMPLEX128}: ssa.OpComplexImag,
	{ir.OREAL, types.TCOMPLEX64}: ssa.OpComplexReal,
	{ir.OREAL, types.TCOMPLEX128}: ssa.OpComplexReal,

	// Multiplication.
	{ir.OMUL, types.TINT8}: ssa.OpMul8,
	{ir.OMUL, types.TUINT8}: ssa.OpMul8,
	{ir.OMUL, types.TINT16}: ssa.OpMul16,
	{ir.OMUL, types.TUINT16}: ssa.OpMul16,
	{ir.OMUL, types.TINT32}: ssa.OpMul32,
	{ir.OMUL, types.TUINT32}: ssa.OpMul32,
	{ir.OMUL, types.TINT64}: ssa.OpMul64,
	{ir.OMUL, types.TUINT64}: ssa.OpMul64,
	{ir.OMUL, types.TFLOAT32}: ssa.OpMul32F,
	{ir.OMUL, types.TFLOAT64}: ssa.OpMul64F,

	// Floating-point division.
	{ir.ODIV, types.TFLOAT32}: ssa.OpDiv32F,
	{ir.ODIV, types.TFLOAT64}: ssa.OpDiv64F,

	// Integer division: signed and unsigned variants differ.
	{ir.ODIV, types.TINT8}: ssa.OpDiv8,
	{ir.ODIV, types.TUINT8}: ssa.OpDiv8u,
	{ir.ODIV, types.TINT16}: ssa.OpDiv16,
	{ir.ODIV, types.TUINT16}: ssa.OpDiv16u,
	{ir.ODIV, types.TINT32}: ssa.OpDiv32,
	{ir.ODIV, types.TUINT32}: ssa.OpDiv32u,
	{ir.ODIV, types.TINT64}: ssa.OpDiv64,
	{ir.ODIV, types.TUINT64}: ssa.OpDiv64u,

	// Integer modulus: signed and unsigned variants differ.
	{ir.OMOD, types.TINT8}: ssa.OpMod8,
	{ir.OMOD, types.TUINT8}: ssa.OpMod8u,
	{ir.OMOD, types.TINT16}: ssa.OpMod16,
	{ir.OMOD, types.TUINT16}: ssa.OpMod16u,
	{ir.OMOD, types.TINT32}: ssa.OpMod32,
	{ir.OMOD, types.TUINT32}: ssa.OpMod32u,
	{ir.OMOD, types.TINT64}: ssa.OpMod64,
	{ir.OMOD, types.TUINT64}: ssa.OpMod64u,

	// Bitwise AND.
	{ir.OAND, types.TINT8}: ssa.OpAnd8,
	{ir.OAND, types.TUINT8}: ssa.OpAnd8,
	{ir.OAND, types.TINT16}: ssa.OpAnd16,
	{ir.OAND, types.TUINT16}: ssa.OpAnd16,
	{ir.OAND, types.TINT32}: ssa.OpAnd32,
	{ir.OAND, types.TUINT32}: ssa.OpAnd32,
	{ir.OAND, types.TINT64}: ssa.OpAnd64,
	{ir.OAND, types.TUINT64}: ssa.OpAnd64,

	// Bitwise OR.
	{ir.OOR, types.TINT8}: ssa.OpOr8,
	{ir.OOR, types.TUINT8}: ssa.OpOr8,
	{ir.OOR, types.TINT16}: ssa.OpOr16,
	{ir.OOR, types.TUINT16}: ssa.OpOr16,
	{ir.OOR, types.TINT32}: ssa.OpOr32,
	{ir.OOR, types.TUINT32}: ssa.OpOr32,
	{ir.OOR, types.TINT64}: ssa.OpOr64,
	{ir.OOR, types.TUINT64}: ssa.OpOr64,

	// Bitwise XOR.
	{ir.OXOR, types.TINT8}: ssa.OpXor8,
	{ir.OXOR, types.TUINT8}: ssa.OpXor8,
	{ir.OXOR, types.TINT16}: ssa.OpXor16,
	{ir.OXOR, types.TUINT16}: ssa.OpXor16,
	{ir.OXOR, types.TINT32}: ssa.OpXor32,
	{ir.OXOR, types.TUINT32}: ssa.OpXor32,
	{ir.OXOR, types.TINT64}: ssa.OpXor64,
	{ir.OXOR, types.TUINT64}: ssa.OpXor64,

	// Equality. Pointer-shaped kinds (func, map, chan, ptr, uintptr,
	// unsafe.Pointer) all compare as pointers; interfaces and slices
	// have dedicated ops.
	{ir.OEQ, types.TBOOL}: ssa.OpEqB,
	{ir.OEQ, types.TINT8}: ssa.OpEq8,
	{ir.OEQ, types.TUINT8}: ssa.OpEq8,
	{ir.OEQ, types.TINT16}: ssa.OpEq16,
	{ir.OEQ, types.TUINT16}: ssa.OpEq16,
	{ir.OEQ, types.TINT32}: ssa.OpEq32,
	{ir.OEQ, types.TUINT32}: ssa.OpEq32,
	{ir.OEQ, types.TINT64}: ssa.OpEq64,
	{ir.OEQ, types.TUINT64}: ssa.OpEq64,
	{ir.OEQ, types.TINTER}: ssa.OpEqInter,
	{ir.OEQ, types.TSLICE}: ssa.OpEqSlice,
	{ir.OEQ, types.TFUNC}: ssa.OpEqPtr,
	{ir.OEQ, types.TMAP}: ssa.OpEqPtr,
	{ir.OEQ, types.TCHAN}: ssa.OpEqPtr,
	{ir.OEQ, types.TPTR}: ssa.OpEqPtr,
	{ir.OEQ, types.TUINTPTR}: ssa.OpEqPtr,
	{ir.OEQ, types.TUNSAFEPTR}: ssa.OpEqPtr,
	{ir.OEQ, types.TFLOAT64}: ssa.OpEq64F,
	{ir.OEQ, types.TFLOAT32}: ssa.OpEq32F,

	// Inequality: mirrors the OEQ table.
	{ir.ONE, types.TBOOL}: ssa.OpNeqB,
	{ir.ONE, types.TINT8}: ssa.OpNeq8,
	{ir.ONE, types.TUINT8}: ssa.OpNeq8,
	{ir.ONE, types.TINT16}: ssa.OpNeq16,
	{ir.ONE, types.TUINT16}: ssa.OpNeq16,
	{ir.ONE, types.TINT32}: ssa.OpNeq32,
	{ir.ONE, types.TUINT32}: ssa.OpNeq32,
	{ir.ONE, types.TINT64}: ssa.OpNeq64,
	{ir.ONE, types.TUINT64}: ssa.OpNeq64,
	{ir.ONE, types.TINTER}: ssa.OpNeqInter,
	{ir.ONE, types.TSLICE}: ssa.OpNeqSlice,
	{ir.ONE, types.TFUNC}: ssa.OpNeqPtr,
	{ir.ONE, types.TMAP}: ssa.OpNeqPtr,
	{ir.ONE, types.TCHAN}: ssa.OpNeqPtr,
	{ir.ONE, types.TPTR}: ssa.OpNeqPtr,
	{ir.ONE, types.TUINTPTR}: ssa.OpNeqPtr,
	{ir.ONE, types.TUNSAFEPTR}: ssa.OpNeqPtr,
	{ir.ONE, types.TFLOAT64}: ssa.OpNeq64F,
	{ir.ONE, types.TFLOAT32}: ssa.OpNeq32F,

	// Less-than: unsigned kinds use the U variants.
	{ir.OLT, types.TINT8}: ssa.OpLess8,
	{ir.OLT, types.TUINT8}: ssa.OpLess8U,
	{ir.OLT, types.TINT16}: ssa.OpLess16,
	{ir.OLT, types.TUINT16}: ssa.OpLess16U,
	{ir.OLT, types.TINT32}: ssa.OpLess32,
	{ir.OLT, types.TUINT32}: ssa.OpLess32U,
	{ir.OLT, types.TINT64}: ssa.OpLess64,
	{ir.OLT, types.TUINT64}: ssa.OpLess64U,
	{ir.OLT, types.TFLOAT64}: ssa.OpLess64F,
	{ir.OLT, types.TFLOAT32}: ssa.OpLess32F,

	// Less-or-equal. Note: OGT/OGE are handled by the expr lowering,
	// which swaps the operands and uses OLT/OLE.
	{ir.OLE, types.TINT8}: ssa.OpLeq8,
	{ir.OLE, types.TUINT8}: ssa.OpLeq8U,
	{ir.OLE, types.TINT16}: ssa.OpLeq16,
	{ir.OLE, types.TUINT16}: ssa.OpLeq16U,
	{ir.OLE, types.TINT32}: ssa.OpLeq32,
	{ir.OLE, types.TUINT32}: ssa.OpLeq32U,
	{ir.OLE, types.TINT64}: ssa.OpLeq64,
	{ir.OLE, types.TUINT64}: ssa.OpLeq64U,
	{ir.OLE, types.TFLOAT64}: ssa.OpLeq64F,
	{ir.OLE, types.TFLOAT32}: ssa.OpLeq32F,
}
2612
2613 func (s *state) concreteEtype(t *types.Type) types.Kind {
2614 e := t.Kind()
2615 switch e {
2616 default:
2617 return e
2618 case types.TINT:
2619 if s.config.PtrSize == 8 {
2620 return types.TINT64
2621 }
2622 return types.TINT32
2623 case types.TUINT:
2624 if s.config.PtrSize == 8 {
2625 return types.TUINT64
2626 }
2627 return types.TUINT32
2628 case types.TUINTPTR:
2629 if s.config.PtrSize == 8 {
2630 return types.TUINT64
2631 }
2632 return types.TUINT32
2633 }
2634 }
2635
2636 func (s *state) ssaOp(op ir.Op, t *types.Type) ssa.Op {
2637 etype := s.concreteEtype(t)
2638 x, ok := opToSSA[opAndType{op, etype}]
2639 if !ok {
2640 s.Fatalf("unhandled binary op %v %s", op, etype)
2641 }
2642 return x
2643 }
2644
// opAndTwoTypes is the key type for shiftOpToSSA: a shift operator plus the
// concrete kinds of the shifted value (etype1) and the shift amount (etype2).
// Keep the field order fixed: shiftOpToSSA uses positional composite literals.
type opAndTwoTypes struct {
	op     ir.Op
	etype1 types.Kind
	etype2 types.Kind
}
2650
// twoTypes is the key type for the float-conversion tables
// (fpConvOpToSSA and friends): source kind (etype1) and destination
// kind (etype2). Field order matters for the positional literals below.
type twoTypes struct {
	etype1 types.Kind
	etype2 types.Kind
}
2655
// twoOpsAndType describes a two-step conversion: apply op1 producing a value
// of intermediateType, then op2 producing the final type. Either op may be
// OpCopy (step is a no-op) or OpInvalid (no single-op lowering exists and a
// runtime-helper path must be taken instead).
type twoOpsAndType struct {
	op1              ssa.Op
	op2              ssa.Op
	intermediateType types.Kind
}
2661
// fpConvOpToSSA is the base table of float<->integer (and float<->float)
// conversions: (source kind, destination kind) -> the op pair and
// intermediate type that implement it. Entries containing OpInvalid mark
// conversions with no direct lowering here; conv falls back to helper
// routines (or an arch-specific table below overrides the entry).
var fpConvOpToSSA = map[twoTypes]twoOpsAndType{
	// Signed int -> float32: widen to 32 or 64 bits, then convert.
	{types.TINT8, types.TFLOAT32}: {ssa.OpSignExt8to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT16, types.TFLOAT32}: {ssa.OpSignExt16to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT32, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt64to32F, types.TINT64},

	// Signed int -> float64.
	{types.TINT8, types.TFLOAT64}: {ssa.OpSignExt8to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT16, types.TFLOAT64}: {ssa.OpSignExt16to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT32, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt64to64F, types.TINT64},

	// float32 -> signed int: convert, then truncate if narrower than 32 bits.
	{types.TFLOAT32, types.TINT8}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT32, types.TINT16}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT32, types.TINT32}: {ssa.OpCvt32Fto32, ssa.OpCopy, types.TINT32},
	{types.TFLOAT32, types.TINT64}: {ssa.OpCvt32Fto64, ssa.OpCopy, types.TINT64},

	// float64 -> signed int.
	{types.TFLOAT64, types.TINT8}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT64, types.TINT16}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT64, types.TINT32}: {ssa.OpCvt64Fto32, ssa.OpCopy, types.TINT32},
	{types.TFLOAT64, types.TINT64}: {ssa.OpCvt64Fto64, ssa.OpCopy, types.TINT64},

	// Unsigned int -> float32: zero-extend so the signed convert is exact.
	// uint32 goes through int64; uint64 has no direct lowering (OpInvalid).
	{types.TUINT8, types.TFLOAT32}: {ssa.OpZeroExt8to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TUINT16, types.TFLOAT32}: {ssa.OpZeroExt16to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TUINT32, types.TFLOAT32}: {ssa.OpZeroExt32to64, ssa.OpCvt64to32F, types.TINT64},
	{types.TUINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpInvalid, types.TUINT64},

	// Unsigned int -> float64.
	{types.TUINT8, types.TFLOAT64}: {ssa.OpZeroExt8to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TUINT16, types.TFLOAT64}: {ssa.OpZeroExt16to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TUINT32, types.TFLOAT64}: {ssa.OpZeroExt32to64, ssa.OpCvt64to64F, types.TINT64},
	{types.TUINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpInvalid, types.TUINT64},

	// float32 -> unsigned int: narrow results truncate; uint32/uint64
	// have no direct lowering here (OpInvalid; conv takes a helper path
	// or an arch table overrides).
	{types.TFLOAT32, types.TUINT8}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT32, types.TUINT16}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT32, types.TUINT32}: {ssa.OpInvalid, ssa.OpCopy, types.TINT64},
	{types.TFLOAT32, types.TUINT64}: {ssa.OpInvalid, ssa.OpCopy, types.TUINT64},

	// float64 -> unsigned int.
	{types.TFLOAT64, types.TUINT8}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT64, types.TUINT16}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT64, types.TUINT32}: {ssa.OpInvalid, ssa.OpCopy, types.TINT64},
	{types.TFLOAT64, types.TUINT64}: {ssa.OpInvalid, ssa.OpCopy, types.TUINT64},

	// float <-> float: same-width conversions round (to normalize any
	// higher-precision intermediate); cross-width conversions convert.
	{types.TFLOAT64, types.TFLOAT32}: {ssa.OpCvt64Fto32F, ssa.OpCopy, types.TFLOAT32},
	{types.TFLOAT64, types.TFLOAT64}: {ssa.OpRound64F, ssa.OpCopy, types.TFLOAT64},
	{types.TFLOAT32, types.TFLOAT32}: {ssa.OpRound32F, ssa.OpCopy, types.TFLOAT32},
	{types.TFLOAT32, types.TFLOAT64}: {ssa.OpCvt32Fto64F, ssa.OpCopy, types.TFLOAT64},
}
2710
2711
2712
// fpConvOpToSSA32 overrides fpConvOpToSSA entries on 32-bit targets
// (RegSize == 4, excluding MIPS and softfloat — see conv), where uint32
// cannot be widened to int64 in a register and dedicated unsigned
// conversion ops are used instead.
var fpConvOpToSSA32 = map[twoTypes]twoOpsAndType{
	{types.TUINT32, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt32Uto32F, types.TUINT32},
	{types.TUINT32, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt32Uto64F, types.TUINT32},
	{types.TFLOAT32, types.TUINT32}: {ssa.OpCvt32Fto32U, ssa.OpCopy, types.TUINT32},
	{types.TFLOAT64, types.TUINT32}: {ssa.OpCvt64Fto32U, ssa.OpCopy, types.TUINT32},
}
2719
2720
// uint64fpConvOpToSSA overrides fpConvOpToSSA for targets with native
// uint64<->float conversion ops (ARM64, Wasm, S390X, or softfloat — see
// conv), replacing the OpInvalid placeholder entries.
var uint64fpConvOpToSSA = map[twoTypes]twoOpsAndType{
	{types.TUINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt64Uto32F, types.TUINT64},
	{types.TUINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt64Uto64F, types.TUINT64},
	{types.TFLOAT32, types.TUINT64}: {ssa.OpCvt32Fto64U, ssa.OpCopy, types.TUINT64},
	{types.TFLOAT64, types.TUINT64}: {ssa.OpCvt64Fto64U, ssa.OpCopy, types.TUINT64},
}
2727
// shiftOpToSSA maps (shift operator, shifted-value kind, shift-amount kind)
// to the generic SSA shift op. Left shifts ignore signedness of the value;
// right shifts are arithmetic for signed values (Rsh*x*) and logical for
// unsigned values (Rsh*Ux*). Lookups go through (*state).ssaShiftOp.
var shiftOpToSSA = map[opAndTwoTypes]ssa.Op{
	// Left shift of 8-bit values.
	{ir.OLSH, types.TINT8, types.TUINT8}: ssa.OpLsh8x8,
	{ir.OLSH, types.TUINT8, types.TUINT8}: ssa.OpLsh8x8,
	{ir.OLSH, types.TINT8, types.TUINT16}: ssa.OpLsh8x16,
	{ir.OLSH, types.TUINT8, types.TUINT16}: ssa.OpLsh8x16,
	{ir.OLSH, types.TINT8, types.TUINT32}: ssa.OpLsh8x32,
	{ir.OLSH, types.TUINT8, types.TUINT32}: ssa.OpLsh8x32,
	{ir.OLSH, types.TINT8, types.TUINT64}: ssa.OpLsh8x64,
	{ir.OLSH, types.TUINT8, types.TUINT64}: ssa.OpLsh8x64,

	// Left shift of 16-bit values.
	{ir.OLSH, types.TINT16, types.TUINT8}: ssa.OpLsh16x8,
	{ir.OLSH, types.TUINT16, types.TUINT8}: ssa.OpLsh16x8,
	{ir.OLSH, types.TINT16, types.TUINT16}: ssa.OpLsh16x16,
	{ir.OLSH, types.TUINT16, types.TUINT16}: ssa.OpLsh16x16,
	{ir.OLSH, types.TINT16, types.TUINT32}: ssa.OpLsh16x32,
	{ir.OLSH, types.TUINT16, types.TUINT32}: ssa.OpLsh16x32,
	{ir.OLSH, types.TINT16, types.TUINT64}: ssa.OpLsh16x64,
	{ir.OLSH, types.TUINT16, types.TUINT64}: ssa.OpLsh16x64,

	// Left shift of 32-bit values.
	{ir.OLSH, types.TINT32, types.TUINT8}: ssa.OpLsh32x8,
	{ir.OLSH, types.TUINT32, types.TUINT8}: ssa.OpLsh32x8,
	{ir.OLSH, types.TINT32, types.TUINT16}: ssa.OpLsh32x16,
	{ir.OLSH, types.TUINT32, types.TUINT16}: ssa.OpLsh32x16,
	{ir.OLSH, types.TINT32, types.TUINT32}: ssa.OpLsh32x32,
	{ir.OLSH, types.TUINT32, types.TUINT32}: ssa.OpLsh32x32,
	{ir.OLSH, types.TINT32, types.TUINT64}: ssa.OpLsh32x64,
	{ir.OLSH, types.TUINT32, types.TUINT64}: ssa.OpLsh32x64,

	// Left shift of 64-bit values.
	{ir.OLSH, types.TINT64, types.TUINT8}: ssa.OpLsh64x8,
	{ir.OLSH, types.TUINT64, types.TUINT8}: ssa.OpLsh64x8,
	{ir.OLSH, types.TINT64, types.TUINT16}: ssa.OpLsh64x16,
	{ir.OLSH, types.TUINT64, types.TUINT16}: ssa.OpLsh64x16,
	{ir.OLSH, types.TINT64, types.TUINT32}: ssa.OpLsh64x32,
	{ir.OLSH, types.TUINT64, types.TUINT32}: ssa.OpLsh64x32,
	{ir.OLSH, types.TINT64, types.TUINT64}: ssa.OpLsh64x64,
	{ir.OLSH, types.TUINT64, types.TUINT64}: ssa.OpLsh64x64,

	// Right shift of 8-bit values (arithmetic for signed, logical for unsigned).
	{ir.ORSH, types.TINT8, types.TUINT8}: ssa.OpRsh8x8,
	{ir.ORSH, types.TUINT8, types.TUINT8}: ssa.OpRsh8Ux8,
	{ir.ORSH, types.TINT8, types.TUINT16}: ssa.OpRsh8x16,
	{ir.ORSH, types.TUINT8, types.TUINT16}: ssa.OpRsh8Ux16,
	{ir.ORSH, types.TINT8, types.TUINT32}: ssa.OpRsh8x32,
	{ir.ORSH, types.TUINT8, types.TUINT32}: ssa.OpRsh8Ux32,
	{ir.ORSH, types.TINT8, types.TUINT64}: ssa.OpRsh8x64,
	{ir.ORSH, types.TUINT8, types.TUINT64}: ssa.OpRsh8Ux64,

	// Right shift of 16-bit values.
	{ir.ORSH, types.TINT16, types.TUINT8}: ssa.OpRsh16x8,
	{ir.ORSH, types.TUINT16, types.TUINT8}: ssa.OpRsh16Ux8,
	{ir.ORSH, types.TINT16, types.TUINT16}: ssa.OpRsh16x16,
	{ir.ORSH, types.TUINT16, types.TUINT16}: ssa.OpRsh16Ux16,
	{ir.ORSH, types.TINT16, types.TUINT32}: ssa.OpRsh16x32,
	{ir.ORSH, types.TUINT16, types.TUINT32}: ssa.OpRsh16Ux32,
	{ir.ORSH, types.TINT16, types.TUINT64}: ssa.OpRsh16x64,
	{ir.ORSH, types.TUINT16, types.TUINT64}: ssa.OpRsh16Ux64,

	// Right shift of 32-bit values.
	{ir.ORSH, types.TINT32, types.TUINT8}: ssa.OpRsh32x8,
	{ir.ORSH, types.TUINT32, types.TUINT8}: ssa.OpRsh32Ux8,
	{ir.ORSH, types.TINT32, types.TUINT16}: ssa.OpRsh32x16,
	{ir.ORSH, types.TUINT32, types.TUINT16}: ssa.OpRsh32Ux16,
	{ir.ORSH, types.TINT32, types.TUINT32}: ssa.OpRsh32x32,
	{ir.ORSH, types.TUINT32, types.TUINT32}: ssa.OpRsh32Ux32,
	{ir.ORSH, types.TINT32, types.TUINT64}: ssa.OpRsh32x64,
	{ir.ORSH, types.TUINT32, types.TUINT64}: ssa.OpRsh32Ux64,

	// Right shift of 64-bit values.
	{ir.ORSH, types.TINT64, types.TUINT8}: ssa.OpRsh64x8,
	{ir.ORSH, types.TUINT64, types.TUINT8}: ssa.OpRsh64Ux8,
	{ir.ORSH, types.TINT64, types.TUINT16}: ssa.OpRsh64x16,
	{ir.ORSH, types.TUINT64, types.TUINT16}: ssa.OpRsh64Ux16,
	{ir.ORSH, types.TINT64, types.TUINT32}: ssa.OpRsh64x32,
	{ir.ORSH, types.TUINT64, types.TUINT32}: ssa.OpRsh64Ux32,
	{ir.ORSH, types.TINT64, types.TUINT64}: ssa.OpRsh64x64,
	{ir.ORSH, types.TUINT64, types.TUINT64}: ssa.OpRsh64Ux64,
}
2801
2802 func (s *state) ssaShiftOp(op ir.Op, t *types.Type, u *types.Type) ssa.Op {
2803 etype1 := s.concreteEtype(t)
2804 etype2 := s.concreteEtype(u)
2805 x, ok := shiftOpToSSA[opAndTwoTypes{op, etype1, etype2}]
2806 if !ok {
2807 s.Fatalf("unhandled shift op %v etype=%s/%s", op, etype1, etype2)
2808 }
2809 return x
2810 }
2811
2812 func (s *state) uintptrConstant(v uint64) *ssa.Value {
2813 if s.config.PtrSize == 4 {
2814 return s.newValue0I(ssa.OpConst32, types.Types[types.TUINTPTR], int64(v))
2815 }
2816 return s.newValue0I(ssa.OpConst64, types.Types[types.TUINTPTR], int64(v))
2817 }
2818
// conv converts ssa value v, which has type ft, to type tt, emitting any
// needed conversion ops and returning the converted value. n is the IR node
// the conversion came from, used for position info and helper calls.
// It handles bool->uint8, integer widen/narrow, complex, and float/integer
// conversions, including per-architecture special cases.
func (s *state) conv(n ir.Node, v *ssa.Value, ft, tt *types.Type) *ssa.Value {
	if ft.IsBoolean() && tt.IsKind(types.TUINT8) {
		// Bool-to-uint8 conversion has a dedicated op.
		return s.newValue1(ssa.OpCvtBoolToUint8, tt, v)
	}
	if ft.IsInteger() && tt.IsInteger() {
		var op ssa.Op
		if tt.Size() == ft.Size() {
			op = ssa.OpCopy
		} else if tt.Size() < ft.Size() {
			// Truncation. The switch key encodes (from-size, to-size)
			// in bytes as a two-digit number, e.g. 84 = 8 bytes -> 4 bytes.
			switch 10*ft.Size() + tt.Size() {
			case 21:
				op = ssa.OpTrunc16to8
			case 41:
				op = ssa.OpTrunc32to8
			case 42:
				op = ssa.OpTrunc32to16
			case 81:
				op = ssa.OpTrunc64to8
			case 82:
				op = ssa.OpTrunc64to16
			case 84:
				op = ssa.OpTrunc64to32
			default:
				s.Fatalf("weird integer truncation %v -> %v", ft, tt)
			}
		} else if ft.IsSigned() {
			// Widening of a signed source: sign extension.
			switch 10*ft.Size() + tt.Size() {
			case 12:
				op = ssa.OpSignExt8to16
			case 14:
				op = ssa.OpSignExt8to32
			case 18:
				op = ssa.OpSignExt8to64
			case 24:
				op = ssa.OpSignExt16to32
			case 28:
				op = ssa.OpSignExt16to64
			case 48:
				op = ssa.OpSignExt32to64
			default:
				s.Fatalf("bad integer sign extension %v -> %v", ft, tt)
			}
		} else {
			// Widening of an unsigned source: zero extension.
			switch 10*ft.Size() + tt.Size() {
			case 12:
				op = ssa.OpZeroExt8to16
			case 14:
				op = ssa.OpZeroExt8to32
			case 18:
				op = ssa.OpZeroExt8to64
			case 24:
				op = ssa.OpZeroExt16to32
			case 28:
				op = ssa.OpZeroExt16to64
			case 48:
				op = ssa.OpZeroExt32to64
			default:
				// NOTE(review): this is the zero-extension branch but the
				// message says "sign extension" — presumably a historical
				// copy of the branch above; message text left as-is.
				s.Fatalf("weird integer sign extension %v -> %v", ft, tt)
			}
		}
		return s.newValue1(op, tt, v)
	}

	if ft.IsComplex() && tt.IsComplex() {
		// Complex-to-complex: convert the real and imaginary parts
		// independently. Sizes are total complex sizes (8 or 16 bytes).
		var op ssa.Op
		if ft.Size() == tt.Size() {
			switch ft.Size() {
			case 8:
				op = ssa.OpRound32F
			case 16:
				op = ssa.OpRound64F
			default:
				s.Fatalf("weird complex conversion %v -> %v", ft, tt)
			}
		} else if ft.Size() == 8 && tt.Size() == 16 {
			op = ssa.OpCvt32Fto64F
		} else if ft.Size() == 16 && tt.Size() == 8 {
			op = ssa.OpCvt64Fto32F
		} else {
			s.Fatalf("weird complex conversion %v -> %v", ft, tt)
		}
		ftp := types.FloatForComplex(ft)
		ttp := types.FloatForComplex(tt)
		return s.newValue2(ssa.OpComplexMake, tt,
			s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexReal, ftp, v)),
			s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexImag, ftp, v)))
	}

	if tt.IsComplex() {
		// Non-complex to complex: convert to the element float type,
		// then pair with a zero imaginary part.
		et := types.FloatForComplex(tt)
		v = s.conv(n, v, ft, et)
		return s.newValue2(ssa.OpComplexMake, tt, v, s.zeroVal(et))
	}

	if ft.IsFloat() || tt.IsFloat() {
		cft, ctt := s.concreteEtype(ft), s.concreteEtype(tt)
		conv, ok := fpConvOpToSSA[twoTypes{cft, ctt}]

		// Debug-hash-gated path for float -> uint32: lower via int64 and
		// truncate instead of the table entry. ConvertHash controls rollout.
		if ctt == types.TUINT32 && ft.IsFloat() && !base.ConvertHash.MatchPosWithInfo(n.Pos(), "U", nil) {

			conv.op1 = ssa.OpCvt64Fto64
			if cft == types.TFLOAT32 {
				conv.op1 = ssa.OpCvt32Fto64
			}
			conv.op2 = ssa.OpTrunc64to32

		}
		// 32-bit targets (except MIPS, and not under softfloat) use
		// dedicated unsigned-conversion ops for uint32.
		if s.config.RegSize == 4 && Arch.LinkArch.Family != sys.MIPS && !s.softFloat {
			if conv1, ok1 := fpConvOpToSSA32[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
				conv = conv1
			}
		}
		// Targets with native uint64<->float ops (or softfloat) override
		// the OpInvalid placeholder entries.
		if Arch.LinkArch.Family == sys.ARM64 || Arch.LinkArch.Family == sys.Wasm || Arch.LinkArch.Family == sys.S390X || s.softFloat {
			if conv1, ok1 := uint64fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
				conv = conv1
			}
		}

		// MIPS (hard-float) lowers uint32<->float via helper routines.
		if Arch.LinkArch.Family == sys.MIPS && !s.softFloat {
			if ft.Size() == 4 && ft.IsInteger() && !ft.IsSigned() {
				// uint32 -> float
				if tt.Size() == 4 {
					return s.uint32Tofloat32(n, v, ft, tt)
				}
				if tt.Size() == 8 {
					return s.uint32Tofloat64(n, v, ft, tt)
				}
			} else if tt.Size() == 4 && tt.IsInteger() && !tt.IsSigned() {
				// float -> uint32
				if ft.Size() == 4 {
					return s.float32ToUint32(n, v, ft, tt)
				}
				if ft.Size() == 8 {
					return s.float64ToUint32(n, v, ft, tt)
				}
			}
		}

		if !ok {
			s.Fatalf("weird float conversion %v -> %v", ft, tt)
		}
		op1, op2, it := conv.op1, conv.op2, conv.intermediateType

		if op1 != ssa.OpInvalid && op2 != ssa.OpInvalid {
			// Direct two-step lowering; OpCopy steps are elided.
			if op1 == ssa.OpCopy {
				if op2 == ssa.OpCopy {
					return v
				}
				return s.newValueOrSfCall1(op2, tt, v)
			}
			if op2 == ssa.OpCopy {
				return s.newValueOrSfCall1(op1, tt, v)
			}
			return s.newValueOrSfCall1(op2, tt, s.newValueOrSfCall1(op1, types.Types[it], v))
		}

		// One of the ops is OpInvalid: a helper-based lowering is needed
		// (unsigned 64-bit cases with no table override above).
		if ft.IsInteger() {
			// Unsigned integer -> float.
			if tt.Size() == 4 {
				return s.uint64Tofloat32(n, v, ft, tt)
			}
			if tt.Size() == 8 {
				return s.uint64Tofloat64(n, v, ft, tt)
			}
			s.Fatalf("weird unsigned integer to float conversion %v -> %v", ft, tt)
		}

		// Float -> unsigned integer.
		if ft.Size() == 4 {
			switch tt.Size() {
			case 8:
				return s.float32ToUint64(n, v, ft, tt)
			case 4, 2, 1:
				// Narrower results go through the uint32 helper.
				return s.float32ToUint32(n, v, ft, tt)
			}
		}
		if ft.Size() == 8 {
			switch tt.Size() {
			case 8:
				return s.float64ToUint64(n, v, ft, tt)
			case 4, 2, 1:
				// Narrower results go through the uint32 helper.
				return s.float64ToUint32(n, v, ft, tt)
			}

		}
		s.Fatalf("weird float to unsigned integer conversion %v -> %v", ft, tt)
		return nil
	}

	s.Fatalf("unhandled OCONV %s -> %s", ft.Kind(), tt.Kind())
	return nil
}
3019
3020
3021 func (s *state) expr(n ir.Node) *ssa.Value {
3022 return s.exprCheckPtr(n, true)
3023 }
3024
3025 func (s *state) exprCheckPtr(n ir.Node, checkPtrOK bool) *ssa.Value {
3026 if ir.HasUniquePos(n) {
3027
3028
3029 s.pushLine(n.Pos())
3030 defer s.popLine()
3031 }
3032
3033 s.stmtList(n.Init())
3034 switch n.Op() {
3035 case ir.OBYTES2STRTMP:
3036 n := n.(*ir.ConvExpr)
3037 slice := s.expr(n.X)
3038 ptr := s.newValue1(ssa.OpSlicePtr, s.f.Config.Types.BytePtr, slice)
3039 len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
3040 return s.newValue2(ssa.OpStringMake, n.Type(), ptr, len)
3041 case ir.OSTR2BYTESTMP:
3042 n := n.(*ir.ConvExpr)
3043 str := s.expr(n.X)
3044 ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, str)
3045 if !n.NonNil() {
3046
3047
3048
3049 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], ptr, s.constNil(ptr.Type))
3050 zerobase := s.newValue1A(ssa.OpAddr, ptr.Type, ir.Syms.Zerobase, s.sb)
3051 ptr = s.ternary(cond, ptr, zerobase)
3052 }
3053 len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], str)
3054 return s.newValue3(ssa.OpSliceMake, n.Type(), ptr, len, len)
3055 case ir.OCFUNC:
3056 n := n.(*ir.UnaryExpr)
3057 aux := n.X.(*ir.Name).Linksym()
3058
3059
3060 if aux.ABI() != obj.ABIInternal {
3061 s.Fatalf("expected ABIInternal: %v", aux.ABI())
3062 }
3063 return s.entryNewValue1A(ssa.OpAddr, n.Type(), aux, s.sb)
3064 case ir.ONAME:
3065 n := n.(*ir.Name)
3066 if n.Class == ir.PFUNC {
3067
3068 sym := staticdata.FuncLinksym(n)
3069 return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(n.Type()), sym, s.sb)
3070 }
3071 if s.canSSA(n) {
3072 return s.variable(n, n.Type())
3073 }
3074 return s.load(n.Type(), s.addr(n))
3075 case ir.OLINKSYMOFFSET:
3076 n := n.(*ir.LinksymOffsetExpr)
3077 return s.load(n.Type(), s.addr(n))
3078 case ir.ONIL:
3079 n := n.(*ir.NilExpr)
3080 t := n.Type()
3081 switch {
3082 case t.IsSlice():
3083 return s.constSlice(t)
3084 case t.IsInterface():
3085 return s.constInterface(t)
3086 default:
3087 return s.constNil(t)
3088 }
3089 case ir.OLITERAL:
3090 switch u := n.Val(); u.Kind() {
3091 case constant.Int:
3092 i := ir.IntVal(n.Type(), u)
3093 switch n.Type().Size() {
3094 case 1:
3095 return s.constInt8(n.Type(), int8(i))
3096 case 2:
3097 return s.constInt16(n.Type(), int16(i))
3098 case 4:
3099 return s.constInt32(n.Type(), int32(i))
3100 case 8:
3101 return s.constInt64(n.Type(), i)
3102 default:
3103 s.Fatalf("bad integer size %d", n.Type().Size())
3104 return nil
3105 }
3106 case constant.String:
3107 i := constant.StringVal(u)
3108 if i == "" {
3109 return s.constEmptyString(n.Type())
3110 }
3111 return s.entryNewValue0A(ssa.OpConstString, n.Type(), ssa.StringToAux(i))
3112 case constant.Bool:
3113 return s.constBool(constant.BoolVal(u))
3114 case constant.Float:
3115 f, _ := constant.Float64Val(u)
3116 switch n.Type().Size() {
3117 case 4:
3118 return s.constFloat32(n.Type(), f)
3119 case 8:
3120 return s.constFloat64(n.Type(), f)
3121 default:
3122 s.Fatalf("bad float size %d", n.Type().Size())
3123 return nil
3124 }
3125 case constant.Complex:
3126 re, _ := constant.Float64Val(constant.Real(u))
3127 im, _ := constant.Float64Val(constant.Imag(u))
3128 switch n.Type().Size() {
3129 case 8:
3130 pt := types.Types[types.TFLOAT32]
3131 return s.newValue2(ssa.OpComplexMake, n.Type(),
3132 s.constFloat32(pt, re),
3133 s.constFloat32(pt, im))
3134 case 16:
3135 pt := types.Types[types.TFLOAT64]
3136 return s.newValue2(ssa.OpComplexMake, n.Type(),
3137 s.constFloat64(pt, re),
3138 s.constFloat64(pt, im))
3139 default:
3140 s.Fatalf("bad complex size %d", n.Type().Size())
3141 return nil
3142 }
3143 default:
3144 s.Fatalf("unhandled OLITERAL %v", u.Kind())
3145 return nil
3146 }
3147 case ir.OCONVNOP:
3148 n := n.(*ir.ConvExpr)
3149 to := n.Type()
3150 from := n.X.Type()
3151
3152
3153
3154 x := s.expr(n.X)
3155 if to == from {
3156 return x
3157 }
3158
3159
3160
3161
3162
3163 if to.IsPtrShaped() != from.IsPtrShaped() {
3164 return s.newValue2(ssa.OpConvert, to, x, s.mem())
3165 }
3166
3167 v := s.newValue1(ssa.OpCopy, to, x)
3168
3169
3170 if to.Kind() == types.TFUNC && from.IsPtrShaped() {
3171 return v
3172 }
3173
3174
3175 if from.Kind() == to.Kind() {
3176 return v
3177 }
3178
3179
3180 if to.IsUnsafePtr() && from.IsPtrShaped() || from.IsUnsafePtr() && to.IsPtrShaped() {
3181 if s.checkPtrEnabled && checkPtrOK && to.IsPtr() && from.IsUnsafePtr() {
3182 s.checkPtrAlignment(n, v, nil)
3183 }
3184 return v
3185 }
3186
3187
3188 mt := types.NewPtr(reflectdata.MapType())
3189 if to.Kind() == types.TMAP && from == mt {
3190 return v
3191 }
3192
3193 types.CalcSize(from)
3194 types.CalcSize(to)
3195 if from.Size() != to.Size() {
3196 s.Fatalf("CONVNOP width mismatch %v (%d) -> %v (%d)\n", from, from.Size(), to, to.Size())
3197 return nil
3198 }
3199 if etypesign(from.Kind()) != etypesign(to.Kind()) {
3200 s.Fatalf("CONVNOP sign mismatch %v (%s) -> %v (%s)\n", from, from.Kind(), to, to.Kind())
3201 return nil
3202 }
3203
3204 if base.Flag.Cfg.Instrumenting {
3205
3206
3207
3208 return v
3209 }
3210
3211 if etypesign(from.Kind()) == 0 {
3212 s.Fatalf("CONVNOP unrecognized non-integer %v -> %v\n", from, to)
3213 return nil
3214 }
3215
3216
3217 return v
3218
3219 case ir.OCONV:
3220 n := n.(*ir.ConvExpr)
3221 x := s.expr(n.X)
3222 return s.conv(n, x, n.X.Type(), n.Type())
3223
3224 case ir.ODOTTYPE:
3225 n := n.(*ir.TypeAssertExpr)
3226 res, _ := s.dottype(n, false)
3227 return res
3228
3229 case ir.ODYNAMICDOTTYPE:
3230 n := n.(*ir.DynamicTypeAssertExpr)
3231 res, _ := s.dynamicDottype(n, false)
3232 return res
3233
3234
3235 case ir.OLT, ir.OEQ, ir.ONE, ir.OLE, ir.OGE, ir.OGT:
3236 n := n.(*ir.BinaryExpr)
3237 a := s.expr(n.X)
3238 b := s.expr(n.Y)
3239 if n.X.Type().IsComplex() {
3240 pt := types.FloatForComplex(n.X.Type())
3241 op := s.ssaOp(ir.OEQ, pt)
3242 r := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b))
3243 i := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b))
3244 c := s.newValue2(ssa.OpAndB, types.Types[types.TBOOL], r, i)
3245 switch n.Op() {
3246 case ir.OEQ:
3247 return c
3248 case ir.ONE:
3249 return s.newValue1(ssa.OpNot, types.Types[types.TBOOL], c)
3250 default:
3251 s.Fatalf("ordered complex compare %v", n.Op())
3252 }
3253 }
3254
3255
3256 op := n.Op()
3257 switch op {
3258 case ir.OGE:
3259 op, a, b = ir.OLE, b, a
3260 case ir.OGT:
3261 op, a, b = ir.OLT, b, a
3262 }
3263 if n.X.Type().IsFloat() {
3264
3265 return s.newValueOrSfCall2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
3266 }
3267
3268 return s.newValue2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
3269 case ir.OMUL:
3270 n := n.(*ir.BinaryExpr)
3271 a := s.expr(n.X)
3272 b := s.expr(n.Y)
3273 if n.Type().IsComplex() {
3274 mulop := ssa.OpMul64F
3275 addop := ssa.OpAdd64F
3276 subop := ssa.OpSub64F
3277 pt := types.FloatForComplex(n.Type())
3278 wt := types.Types[types.TFLOAT64]
3279
3280 areal := s.newValue1(ssa.OpComplexReal, pt, a)
3281 breal := s.newValue1(ssa.OpComplexReal, pt, b)
3282 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
3283 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
3284
3285 if pt != wt {
3286 areal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, areal)
3287 breal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, breal)
3288 aimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, aimag)
3289 bimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, bimag)
3290 }
3291
3292 xreal := s.newValueOrSfCall2(subop, wt, s.newValueOrSfCall2(mulop, wt, areal, breal), s.newValueOrSfCall2(mulop, wt, aimag, bimag))
3293 ximag := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, areal, bimag), s.newValueOrSfCall2(mulop, wt, aimag, breal))
3294
3295 if pt != wt {
3296 xreal = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, xreal)
3297 ximag = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, ximag)
3298 }
3299
3300 return s.newValue2(ssa.OpComplexMake, n.Type(), xreal, ximag)
3301 }
3302
3303 if n.Type().IsFloat() {
3304 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3305 }
3306
3307 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3308
3309 case ir.ODIV:
3310 n := n.(*ir.BinaryExpr)
3311 a := s.expr(n.X)
3312 b := s.expr(n.Y)
3313 if n.Type().IsComplex() {
3314
3315
3316
3317 mulop := ssa.OpMul64F
3318 addop := ssa.OpAdd64F
3319 subop := ssa.OpSub64F
3320 divop := ssa.OpDiv64F
3321 pt := types.FloatForComplex(n.Type())
3322 wt := types.Types[types.TFLOAT64]
3323
3324 areal := s.newValue1(ssa.OpComplexReal, pt, a)
3325 breal := s.newValue1(ssa.OpComplexReal, pt, b)
3326 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
3327 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
3328
3329 if pt != wt {
3330 areal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, areal)
3331 breal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, breal)
3332 aimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, aimag)
3333 bimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, bimag)
3334 }
3335
3336 denom := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, breal, breal), s.newValueOrSfCall2(mulop, wt, bimag, bimag))
3337 xreal := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, areal, breal), s.newValueOrSfCall2(mulop, wt, aimag, bimag))
3338 ximag := s.newValueOrSfCall2(subop, wt, s.newValueOrSfCall2(mulop, wt, aimag, breal), s.newValueOrSfCall2(mulop, wt, areal, bimag))
3339
3340
3341
3342
3343
3344 xreal = s.newValueOrSfCall2(divop, wt, xreal, denom)
3345 ximag = s.newValueOrSfCall2(divop, wt, ximag, denom)
3346
3347 if pt != wt {
3348 xreal = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, xreal)
3349 ximag = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, ximag)
3350 }
3351 return s.newValue2(ssa.OpComplexMake, n.Type(), xreal, ximag)
3352 }
3353 if n.Type().IsFloat() {
3354 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3355 }
3356 return s.intDivide(n, a, b)
3357 case ir.OMOD:
3358 n := n.(*ir.BinaryExpr)
3359 a := s.expr(n.X)
3360 b := s.expr(n.Y)
3361 return s.intDivide(n, a, b)
3362 case ir.OADD, ir.OSUB:
3363 n := n.(*ir.BinaryExpr)
3364 a := s.expr(n.X)
3365 b := s.expr(n.Y)
3366 if n.Type().IsComplex() {
3367 pt := types.FloatForComplex(n.Type())
3368 op := s.ssaOp(n.Op(), pt)
3369 return s.newValue2(ssa.OpComplexMake, n.Type(),
3370 s.newValueOrSfCall2(op, pt, s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b)),
3371 s.newValueOrSfCall2(op, pt, s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b)))
3372 }
3373 if n.Type().IsFloat() {
3374 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3375 }
3376 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3377 case ir.OAND, ir.OOR, ir.OXOR:
3378 n := n.(*ir.BinaryExpr)
3379 a := s.expr(n.X)
3380 b := s.expr(n.Y)
3381 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3382 case ir.OANDNOT:
3383 n := n.(*ir.BinaryExpr)
3384 a := s.expr(n.X)
3385 b := s.expr(n.Y)
3386 b = s.newValue1(s.ssaOp(ir.OBITNOT, b.Type), b.Type, b)
3387 return s.newValue2(s.ssaOp(ir.OAND, n.Type()), a.Type, a, b)
3388 case ir.OLSH, ir.ORSH:
3389 n := n.(*ir.BinaryExpr)
3390 a := s.expr(n.X)
3391 b := s.expr(n.Y)
3392 bt := b.Type
3393 if bt.IsSigned() {
3394 cmp := s.newValue2(s.ssaOp(ir.OLE, bt), types.Types[types.TBOOL], s.zeroVal(bt), b)
3395 s.check(cmp, ir.Syms.Panicshift)
3396 bt = bt.ToUnsigned()
3397 }
3398 return s.newValue2(s.ssaShiftOp(n.Op(), n.Type(), bt), a.Type, a, b)
3399 case ir.OANDAND, ir.OOROR:
3400
3401
3402
3403
3404
3405
3406
3407
3408
3409
3410
3411
3412
3413 n := n.(*ir.LogicalExpr)
3414 el := s.expr(n.X)
3415 s.vars[n] = el
3416
3417 b := s.endBlock()
3418 b.Kind = ssa.BlockIf
3419 b.SetControl(el)
3420
3421
3422
3423
3424
3425 bRight := s.f.NewBlock(ssa.BlockPlain)
3426 bResult := s.f.NewBlock(ssa.BlockPlain)
3427 if n.Op() == ir.OANDAND {
3428 b.AddEdgeTo(bRight)
3429 b.AddEdgeTo(bResult)
3430 } else if n.Op() == ir.OOROR {
3431 b.AddEdgeTo(bResult)
3432 b.AddEdgeTo(bRight)
3433 }
3434
3435 s.startBlock(bRight)
3436 er := s.expr(n.Y)
3437 s.vars[n] = er
3438
3439 b = s.endBlock()
3440 b.AddEdgeTo(bResult)
3441
3442 s.startBlock(bResult)
3443 return s.variable(n, types.Types[types.TBOOL])
3444 case ir.OCOMPLEX:
3445 n := n.(*ir.BinaryExpr)
3446 r := s.expr(n.X)
3447 i := s.expr(n.Y)
3448 return s.newValue2(ssa.OpComplexMake, n.Type(), r, i)
3449
3450
3451 case ir.ONEG:
3452 n := n.(*ir.UnaryExpr)
3453 a := s.expr(n.X)
3454 if n.Type().IsComplex() {
3455 tp := types.FloatForComplex(n.Type())
3456 negop := s.ssaOp(n.Op(), tp)
3457 return s.newValue2(ssa.OpComplexMake, n.Type(),
3458 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexReal, tp, a)),
3459 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexImag, tp, a)))
3460 }
3461 return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
3462 case ir.ONOT, ir.OBITNOT:
3463 n := n.(*ir.UnaryExpr)
3464 a := s.expr(n.X)
3465 return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
3466 case ir.OIMAG, ir.OREAL:
3467 n := n.(*ir.UnaryExpr)
3468 a := s.expr(n.X)
3469 return s.newValue1(s.ssaOp(n.Op(), n.X.Type()), n.Type(), a)
3470 case ir.OPLUS:
3471 n := n.(*ir.UnaryExpr)
3472 return s.expr(n.X)
3473
3474 case ir.OADDR:
3475 n := n.(*ir.AddrExpr)
3476 return s.addr(n.X)
3477
3478 case ir.ORESULT:
3479 n := n.(*ir.ResultExpr)
3480 if s.prevCall == nil || s.prevCall.Op != ssa.OpStaticLECall && s.prevCall.Op != ssa.OpInterLECall && s.prevCall.Op != ssa.OpClosureLECall {
3481 panic("Expected to see a previous call")
3482 }
3483 which := n.Index
3484 if which == -1 {
3485 panic(fmt.Errorf("ORESULT %v does not match call %s", n, s.prevCall))
3486 }
3487 return s.resultOfCall(s.prevCall, which, n.Type())
3488
3489 case ir.ODEREF:
3490 n := n.(*ir.StarExpr)
3491 p := s.exprPtr(n.X, n.Bounded(), n.Pos())
3492 return s.load(n.Type(), p)
3493
3494 case ir.ODOT:
3495 n := n.(*ir.SelectorExpr)
3496 if n.X.Op() == ir.OSTRUCTLIT {
3497
3498
3499
3500 if !ir.IsZero(n.X) {
3501 s.Fatalf("literal with nonzero value in SSA: %v", n.X)
3502 }
3503 return s.zeroVal(n.Type())
3504 }
3505
3506
3507
3508
3509 if ir.IsAddressable(n) && !s.canSSA(n) {
3510 p := s.addr(n)
3511 return s.load(n.Type(), p)
3512 }
3513 v := s.expr(n.X)
3514 return s.newValue1I(ssa.OpStructSelect, n.Type(), int64(fieldIdx(n)), v)
3515
3516 case ir.ODOTPTR:
3517 n := n.(*ir.SelectorExpr)
3518 p := s.exprPtr(n.X, n.Bounded(), n.Pos())
3519 p = s.newValue1I(ssa.OpOffPtr, types.NewPtr(n.Type()), n.Offset(), p)
3520 return s.load(n.Type(), p)
3521
3522 case ir.OINDEX:
3523 n := n.(*ir.IndexExpr)
3524 switch {
3525 case n.X.Type().IsString():
3526 if n.Bounded() && ir.IsConst(n.X, constant.String) && ir.IsConst(n.Index, constant.Int) {
3527
3528
3529
3530 return s.newValue0I(ssa.OpConst8, types.Types[types.TUINT8], int64(int8(ir.StringVal(n.X)[ir.Int64Val(n.Index)])))
3531 }
3532 a := s.expr(n.X)
3533 i := s.expr(n.Index)
3534 len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], a)
3535 i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
3536 ptrtyp := s.f.Config.Types.BytePtr
3537 ptr := s.newValue1(ssa.OpStringPtr, ptrtyp, a)
3538 if ir.IsConst(n.Index, constant.Int) {
3539 ptr = s.newValue1I(ssa.OpOffPtr, ptrtyp, ir.Int64Val(n.Index), ptr)
3540 } else {
3541 ptr = s.newValue2(ssa.OpAddPtr, ptrtyp, ptr, i)
3542 }
3543 return s.load(types.Types[types.TUINT8], ptr)
3544 case n.X.Type().IsSlice():
3545 p := s.addr(n)
3546 return s.load(n.X.Type().Elem(), p)
3547 case n.X.Type().IsArray():
3548 if ssa.CanSSA(n.X.Type()) {
3549
3550 bound := n.X.Type().NumElem()
3551 a := s.expr(n.X)
3552 i := s.expr(n.Index)
3553 if bound == 0 {
3554
3555
3556 z := s.constInt(types.Types[types.TINT], 0)
3557 s.boundsCheck(z, z, ssa.BoundsIndex, false)
3558
3559
3560 return s.load(n.Type(), s.constNil(n.Type().PtrTo()))
3561 }
3562 len := s.constInt(types.Types[types.TINT], bound)
3563 s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
3564 return s.newValue1I(ssa.OpArraySelect, n.Type(), 0, a)
3565 }
3566 p := s.addr(n)
3567 return s.load(n.X.Type().Elem(), p)
3568 default:
3569 s.Fatalf("bad type for index %v", n.X.Type())
3570 return nil
3571 }
3572
3573 case ir.OLEN, ir.OCAP:
3574 n := n.(*ir.UnaryExpr)
3575
3576
3577 a := s.expr(n.X)
3578 t := n.X.Type()
3579 switch {
3580 case t.IsSlice():
3581 op := ssa.OpSliceLen
3582 if n.Op() == ir.OCAP {
3583 op = ssa.OpSliceCap
3584 }
3585 return s.newValue1(op, types.Types[types.TINT], a)
3586 case t.IsString():
3587 return s.newValue1(ssa.OpStringLen, types.Types[types.TINT], a)
3588 case t.IsMap(), t.IsChan():
3589 return s.referenceTypeBuiltin(n, a)
3590 case t.IsArray():
3591 return s.constInt(types.Types[types.TINT], t.NumElem())
3592 case t.IsPtr() && t.Elem().IsArray():
3593 return s.constInt(types.Types[types.TINT], t.Elem().NumElem())
3594 default:
3595 s.Fatalf("bad type in len/cap: %v", t)
3596 return nil
3597 }
3598
3599 case ir.OSPTR:
3600 n := n.(*ir.UnaryExpr)
3601 a := s.expr(n.X)
3602 if n.X.Type().IsSlice() {
3603 if n.Bounded() {
3604 return s.newValue1(ssa.OpSlicePtr, n.Type(), a)
3605 }
3606 return s.newValue1(ssa.OpSlicePtrUnchecked, n.Type(), a)
3607 } else {
3608 return s.newValue1(ssa.OpStringPtr, n.Type(), a)
3609 }
3610
3611 case ir.OITAB:
3612 n := n.(*ir.UnaryExpr)
3613 a := s.expr(n.X)
3614 return s.newValue1(ssa.OpITab, n.Type(), a)
3615
3616 case ir.OIDATA:
3617 n := n.(*ir.UnaryExpr)
3618 a := s.expr(n.X)
3619 return s.newValue1(ssa.OpIData, n.Type(), a)
3620
3621 case ir.OMAKEFACE:
3622 n := n.(*ir.BinaryExpr)
3623 tab := s.expr(n.X)
3624 data := s.expr(n.Y)
3625 return s.newValue2(ssa.OpIMake, n.Type(), tab, data)
3626
3627 case ir.OSLICEHEADER:
3628 n := n.(*ir.SliceHeaderExpr)
3629 p := s.expr(n.Ptr)
3630 l := s.expr(n.Len)
3631 c := s.expr(n.Cap)
3632 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3633
3634 case ir.OSTRINGHEADER:
3635 n := n.(*ir.StringHeaderExpr)
3636 p := s.expr(n.Ptr)
3637 l := s.expr(n.Len)
3638 return s.newValue2(ssa.OpStringMake, n.Type(), p, l)
3639
3640 case ir.OSLICE, ir.OSLICEARR, ir.OSLICE3, ir.OSLICE3ARR:
3641 n := n.(*ir.SliceExpr)
3642 check := s.checkPtrEnabled && n.Op() == ir.OSLICE3ARR && n.X.Op() == ir.OCONVNOP && n.X.(*ir.ConvExpr).X.Type().IsUnsafePtr()
3643 v := s.exprCheckPtr(n.X, !check)
3644 var i, j, k *ssa.Value
3645 if n.Low != nil {
3646 i = s.expr(n.Low)
3647 }
3648 if n.High != nil {
3649 j = s.expr(n.High)
3650 }
3651 if n.Max != nil {
3652 k = s.expr(n.Max)
3653 }
3654 p, l, c := s.slice(v, i, j, k, n.Bounded())
3655 if check {
3656
3657 s.checkPtrAlignment(n.X.(*ir.ConvExpr), v, s.conv(n.Max, k, k.Type, types.Types[types.TUINTPTR]))
3658 }
3659 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3660
3661 case ir.OSLICESTR:
3662 n := n.(*ir.SliceExpr)
3663 v := s.expr(n.X)
3664 var i, j *ssa.Value
3665 if n.Low != nil {
3666 i = s.expr(n.Low)
3667 }
3668 if n.High != nil {
3669 j = s.expr(n.High)
3670 }
3671 p, l, _ := s.slice(v, i, j, nil, n.Bounded())
3672 return s.newValue2(ssa.OpStringMake, n.Type(), p, l)
3673
3674 case ir.OSLICE2ARRPTR:
3675
3676
3677
3678
3679 n := n.(*ir.ConvExpr)
3680 v := s.expr(n.X)
3681 nelem := n.Type().Elem().NumElem()
3682 arrlen := s.constInt(types.Types[types.TINT], nelem)
3683 cap := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], v)
3684 s.boundsCheck(arrlen, cap, ssa.BoundsConvert, false)
3685 op := ssa.OpSlicePtr
3686 if nelem == 0 {
3687 op = ssa.OpSlicePtrUnchecked
3688 }
3689 return s.newValue1(op, n.Type(), v)
3690
3691 case ir.OCALLFUNC:
3692 n := n.(*ir.CallExpr)
3693 if ir.IsIntrinsicCall(n) {
3694 return s.intrinsicCall(n)
3695 }
3696 fallthrough
3697
3698 case ir.OCALLINTER:
3699 n := n.(*ir.CallExpr)
3700 return s.callResult(n, callNormal)
3701
3702 case ir.OGETG:
3703 n := n.(*ir.CallExpr)
3704 return s.newValue1(ssa.OpGetG, n.Type(), s.mem())
3705
3706 case ir.OGETCALLERSP:
3707 n := n.(*ir.CallExpr)
3708 return s.newValue1(ssa.OpGetCallerSP, n.Type(), s.mem())
3709
3710 case ir.OAPPEND:
3711 return s.append(n.(*ir.CallExpr), false)
3712
3713 case ir.OMOVE2HEAP:
3714 return s.move2heap(n.(*ir.MoveToHeapExpr))
3715
3716 case ir.OMIN, ir.OMAX:
3717 return s.minMax(n.(*ir.CallExpr))
3718
3719 case ir.OSTRUCTLIT, ir.OARRAYLIT:
3720
3721
3722
3723 n := n.(*ir.CompLitExpr)
3724 if !ir.IsZero(n) {
3725 s.Fatalf("literal with nonzero value in SSA: %v", n)
3726 }
3727 return s.zeroVal(n.Type())
3728
3729 case ir.ONEW:
3730 n := n.(*ir.UnaryExpr)
3731 if x, ok := n.X.(*ir.DynamicType); ok && x.Op() == ir.ODYNAMICTYPE {
3732 return s.newObjectNonSpecialized(n.Type().Elem(), s.expr(x.RType))
3733 }
3734 return s.newObject(n.Type().Elem())
3735
3736 case ir.OUNSAFEADD:
3737 n := n.(*ir.BinaryExpr)
3738 ptr := s.expr(n.X)
3739 len := s.expr(n.Y)
3740
3741
3742
3743 len = s.conv(n, len, len.Type, types.Types[types.TUINTPTR])
3744
3745 return s.newValue2(ssa.OpAddPtr, n.Type(), ptr, len)
3746
3747 default:
3748 s.Fatalf("unhandled expr %v", n.Op())
3749 return nil
3750 }
3751 }
3752
3753 func (s *state) resultOfCall(c *ssa.Value, which int64, t *types.Type) *ssa.Value {
3754 aux := c.Aux.(*ssa.AuxCall)
3755 pa := aux.ParamAssignmentForResult(which)
3756
3757
3758 if len(pa.Registers) == 0 && !ssa.CanSSA(t) {
3759 addr := s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), which, c)
3760 return s.rawLoad(t, addr)
3761 }
3762 return s.newValue1I(ssa.OpSelectN, t, which, c)
3763 }
3764
3765 func (s *state) resultAddrOfCall(c *ssa.Value, which int64, t *types.Type) *ssa.Value {
3766 aux := c.Aux.(*ssa.AuxCall)
3767 pa := aux.ParamAssignmentForResult(which)
3768 if len(pa.Registers) == 0 {
3769 return s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), which, c)
3770 }
3771 _, addr := s.temp(c.Pos, t)
3772 rval := s.newValue1I(ssa.OpSelectN, t, which, c)
3773 s.vars[memVar] = s.newValue3Apos(ssa.OpStore, types.TypeMem, t, addr, rval, s.mem(), false)
3774 return addr
3775 }
3776
3777
3778 func (s *state) getBackingStoreInfoForAppend(n *ir.CallExpr) *backingStoreInfo {
3779 if n.Esc() != ir.EscNone {
3780 return nil
3781 }
3782 return s.getBackingStoreInfo(n.Args[0])
3783 }
// getBackingStoreInfo returns info about a fixed-size stack buffer that
// can serve as the backing store for the slice-typed node n, or nil if
// a stack backing store cannot be used here. Results are cached per
// node in s.backingStores, so repeated calls return the same info.
func (s *state) getBackingStoreInfo(n ir.Node) *backingStoreInfo {
	t := n.Type()
	et := t.Elem()
	// The buffer's total size is capped at the variable-make threshold.
	// Element types of size 0, or bigger than the whole budget, get no
	// stack backing store.
	maxStackSize := int64(base.Debug.VariableMakeThreshold)
	if et.Size() == 0 || et.Size() > maxStackSize {
		return nil
	}
	if base.Flag.N != 0 {
		// Optimizations disabled (-N): skip this one too.
		return nil
	}
	if !base.VariableMakeHash.MatchPos(n.Pos(), nil) {
		// Debug-hash gate, for bisecting this optimization.
		return nil
	}
	i := s.backingStores[n]
	if i != nil {
		// Already set up for this node.
		return i
	}

	// K = element capacity of the buffer: as many elements as fit in
	// the stack budget.
	K := maxStackSize / et.Size()
	KT := types.NewArray(et, K)
	KT.SetNoalg(true)
	types.CalcArraySize(KT)
	// Wrap the array in a struct with a leading zero-length [0]uintptr
	// field — presumably to force uintptr alignment of the buffer;
	// TODO(review): confirm intent.
	align := types.NewArray(types.Types[types.TUINTPTR], 0)
	types.CalcArraySize(align)
	storeTyp := types.NewStruct([]*types.Field{
		{Sym: types.BlankSym, Type: align},
		{Sym: types.BlankSym, Type: KT},
	})
	storeTyp.SetNoalg(true)
	types.CalcStructSize(storeTyp)

	// The stack buffer itself. Its address is handed out to users, so
	// mark it address-taken.
	backingStore := typecheck.TempAt(n.Pos(), s.curfn, storeTyp)
	backingStore.SetAddrtaken(true)

	// used tracks, at run time, whether the buffer is already in use.
	// It must start out false in the function's entry block.
	used := typecheck.TempAt(n.Pos(), s.curfn, types.Types[types.TBOOL])
	if s.curBlock == s.f.Entry {
		s.vars[used] = s.constBool(false)
	} else {
		// We're past the entry block; record the initial false value
		// directly in the entry block's definitions.
		s.defvars[s.f.Entry.ID][used] = s.constBool(false)
	}

	// Cache the info for subsequent calls on the same node.
	if s.backingStores == nil {
		s.backingStores = map[ir.Node]*backingStoreInfo{}
	}
	i = &backingStoreInfo{K: K, store: backingStore, used: used, usedStatic: false}
	s.backingStores[n] = i
	return i
}
3838
3839
3840
3841
3842
3843
3844
3845
3846
func (s *state) append(n *ir.CallExpr, inplace bool) *ssa.Value {
	// append lowers append(s, e1, e2, ...) to, schematically:
	//
	//	ptr, len, cap := s
	//	newlen := len + nargs
	//	if uint(cap) < uint(newlen) {          // unsigned compare
	//		// either grab the zeroed stack backing store (fast
	//		// path, when one is available and unused), or call
	//		// the runtime growslice family
	//		ptr, len, cap = grow(...)
	//	}
	//	ptr[newlen-nargs+i] = e_i              // store each argument
	//	return slicemake(ptr, newlen, cap)
	//
	// If inplace is true, n.Args[0] must be addressable; the resulting
	// ptr/len/cap are written back through its address and append
	// returns nil.

	et := n.Type().Elem()
	pt := types.NewPtr(et)

	// Evaluate the slice being appended to. For the in-place form we
	// need its address too, so the result can be stored back.
	sn := n.Args[0]
	var slice, addr *ssa.Value
	if inplace {
		addr = s.addr(sn)
		slice = s.load(n.Type(), addr)
	} else {
		slice = s.expr(sn)
	}

	// grow: taken when the new length exceeds capacity.
	// assign: the join point where the arguments are stored.
	grow := s.f.NewBlock(ssa.BlockPlain)
	assign := s.f.NewBlock(ssa.BlockPlain)

	// Decompose the slice into its three words.
	p := s.newValue1(ssa.OpSlicePtr, pt, slice)
	l := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
	c := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], slice)

	// newlen = len + number of appended arguments.
	nargs := s.constInt(types.Types[types.TINT], int64(len(n.Args)-1))
	oldLen := l
	l = s.newValue2(s.ssaOp(ir.OADD, types.Types[types.TINT]), types.Types[types.TINT], l, nargs)

	// Need to grow? Compare unsigned (OLT on TUINT): uint(cap) < uint(newlen).
	cmp := s.newValue2(s.ssaOp(ir.OLT, types.Types[types.TUINT]), types.Types[types.TBOOL], c, l)

	// Seed the phi variables for ptr/len (and cap, when the caller
	// needs the capacity of the result).
	s.vars[ptrVar] = p
	s.vars[lenVar] = l
	if !inplace {
		s.vars[capVar] = c
	}

	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.Likely = ssa.BranchUnlikely // growing is the slow path
	b.SetControl(cmp)
	b.AddEdgeTo(grow)
	b.AddEdgeTo(assign)

	// A non-escaping, non-in-place append may be able to use a
	// fixed-size stack buffer instead of calling the runtime.
	var info *backingStoreInfo
	if !inplace {
		info = s.getBackingStoreInfoForAppend(n)
	}

	if !inplace && info != nil && !n.UseBuf && !info.usedStatic {
		// Emit a fast path that claims the stack backing store when:
		//   - the new length fits in the buffer (newlen <= K),
		//   - the buffer is not already in use, and
		//   - the old length is zero (nothing to copy over).
		// Otherwise fall through to the runtime grow call.
		// Mark the buffer statically used so only one append gets this
		// inline fast path.
		info.usedStatic = true

		// Build the chain of tests leading to the claim-the-buffer body.
		usedTestBlock := s.f.NewBlock(ssa.BlockPlain)
		oldLenTestBlock := s.f.NewBlock(ssa.BlockPlain)
		bodyBlock := s.f.NewBlock(ssa.BlockPlain)
		growSlice := s.f.NewBlock(ssa.BlockPlain)
		tInt := types.Types[types.TINT]
		tBool := types.Types[types.TBOOL]

		// Test 1: does the new length fit in the buffer?
		s.startBlock(grow)
		kTest := s.newValue2(s.ssaOp(ir.OLE, tInt), tBool, l, s.constInt(tInt, info.K))
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(kTest)
		b.AddEdgeTo(usedTestBlock)
		b.AddEdgeTo(growSlice)
		b.Likely = ssa.BranchLikely

		// Test 2: is the buffer still free?
		s.startBlock(usedTestBlock)
		usedTest := s.newValue1(ssa.OpNot, tBool, s.expr(info.used))
		b = s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(usedTest)
		b.AddEdgeTo(oldLenTestBlock)
		b.AddEdgeTo(growSlice)
		b.Likely = ssa.BranchLikely

		// Test 3: is the old length zero (no elements to carry over)?
		s.startBlock(oldLenTestBlock)
		oldLenTest := s.newValue2(s.ssaOp(ir.OEQ, tInt), tBool, oldLen, s.constInt(tInt, 0))
		b = s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(oldLenTest)
		b.AddEdgeTo(bodyBlock)
		b.AddEdgeTo(growSlice)
		b.Likely = ssa.BranchLikely

		// All tests passed: claim the buffer.
		s.startBlock(bodyBlock)
		if et.HasPointers() {
			// Redefine the buffer for liveness before zeroing it.
			s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, info.store, s.mem())
		}
		addr := s.addr(info.store)
		s.zero(info.store.Type(), addr)

		// The result now points at the zeroed stack buffer, with
		// capacity K.
		s.vars[ptrVar] = addr
		s.vars[lenVar] = l
		s.vars[capVar] = s.constInt(tInt, info.K)

		// Mark the buffer as in use at run time.
		s.assign(info.used, s.constBool(true), false, 0)
		b = s.endBlock()
		b.AddEdgeTo(assign)

		// The runtime-call grow path continues in growSlice.
		grow = growSlice
	}

	// Slow path: call the runtime to grow the slice.
	s.startBlock(grow)
	taddr := s.expr(n.Fun)
	var r []*ssa.Value
	if info != nil && n.UseBuf {
		// growslice gets the stack buffer to use if it can.
		if et.HasPointers() && !info.usedStatic {
			// The buffer hasn't been VarDef'd/zeroed anywhere yet, and
			// the runtime may start using it; define and zero it in the
			// entry block so liveness and the GC see a valid buffer.
			mem := s.defvars[s.f.Entry.ID][memVar]
			mem = s.f.Entry.NewValue1A(n.Pos(), ssa.OpVarDef, types.TypeMem, info.store, mem)
			addr := s.f.Entry.NewValue2A(n.Pos(), ssa.OpLocalAddr, types.NewPtr(info.store.Type()), info.store, s.sp, mem)
			mem = s.f.Entry.NewValue2I(n.Pos(), ssa.OpZero, types.TypeMem, info.store.Type().Size(), addr, mem)
			mem.Aux = info.store.Type()
			s.defvars[s.f.Entry.ID][memVar] = mem
			info.usedStatic = true
		}
		fn := ir.Syms.GrowsliceBuf
		if goexperiment.RuntimeFreegc && n.AppendNoAlias && !et.HasPointers() {
			// Pointer-free, known-unaliased append under the freegc
			// experiment uses the NoAlias variant.
			fn = ir.Syms.GrowsliceBufNoAlias
		}
		r = s.rtcall(fn, true, []*types.Type{n.Type()}, p, l, c, nargs, taddr, s.addr(info.store), s.constInt(types.Types[types.TINT], info.K))
	} else {
		fn := ir.Syms.Growslice
		if goexperiment.RuntimeFreegc && n.AppendNoAlias && !et.HasPointers() {
			// See NoAlias note above.
			fn = ir.Syms.GrowsliceNoAlias
		}
		r = s.rtcall(fn, true, []*types.Type{n.Type()}, p, l, c, nargs, taddr)
	}

	// Decompose the grown slice returned by the runtime.
	p = s.newValue1(ssa.OpSlicePtr, pt, r[0])
	l = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], r[0])
	c = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], r[0])

	s.vars[ptrVar] = p
	s.vars[lenVar] = l
	s.vars[capVar] = c
	if inplace {
		if sn.Op() == ir.ONAME {
			sn := sn.(*ir.Name)
			if sn.Class != ir.PEXTERN {
				// The whole slice variable is about to be overwritten;
				// tell liveness with a VarDef.
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, sn, s.mem())
			}
		}
		// Store the new cap and ptr back through the slice's address.
		capaddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, types.SliceCapOffset, addr)
		s.store(types.Types[types.TINT], capaddr, c)
		s.store(pt, addr, p)
	}

	b = s.endBlock()
	b.AddEdgeTo(assign)

	// Join point: merge ptr/len/cap from whichever path ran.
	s.startBlock(assign)
	p = s.variable(ptrVar, pt)
	l = s.variable(lenVar, types.Types[types.TINT])
	if !inplace {
		c = s.variable(capVar, types.Types[types.TINT])
	}

	if inplace {
		// Store the new length back (ptr/cap were stored on the grow
		// path; len changes on every path).
		lenaddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, types.SliceLenOffset, addr)
		s.store(types.Types[types.TINT], lenaddr, l)
	}

	// Evaluate the appended arguments. SSA-able values are stored
	// directly; others are moved from their addresses.
	type argRec struct {
		// v is either the value to store (store==true) or the address
		// to copy from (store==false).
		v     *ssa.Value
		store bool
	}
	args := make([]argRec, 0, len(n.Args[1:]))
	for _, n := range n.Args[1:] {
		if ssa.CanSSA(n.Type()) {
			args = append(args, argRec{v: s.expr(n), store: true})
		} else {
			v := s.addr(n)
			args = append(args, argRec{v: v})
		}
	}

	// Write the arguments into slots [newlen-nargs, newlen).
	oldLen = s.newValue2(s.ssaOp(ir.OSUB, types.Types[types.TINT]), types.Types[types.TINT], l, nargs)
	p2 := s.newValue2(ssa.OpPtrIndex, pt, p, oldLen)
	for i, arg := range args {
		addr := s.newValue2(ssa.OpPtrIndex, pt, p2, s.constInt(types.Types[types.TINT], int64(i)))
		if arg.store {
			s.storeType(et, addr, arg.v, 0, true)
		} else {
			s.move(et, addr, arg.v)
		}
	}

	// Done with the phi scratch variables.
	delete(s.vars, ptrVar)
	delete(s.vars, lenVar)
	if !inplace {
		delete(s.vars, capVar)
	}

	// In-place append has stored its result through addr; otherwise
	// reassemble the resulting slice value.
	if inplace {
		return nil
	}
	return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
}
4160
func (s *state) move2heap(n *ir.MoveToHeapExpr) *ssa.Value {
	// move2heap lowers OMOVE2HEAP: if the slice's backing pointer lies
	// within the current stack frame, copy the backing store to the
	// heap via a runtime moveSlice* call and return the relocated
	// slice; otherwise return the slice unchanged.
	slice := s.expr(n.Slice)
	et := slice.Type.Elem()
	pt := types.NewPtr(et)

	info := s.getBackingStoreInfo(n)
	if info == nil {
		// No stack backing store could have been used for this node,
		// so there is nothing that might need moving.
		return slice
	}

	// Decompose the slice into its three words.
	p := s.newValue1(ssa.OpSlicePtr, pt, slice)
	l := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
	c := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], slice)

	moveBlock := s.f.NewBlock(ssa.BlockPlain)  // slow path: copy to heap
	mergeBlock := s.f.NewBlock(ssa.BlockPlain) // join point

	// Phi variables carrying ptr/len/cap across the two paths.
	s.vars[ptrVar] = p
	s.vars[lenVar] = l
	s.vars[capVar] = c

	// Is p inside the current frame? Compute p-sp as a uintptr and
	// compare it, unsigned, against callerSP-sp (the frame size).
	// The unsigned compare also rejects addresses below sp.
	sub := ssa.OpSub64
	less := ssa.OpLess64U
	if s.config.PtrSize == 4 {
		sub = ssa.OpSub32
		less = ssa.OpLess32U
	}
	callerSP := s.newValue1(ssa.OpGetCallerSP, types.Types[types.TUINTPTR], s.mem())
	frameSize := s.newValue2(sub, types.Types[types.TUINTPTR], callerSP, s.sp)
	pInt := s.newValue2(ssa.OpConvert, types.Types[types.TUINTPTR], p, s.mem())
	off := s.newValue2(sub, types.Types[types.TUINTPTR], pInt, s.sp)
	cond := s.newValue2(less, types.Types[types.TBOOL], off, frameSize)

	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.Likely = ssa.BranchUnlikely // usually the data is already off-stack
	b.SetControl(cond)
	b.AddEdgeTo(moveBlock)
	b.AddEdgeTo(mergeBlock)

	// Slow path: pick the runtime mover matching the element type
	// (pointerful vs. not) and whether capacity must be preserved.
	s.startBlock(moveBlock)
	var newSlice *ssa.Value
	if et.HasPointers() {
		// Pointerful elements: the runtime needs the element type.
		typ := s.expr(n.RType)
		if n.PreserveCapacity {
			newSlice = s.rtcall(ir.Syms.MoveSlice, true, []*types.Type{slice.Type}, typ, p, l, c)[0]
		} else {
			newSlice = s.rtcall(ir.Syms.MoveSliceNoCap, true, []*types.Type{slice.Type}, typ, p, l)[0]
		}
	} else {
		// Pointer-free elements: the element size suffices.
		elemSize := s.constInt(types.Types[types.TUINTPTR], et.Size())
		if n.PreserveCapacity {
			newSlice = s.rtcall(ir.Syms.MoveSliceNoScan, true, []*types.Type{slice.Type}, elemSize, p, l, c)[0]
		} else {
			newSlice = s.rtcall(ir.Syms.MoveSliceNoCapNoScan, true, []*types.Type{slice.Type}, elemSize, p, l)[0]
		}
	}

	// Replace ptr/len/cap with the relocated slice's words.
	s.vars[ptrVar] = s.newValue1(ssa.OpSlicePtr, pt, newSlice)
	s.vars[lenVar] = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], newSlice)
	s.vars[capVar] = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], newSlice)
	b = s.endBlock()
	b.AddEdgeTo(mergeBlock)

	// Join: merge the words from both paths and rebuild the slice.
	s.startBlock(mergeBlock)
	p = s.variable(ptrVar, pt)
	l = s.variable(lenVar, types.Types[types.TINT])
	c = s.variable(capVar, types.Types[types.TINT])
	delete(s.vars, ptrVar)
	delete(s.vars, lenVar)
	delete(s.vars, capVar)
	return s.newValue3(ssa.OpSliceMake, slice.Type, p, l, c)
}
4249
4250
func (s *state) minMax(n *ir.CallExpr) *ssa.Value {
	// minMax lowers a call to the min or max builtin, with the
	// arguments in n.Args and the result type n.Type().

	// fold left-folds the (variadic) arguments with a binary op:
	// op(...op(op(a0, a1), a2)..., ak).
	fold := func(op func(x, a *ssa.Value) *ssa.Value) *ssa.Value {
		x := s.expr(n.Args[0])
		for _, arg := range n.Args[1:] {
			x = op(x, s.expr(arg))
		}
		return x
	}

	typ := n.Type()

	if typ.IsFloat() || typ.IsString() {
		// Floats and strings bypass the generic compare+select lowering
		// at the bottom of this function — presumably because float
		// min/max has NaN/signed-zero semantics and strings compare
		// byte-wise. Use a native instruction where one exists, or a
		// runtime helper otherwise.
		if typ.IsFloat() {
			// Architectures (or arch revisions) with a float min/max
			// instruction.
			hasIntrinsic := false
			switch Arch.LinkArch.Family {
			case sys.AMD64, sys.ARM64, sys.Loong64, sys.RISCV64, sys.S390X:
				hasIntrinsic = true
			case sys.PPC64:
				hasIntrinsic = buildcfg.GOPPC64 >= 9
			}

			if hasIntrinsic {
				// Pick the op for the element width and min vs. max.
				var op ssa.Op
				switch {
				case typ.Kind() == types.TFLOAT64 && n.Op() == ir.OMIN:
					op = ssa.OpMin64F
				case typ.Kind() == types.TFLOAT64 && n.Op() == ir.OMAX:
					op = ssa.OpMax64F
				case typ.Kind() == types.TFLOAT32 && n.Op() == ir.OMIN:
					op = ssa.OpMin32F
				case typ.Kind() == types.TFLOAT32 && n.Op() == ir.OMAX:
					op = ssa.OpMax32F
				}
				return fold(func(x, a *ssa.Value) *ssa.Value {
					return s.newValue2(op, typ, x, a)
				})
			}
		}
		// No intrinsic available: select the matching runtime helper.
		var name string
		switch typ.Kind() {
		case types.TFLOAT32:
			switch n.Op() {
			case ir.OMIN:
				name = "fmin32"
			case ir.OMAX:
				name = "fmax32"
			}
		case types.TFLOAT64:
			switch n.Op() {
			case ir.OMIN:
				name = "fmin64"
			case ir.OMAX:
				name = "fmax64"
			}
		case types.TSTRING:
			switch n.Op() {
			case ir.OMIN:
				name = "strmin"
			case ir.OMAX:
				name = "strmax"
			}
		}
		fn := typecheck.LookupRuntimeFunc(name)

		// Fold the arguments through pairwise runtime calls.
		return fold(func(x, a *ssa.Value) *ssa.Value {
			return s.rtcall(fn, true, []*types.Type{typ}, x, a)[0]
		})
	}

	if typ.IsInteger() {
		// 64-bit integer min/max instructions on RISC-V profile >= 22
		// (presumably the Zbb min/max/minu/maxu instructions — confirm
		// against the riscv64 backend).
		if Arch.LinkArch.Family == sys.RISCV64 && buildcfg.GORISCV64 >= 22 && typ.Size() == 8 {
			var op ssa.Op
			switch {
			case typ.IsSigned() && n.Op() == ir.OMIN:
				op = ssa.OpMin64
			case typ.IsSigned() && n.Op() == ir.OMAX:
				op = ssa.OpMax64
			case typ.IsUnsigned() && n.Op() == ir.OMIN:
				op = ssa.OpMin64u
			case typ.IsUnsigned() && n.Op() == ir.OMAX:
				op = ssa.OpMax64u
			}
			return fold(func(x, a *ssa.Value) *ssa.Value {
				return s.newValue2(op, typ, x, a)
			})
		}
	}

	// Generic lowering: compare and select with a ternary.
	lt := s.ssaOp(ir.OLT, typ)

	return fold(func(x, a *ssa.Value) *ssa.Value {
		switch n.Op() {
		case ir.OMIN:
			// a < x ? a : x
			return s.ternary(s.newValue2(lt, types.Types[types.TBOOL], a, x), a, x)
		case ir.OMAX:
			// x < a ? a : x
			return s.ternary(s.newValue2(lt, types.Types[types.TBOOL], x, a), a, x)
		}
		panic("unreachable")
	})
}
4364
4365
4366 func (s *state) ternary(cond, x, y *ssa.Value) *ssa.Value {
4367
4368
4369 ternaryVar := ssaMarker("ternary")
4370
4371 bThen := s.f.NewBlock(ssa.BlockPlain)
4372 bElse := s.f.NewBlock(ssa.BlockPlain)
4373 bEnd := s.f.NewBlock(ssa.BlockPlain)
4374
4375 b := s.endBlock()
4376 b.Kind = ssa.BlockIf
4377 b.SetControl(cond)
4378 b.AddEdgeTo(bThen)
4379 b.AddEdgeTo(bElse)
4380
4381 s.startBlock(bThen)
4382 s.vars[ternaryVar] = x
4383 s.endBlock().AddEdgeTo(bEnd)
4384
4385 s.startBlock(bElse)
4386 s.vars[ternaryVar] = y
4387 s.endBlock().AddEdgeTo(bEnd)
4388
4389 s.startBlock(bEnd)
4390 r := s.variable(ternaryVar, x.Type)
4391 delete(s.vars, ternaryVar)
4392 return r
4393 }
4394
4395
4396
4397
4398
// condBranch evaluates the boolean expression cond and branches to yes
// if it is true and to no if it is false, expanding the short-circuit
// operators && , || and ! into explicit control flow rather than
// materializing a bool first.
// likely is a branch-prediction hint (>0 likely true, <0 likely false,
// 0 unknown); it ends up as the generated block's ssa.BranchPrediction.
func (s *state) condBranch(cond ir.Node, yes, no *ssa.Block, likely int8) {
	switch cond.Op() {
	case ir.OANDAND:
		// X && Y: if X is false jump straight to no, otherwise fall
		// into mid and test Y.
		cond := cond.(*ir.LogicalExpr)
		mid := s.f.NewBlock(ssa.BlockPlain)
		s.stmtList(cond.Init())
		// max(likely, 0): a likely-true && makes X likely true; a
		// likely-false && says nothing definite about X alone, so pass
		// "unknown" (0) in that case.
		s.condBranch(cond.X, mid, no, max(likely, 0))
		s.startBlock(mid)
		s.condBranch(cond.Y, yes, no, likely)
		return

	case ir.OOROR:
		// X || Y: if X is true jump straight to yes, otherwise fall
		// into mid and test Y.
		cond := cond.(*ir.LogicalExpr)
		mid := s.f.NewBlock(ssa.BlockPlain)
		s.stmtList(cond.Init())
		// min(likely, 0): mirror of the && case — a likely-false ||
		// makes X likely false; a likely-true || tells us nothing
		// definite about X alone.
		s.condBranch(cond.X, yes, mid, min(likely, 0))
		s.startBlock(mid)
		s.condBranch(cond.Y, yes, no, likely)
		return

	case ir.ONOT:
		// !X: swap the branch targets and invert the prediction.
		cond := cond.(*ir.UnaryExpr)
		s.stmtList(cond.Init())
		s.condBranch(cond.X, no, yes, -likely)
		return
	case ir.OCONVNOP:
		// No-op conversion: branch on the underlying expression.
		cond := cond.(*ir.ConvExpr)
		s.stmtList(cond.Init())
		s.condBranch(cond.X, yes, no, likely)
		return
	}
	// General case: evaluate cond to a bool and emit a two-way branch.
	c := s.expr(cond)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(c)
	b.Likely = ssa.BranchPrediction(likely) // gc and ssa both use -1/0/+1 for likely/unknown/unlikely
	b.AddEdgeTo(yes)
	b.AddEdgeTo(no)
}
4445
// skipMask is a bitmask indicating which words of a slice value
// (pointer, length, capacity) a store may omit; see the skip parameter
// of assign/assignWhichMayOverlap.
type skipMask uint8

const (
	skipPtr skipMask = 1 << iota
	skipLen
	skipCap
)
4453
4454
4455
4456
4457
4458
4459
// assign does left = right, with skip-mask skip. If deref is true the
// assignment is a memory move through right's pointer. The source and
// destination are assumed not to overlap (mayOverlap=false); it simply
// delegates to assignWhichMayOverlap.
func (s *state) assign(left ir.Node, right *ssa.Value, deref bool, skip skipMask) {
	s.assignWhichMayOverlap(left, right, deref, skip, false)
}
// assignWhichMayOverlap does left = right, with skip-mask skip.
// If deref is true, right is an address and the assignment is a memory
// move (*left = *right, or zeroing when right is nil); mayOverlap then
// says whether source and destination may alias.
func (s *state) assignWhichMayOverlap(left ir.Node, right *ssa.Value, deref bool, skip skipMask, mayOverlap bool) {
	if left.Op() == ir.ONAME && ir.IsBlank(left) {
		// Assignments to _ are discarded.
		return
	}
	t := left.Type()
	types.CalcSize(t)
	if s.canSSA(left) {
		if deref {
			s.Fatalf("can SSA LHS %v but not RHS %s", left, right)
		}
		if left.Op() == ir.ODOT {
			// Assigning to a field of an ssa-able struct value:
			// rebuild the whole struct with the new field spliced in.
			// For example, for
			//   type T struct{ a, b, c int }
			//   var x T
			//   x.b = 5
			// we generate x = T{x.a, 5, x.c}.
			left := left.(*ir.SelectorExpr)
			t := left.X.Type()
			nf := t.NumFields()
			idx := fieldIdx(left)

			// Grab the current whole struct value (this also runs any
			// side effects of evaluating left.X).
			old := s.expr(left.X)

			if left.Type().Size() == 0 {
				// Storing into a zero-size field changes nothing.
				return
			}

			// Build the replacement struct: the new value at idx, the
			// old field values everywhere else.
			new := s.newValue0(ssa.OpStructMake, t)

			for i := 0; i < nf; i++ {
				if i == idx {
					new.AddArg(right)
				} else {
					new.AddArg(s.newValue1I(ssa.OpStructSelect, t.FieldType(i), int64(i), old))
				}
			}

			// Recursively assign the rebuilt struct to the base.
			s.assign(left.X, new, false, 0)

			return
		}
		if left.Op() == ir.OINDEX && left.(*ir.IndexExpr).X.Type().IsArray() {
			left := left.(*ir.IndexExpr)
			s.pushLine(left.Pos())
			defer s.popLine()

			// Assigning to an element of an ssa-able array: a[i] = v.
			t := left.X.Type()
			n := t.NumElem()

			i := s.expr(left.Index) // the index expression
			if n == 0 {
				// The bounds check must fail; ignore the actual index
				// and check 0 against a length of 0.
				z := s.constInt(types.Types[types.TINT], 0)
				s.boundsCheck(z, z, ssa.BoundsIndex, false)
				return
			}
			if n != 1 {
				// NOTE(review): arrays of length > 1 presumably aren't
				// ssa-able, so this should be unreachable — yet the
				// assignment is silently dropped here. Confirm against
				// canSSA's array rules.
				return
			}
			if t.Size() == 0 {
				// Zero-size element: nothing to store.
				return
			}

			// Rewrite a[i] = v as a = [1]T{v}, after bounds-checking i
			// against the (constant) length 1.
			len := s.constInt(types.Types[types.TINT], 1)
			s.boundsCheck(i, len, ssa.BoundsIndex, false)
			v := s.newValue1(ssa.OpArrayMake1, t, right)
			s.assign(left.X, v, false, 0)
			return
		}
		left := left.(*ir.Name)
		// Plain ssa-able variable: update the name-to-value map.
		s.vars[left] = right
		s.addNamedValue(left, right)
		return
	}

	// If this assignment clobbers an entire on-stack variable, emit an
	// OpVarDef so liveness analysis knows it is (re)defined here.
	if base, ok := clobberBase(left).(*ir.Name); ok && base.OnStack() && skip == 0 && (t.HasPointers() || ssa.IsMergeCandidate(base)) {
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, base, s.mem(), !ir.IsAutoTmp(base))
	}

	// left is not ssa-able: compute its address and store through it.
	addr := s.addr(left)
	if ir.IsReflectHeaderDataField(left) {
		// Store to the Data field of a reflect slice/string header as
		// an unsafe.Pointer rather than a uintptr — presumably so the
		// pointer-store machinery sees it; confirm against
		// IsReflectHeaderDataField's documentation.
		t = types.Types[types.TUNSAFEPTR]
	}
	if deref {
		// Memory move: *addr = *right, or zero *addr if right is nil.
		if right == nil {
			s.zero(t, addr)
		} else {
			s.moveWhichMayOverlap(t, addr, right, mayOverlap)
		}
		return
	}
	// Plain store of the value.
	s.storeType(t, addr, right, skip, !ir.IsAutoTmp(left))
}
4588
4589
4590 func (s *state) zeroVal(t *types.Type) *ssa.Value {
4591 if t.Size() == 0 {
4592 return s.entryNewValue0(ssa.OpEmpty, t)
4593 }
4594 switch {
4595 case t.IsInteger():
4596 switch t.Size() {
4597 case 1:
4598 return s.constInt8(t, 0)
4599 case 2:
4600 return s.constInt16(t, 0)
4601 case 4:
4602 return s.constInt32(t, 0)
4603 case 8:
4604 return s.constInt64(t, 0)
4605 default:
4606 s.Fatalf("bad sized integer type %v", t)
4607 }
4608 case t.IsFloat():
4609 switch t.Size() {
4610 case 4:
4611 return s.constFloat32(t, 0)
4612 case 8:
4613 return s.constFloat64(t, 0)
4614 default:
4615 s.Fatalf("bad sized float type %v", t)
4616 }
4617 case t.IsComplex():
4618 switch t.Size() {
4619 case 8:
4620 z := s.constFloat32(types.Types[types.TFLOAT32], 0)
4621 return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
4622 case 16:
4623 z := s.constFloat64(types.Types[types.TFLOAT64], 0)
4624 return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
4625 default:
4626 s.Fatalf("bad sized complex type %v", t)
4627 }
4628
4629 case t.IsString():
4630 return s.constEmptyString(t)
4631 case t.IsPtrShaped():
4632 return s.constNil(t)
4633 case t.IsBoolean():
4634 return s.constBool(false)
4635 case t.IsInterface():
4636 return s.constInterface(t)
4637 case t.IsSlice():
4638 return s.constSlice(t)
4639 case isStructNotSIMD(t):
4640 n := t.NumFields()
4641 v := s.entryNewValue0(ssa.OpStructMake, t)
4642 for i := 0; i < n; i++ {
4643 v.AddArg(s.zeroVal(t.FieldType(i)))
4644 }
4645 return v
4646 case t.IsArray() && t.NumElem() == 1:
4647 return s.entryNewValue1(ssa.OpArrayMake1, t, s.zeroVal(t.Elem()))
4648 case t.IsSIMD():
4649 return s.newValue0(ssa.OpZeroSIMD, t)
4650 }
4651 s.Fatalf("zero for type %v not implemented", t)
4652 return nil
4653 }
4654
// callKind distinguishes the different ways a call can be lowered.
type callKind int8

const (
	// callNormal is an ordinary call.
	callNormal callKind = iota
	// callDefer is a call issued via defer.
	callDefer
	// callDeferStack is a deferred call whose defer record is
	// presumably stack-allocated (name-based; confirm against the
	// defer lowering).
	callDeferStack
	// callGo is a call issued via the go statement.
	callGo
	// callTail is a tail call.
	callTail
)
4664
// sfRtCallDef describes the runtime helper implementing one soft-float
// operation: the function's symbol and the Go kind of its result.
type sfRtCallDef struct {
	rtfn  *obj.LSym
	rtype types.Kind
}

// softFloatOps maps floating-point SSA ops to their soft-float runtime
// helpers. It is populated by softfloatInit.
var softFloatOps map[ssa.Op]sfRtCallDef
4671
// softfloatInit populates softFloatOps with the runtime helpers used to
// implement floating-point SSA ops on soft-float targets.
func softfloatInit() {
	// Note that several mappings look deliberately "wrong": Sub maps to
	// fadd, Neq to feq, and Less/Leq to fgt/fge. The code that emits
	// these calls is presumably expected to compensate — negating an
	// operand, inverting the result, or swapping arguments — confirm
	// against the soft-float call emitter (sfcall).
	softFloatOps = map[ssa.Op]sfRtCallDef{
		ssa.OpAdd32F: {typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
		ssa.OpAdd64F: {typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
		ssa.OpSub32F: {typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
		ssa.OpSub64F: {typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
		ssa.OpMul32F: {typecheck.LookupRuntimeFunc("fmul32"), types.TFLOAT32},
		ssa.OpMul64F: {typecheck.LookupRuntimeFunc("fmul64"), types.TFLOAT64},
		ssa.OpDiv32F: {typecheck.LookupRuntimeFunc("fdiv32"), types.TFLOAT32},
		ssa.OpDiv64F: {typecheck.LookupRuntimeFunc("fdiv64"), types.TFLOAT64},

		ssa.OpEq64F:   {typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
		ssa.OpEq32F:   {typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
		ssa.OpNeq64F:  {typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
		ssa.OpNeq32F:  {typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
		ssa.OpLess64F: {typecheck.LookupRuntimeFunc("fgt64"), types.TBOOL},
		ssa.OpLess32F: {typecheck.LookupRuntimeFunc("fgt32"), types.TBOOL},
		ssa.OpLeq64F:  {typecheck.LookupRuntimeFunc("fge64"), types.TBOOL},
		ssa.OpLeq32F:  {typecheck.LookupRuntimeFunc("fge32"), types.TBOOL},

		// Conversions between floats and (unsigned) integers, and
		// between the two float widths.
		ssa.OpCvt32to32F:  {typecheck.LookupRuntimeFunc("fint32to32"), types.TFLOAT32},
		ssa.OpCvt32Fto32:  {typecheck.LookupRuntimeFunc("f32toint32"), types.TINT32},
		ssa.OpCvt64to32F:  {typecheck.LookupRuntimeFunc("fint64to32"), types.TFLOAT32},
		ssa.OpCvt32Fto64:  {typecheck.LookupRuntimeFunc("f32toint64"), types.TINT64},
		ssa.OpCvt64Uto32F: {typecheck.LookupRuntimeFunc("fuint64to32"), types.TFLOAT32},
		ssa.OpCvt32Fto64U: {typecheck.LookupRuntimeFunc("f32touint64"), types.TUINT64},
		ssa.OpCvt32to64F:  {typecheck.LookupRuntimeFunc("fint32to64"), types.TFLOAT64},
		ssa.OpCvt64Fto32:  {typecheck.LookupRuntimeFunc("f64toint32"), types.TINT32},
		ssa.OpCvt64to64F:  {typecheck.LookupRuntimeFunc("fint64to64"), types.TFLOAT64},
		ssa.OpCvt64Fto64:  {typecheck.LookupRuntimeFunc("f64toint64"), types.TINT64},
		ssa.OpCvt64Uto64F: {typecheck.LookupRuntimeFunc("fuint64to64"), types.TFLOAT64},
		ssa.OpCvt64Fto64U: {typecheck.LookupRuntimeFunc("f64touint64"), types.TUINT64},
		ssa.OpCvt32Fto64F: {typecheck.LookupRuntimeFunc("f32to64"), types.TFLOAT64},
		ssa.OpCvt64Fto32F: {typecheck.LookupRuntimeFunc("f64to32"), types.TFLOAT32},
	}
}
4709
4710
4711
// sfcall rewrites floating-point op into a call to a software-float runtime
// helper, using the softFloatOps table built by softfloatInit. It reports
// whether op had a softfloat implementation; on false the caller should
// emit the op normally.
func (s *state) sfcall(op ssa.Op, args ...*ssa.Value) (*ssa.Value, bool) {
	// f2i maps a float type to the unsigned integer type of the same width;
	// the helpers operate on raw bit patterns.
	f2i := func(t *types.Type) *types.Type {
		switch t.Kind() {
		case types.TFLOAT32:
			return types.Types[types.TUINT32]
		case types.TFLOAT64:
			return types.Types[types.TUINT64]
		}
		return t
	}

	if callDef, ok := softFloatOps[op]; ok {
		switch op {
		case ssa.OpLess32F,
			ssa.OpLess64F,
			ssa.OpLeq32F,
			ssa.OpLeq64F:
			// Less/Leq are mapped to fgt/fge, so swap the operands.
			args[0], args[1] = args[1], args[0]
		case ssa.OpSub32F,
			ssa.OpSub64F:
			// Sub is mapped to fadd; negate the second operand.
			args[1] = s.newValue1(s.ssaOp(ir.ONEG, types.Types[callDef.rtype]), args[1].Type, args[1])
		}

		// Convert float arguments to their same-width integer types
		// bit-for-bit (OpCopy between same-width types) so they are
		// passed in integer registers.
		for i, a := range args {
			if a.Type.IsFloat() {
				args[i] = s.newValue1(ssa.OpCopy, f2i(a.Type), a)
			}
		}

		rt := types.Types[callDef.rtype]
		result := s.rtcall(callDef.rtfn, true, []*types.Type{f2i(rt)}, args...)[0]
		if rt.IsFloat() {
			// Move the integer result bits back into a float value.
			result = s.newValue1(ssa.OpCopy, rt, result)
		}
		if op == ssa.OpNeq32F || op == ssa.OpNeq64F {
			// Neq is mapped to feq; invert the result.
			result = s.newValue1(ssa.OpNot, result.Type, result)
		}
		return result, true
	}
	return nil, false
}
4755
4756
4757 func (s *state) split(v *ssa.Value) (*ssa.Value, *ssa.Value) {
4758 p0 := s.newValue1(ssa.OpSelect0, v.Type.FieldType(0), v)
4759 p1 := s.newValue1(ssa.OpSelect1, v.Type.FieldType(1), v)
4760 return p0, p1
4761 }
4762
4763
4764 func (s *state) intrinsicCall(n *ir.CallExpr) *ssa.Value {
4765 v := findIntrinsic(n.Fun.Sym())(s, n, s.intrinsicArgs(n))
4766 if ssa.IntrinsicsDebug > 0 {
4767 x := v
4768 if x == nil {
4769 x = s.mem()
4770 }
4771 if x.Op == ssa.OpSelect0 || x.Op == ssa.OpSelect1 {
4772 x = x.Args[0]
4773 }
4774 base.WarnfAt(n.Pos(), "intrinsic substitution for %v with %s", n.Fun.Sym().Name, x.LongString())
4775 }
4776 return v
4777 }
4778
4779
4780 func (s *state) intrinsicArgs(n *ir.CallExpr) []*ssa.Value {
4781 args := make([]*ssa.Value, len(n.Args))
4782 for i, n := range n.Args {
4783 args[i] = s.expr(n)
4784 }
4785 return args
4786 }
4787
4788
4789
4790
4791
4792
4793
// openDeferRecord handles a defer statement that is being implemented as an
// open-coded defer: it evaluates and saves the deferred function value at
// the point of the defer statement, appends an openDeferInfo entry so
// openDeferExit can emit the actual call on every exit path, and sets the
// corresponding bit in the deferBits bookkeeping variable. Open-coded
// defers support only argumentless, resultless direct function calls.
func (s *state) openDeferRecord(n *ir.CallExpr) {
	if len(n.Args) != 0 || n.Op() != ir.OCALLFUNC || n.Fun.Type().NumResults() != 0 {
		s.Fatalf("defer call with arguments or results: %v", n)
	}

	opendefer := &openDeferInfo{
		n: n,
	}
	fn := n.Fun
	// The function value is always spilled to a stack slot (openDeferSave)
	// so it is reachable during a panic. When fn is a direct reference to
	// a function (ONAME/PFUNC), the call target is known statically and
	// opendefer.closure is left nil; only indirect calls record it.
	closureVal := s.expr(fn)
	closure := s.openDeferSave(fn.Type(), closureVal)
	opendefer.closureNode = closure.Aux.(*ir.Name)
	if !(fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC) {
		opendefer.closure = closure
	}
	index := len(s.openDefers)
	s.openDefers = append(s.openDefers, opendefer)

	// Set bit 'index' of deferBits to mark this defer as active. The new
	// value is tracked both as an SSA variable and stored to its stack
	// slot (the slot is what panic processing consults at runtime).
	bitvalue := s.constInt8(types.Types[types.TUINT8], 1<<uint(index))
	newDeferBits := s.newValue2(ssa.OpOr8, types.Types[types.TUINT8], s.variable(deferBitsVar, types.Types[types.TUINT8]), bitvalue)
	s.vars[deferBitsVar] = newDeferBits
	s.store(types.Types[types.TUINT8], s.deferBitsAddr, newDeferBits)
}
4822
4823
4824
4825
4826
4827
// openDeferSave spills val (an SSA-able, pointer-containing value of type t,
// i.e. the deferred function value) to a newly created named stack slot, so
// the runtime can find it during a panic. It returns the address of the slot.
// The slot's VarDef/VarLive and address computation are emitted in the entry
// block so the slot is considered live on all paths, not only after the
// defer statement executes.
func (s *state) openDeferSave(t *types.Type, val *ssa.Value) *ssa.Value {
	if !ssa.CanSSA(t) {
		s.Fatalf("openDeferSave of non-SSA-able type %v val=%v", t, val)
	}
	if !t.HasPointers() {
		s.Fatalf("openDeferSave of pointerless type %v val=%v", t, val)
	}
	pos := val.Pos
	temp := typecheck.TempAt(pos.WithNotStmt(), s.curfn, t)
	temp.SetOpenDeferSlot(true)
	// NOTE(review): FrameOffset here records the open-defer index, not a
	// real frame offset — presumably used to order the slots during stack
	// frame layout; confirm against the frame layout code.
	temp.SetFrameOffset(int64(len(s.openDefers)))
	var addrTemp *ssa.Value
	// Emit the VarDef/VarLive and address in the entry block, splicing
	// directly into the entry block's memory chain if we're already past it.
	if s.curBlock.ID != s.f.Entry.ID {
		// Past the entry block: build the ops on the entry block's saved
		// memory variable rather than the current one. Positions are
		// NoXPos so these synthetic ops aren't treated as statements.
		if t.HasPointers() {
			s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarDef, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
		}
		s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarLive, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
		addrTemp = s.f.Entry.NewValue2A(src.NoXPos, ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.defvars[s.f.Entry.ID][memVar])
	} else {
		// Still in the entry block: emit through the normal helpers
		// (non-statement positions).
		if t.HasPointers() {
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, temp, s.mem(), false)
		}
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, temp, s.mem(), false)
		addrTemp = s.newValue2Apos(ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.mem(), false)
	}

	// The slot is live from function entry but may not be written until
	// the defer statement executes, so it must be zeroed at entry to avoid
	// stale pointers being observed during an early panic.
	temp.SetNeedzero(true)

	// Store the deferred function value into the slot.
	s.store(t, addrTemp, val)
	return addrTemp
}
4872
4873
4874
4875
4876
// openDeferExit generates SSA for the calls of all open-coded defers at a
// function exit. Defers are tested and run in reverse registration order
// (LIFO, matching defer semantics): for each one, a bit of deferBits is
// checked and, if set, cleared before the deferred function is called.
func (s *state) openDeferExit() {
	deferExit := s.f.NewBlock(ssa.BlockPlain)
	s.endBlock().AddEdgeTo(deferExit)
	s.startBlock(deferExit)
	// Record this exit sequence; NOTE(review): presumably a later exit
	// with the same defer count can jump here instead of duplicating the
	// sequence — confirm at the use of lastDeferExit/lastDeferCount.
	s.lastDeferExit = deferExit
	s.lastDeferCount = len(s.openDefers)
	zeroval := s.constInt8(types.Types[types.TUINT8], 0)
	// Run defers in reverse order.
	for i := len(s.openDefers) - 1; i >= 0; i-- {
		r := s.openDefers[i]
		bCond := s.f.NewBlock(ssa.BlockPlain)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		deferBits := s.variable(deferBitsVar, types.Types[types.TUINT8])

		// Skip the call (branch to bEnd) if bit i of deferBits is clear,
		// i.e. this defer was never activated.
		bitval := s.constInt8(types.Types[types.TUINT8], 1<<uint(i))
		andval := s.newValue2(ssa.OpAnd8, types.Types[types.TUINT8], deferBits, bitval)
		eqVal := s.newValue2(ssa.OpEq8, types.Types[types.TBOOL], andval, zeroval)
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(eqVal)
		b.AddEdgeTo(bEnd)
		b.AddEdgeTo(bCond)
		bCond.AddEdgeTo(bEnd)
		s.startBlock(bCond)

		// Clear bit i before making the call, so that if the deferred
		// function itself panics, this defer is not run a second time.
		// Both the stack slot and the SSA variable are updated.
		nbitval := s.newValue1(ssa.OpCom8, types.Types[types.TUINT8], bitval)
		maskedval := s.newValue2(ssa.OpAnd8, types.Types[types.TUINT8], deferBits, nbitval)
		s.store(types.Types[types.TUINT8], s.deferBitsAddr, maskedval)

		s.vars[deferBitsVar] = maskedval

		// Emit the call: a closure call when the function value was saved
		// (indirect defer), otherwise a static call to the known target.
		fn := r.n.Fun
		stksize := fn.Type().ArgWidth()
		var callArgs []*ssa.Value
		var call *ssa.Value
		if r.closure != nil {
			v := s.load(r.closure.Type.Elem(), r.closure)
			s.maybeNilCheckClosure(v, callDefer)
			codeptr := s.rawLoad(types.Types[types.TUINTPTR], v)
			aux := ssa.ClosureAuxCall(s.f.ABIDefault.ABIAnalyzeTypes(nil, nil))
			call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, v)
		} else {
			aux := ssa.StaticAuxCall(fn.(*ir.Name).Linksym(), s.f.ABIDefault.ABIAnalyzeTypes(nil, nil))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		}
		callArgs = append(callArgs, s.mem())
		call.AddArgs(callArgs...)
		call.AuxInt = stksize
		s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, 0, call)

		// Keep the closure's stack slot alive through the call so a panic
		// inside the deferred function can still find it.
		if r.closureNode != nil {
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, r.closureNode, s.mem(), false)
		}

		s.endBlock()
		s.startBlock(bEnd)
	}
}
4946
// callResult lowers call n according to k and returns the value of the
// call's (first) result, or nil when the call has no results or is not a
// normal call (see s.call).
func (s *state) callResult(n *ir.CallExpr, k callKind) *ssa.Value {
	return s.call(n, k, false, nil)
}
4950
// callAddr lowers call n according to k and returns the address of the
// call's (first) result rather than the result value itself.
func (s *state) callAddr(n *ir.CallExpr, k callKind) *ssa.Value {
	return s.call(n, k, true, nil)
}
4954
4955
4956
// call lowers a function/method/interface call n to SSA according to k
// (normal, go, defer, stack-allocated defer, or tail call). If
// returnResultAddr is set it returns the address of the first result
// instead of its value. deferExtra, if non-nil, is an extra argument
// passed along to deferprocat. Returns nil when the call has no results
// or is not a normal call.
func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool, deferExtra ir.Expr) *ssa.Value {
	s.prevCall = nil
	var calleeLSym *obj.LSym // target symbol, when known statically
	var closure *ssa.Value   // func value to call, for indirect calls
	var codeptr *ssa.Value   // code pointer, for indirect/interface calls
	var dextra *ssa.Value    // evaluated deferExtra
	var rcvr *ssa.Value      // receiver, for interface calls
	fn := n.Fun
	var ACArgs []*types.Type   // argument types for the AuxCall
	var ACResults []*types.Type // result types for the AuxCall
	var callArgs []*ssa.Value  // argument values passed directly on the call (late call expansion)

	callABI := s.f.ABIDefault

	if k != callNormal && k != callTail && (len(n.Args) != 0 || n.Op() == ir.OCALLINTER || n.Fun.Type().NumResults() != 0) {
		s.Fatalf("go/defer call with arguments: %v", n)
	}

	isCallDeferRangeFunc := false

	switch n.Op() {
	case ir.OCALLFUNC:
		if (k == callNormal || k == callTail) && fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC {
			fn := fn.(*ir.Name)
			calleeLSym = callTargetLSym(fn)
			if buildcfg.Experiment.RegabiArgs {
				// A static call may target a function with a
				// non-default ABI. fn.Func can be nil for some
				// compiler-generated functions; those use the
				// default ABI.
				if fn.Func != nil {
					callABI = abiForFunc(fn.Func, s.f.ABI0, s.f.ABI1)
				}
			} else {
				// Without the regabi experiment, honor the
				// //go:registerparams pragma (imported or local).
				inRegistersImported := fn.Pragma()&ir.RegisterParams != 0
				inRegistersSamePackage := fn.Func != nil && fn.Func.Pragma&ir.RegisterParams != 0
				if inRegistersImported || inRegistersSamePackage {
					callABI = s.f.ABI1
				}
			}
			if fn := n.Fun.Sym().Name; n.Fun.Sym().Pkg == ir.Pkgs.Runtime && fn == "deferrangefunc" {
				isCallDeferRangeFunc = true
			}
			break
		}
		closure = s.expr(fn)
		if k != callDefer && k != callDeferStack {
			// For defers, a nil function must panic when the deferred
			// call is invoked, not at the defer statement, so the check
			// is skipped here.
			s.maybeNilCheckClosure(closure, k)
		}
	case ir.OCALLINTER:
		if fn.Op() != ir.ODOTINTER {
			s.Fatalf("OCALLINTER: n.Left not an ODOTINTER: %v", fn.Op())
		}
		fn := fn.(*ir.SelectorExpr)
		var iclosure *ssa.Value
		iclosure, rcvr = s.getClosureAndRcvr(fn)
		if k == callNormal || k == callTail {
			codeptr = s.load(types.Types[types.TUINTPTR], iclosure)
		} else {
			closure = iclosure
		}
	}
	if deferExtra != nil {
		dextra = s.expr(deferExtra)
	}

	params := callABI.ABIAnalyze(n.Fun.Type(), false )
	types.CalcSize(fn.Type())
	stksize := params.ArgWidth() // stack space for args, per the chosen ABI

	res := n.Fun.Type().Results()
	if k == callNormal || k == callTail {
		for _, p := range params.OutParams() {
			ACResults = append(ACResults, p.Type)
		}
	}

	var call *ssa.Value
	if k == callDeferStack {
		if stksize != 0 {
			s.Fatalf("deferprocStack with non-zero stack size %d: %v", stksize, n)
		}
		// Make a _defer record on the stack and store the function value
		// into its fn field.
		t := deferstruct()
		n, addr := s.temp(n.Pos(), t)
		n.SetNonMergeable(true)
		s.store(closure.Type,
			s.newValue1I(ssa.OpOffPtr, closure.Type.PtrTo(), t.FieldOff(deferStructFnField), addr),
			closure)

		// Call runtime.deferprocStack with a pointer to the record.
		ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
		aux := ssa.StaticAuxCall(ir.Syms.DeferprocStack, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
		callArgs = append(callArgs, addr, s.mem())
		call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		call.AddArgs(callArgs...)
		call.AuxInt = int64(types.PtrSize)
	} else {
		// Collect call arguments (closure and extra for go/defer, receiver
		// for interface calls, then the ordinary args) in SP-offset order.
		argStart := base.Ctxt.Arch.FixedFrameSize
		// go/defer: the function value is the first argument to
		// newproc/deferproc.
		if k != callNormal && k != callTail {

			ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
			callArgs = append(callArgs, closure)
			stksize += int64(types.PtrSize)
			argStart += int64(types.PtrSize)
			if dextra != nil {
				// Extra interface-typed argument for deferprocat.
				ACArgs = append(ACArgs, types.Types[types.TINTER])
				callArgs = append(callArgs, dextra)
				stksize += 2 * int64(types.PtrSize)
				argStart += 2 * int64(types.PtrSize)
			}
		}

		// Receiver for interface calls.
		if rcvr != nil {
			callArgs = append(callArgs, rcvr)
		}

		// Ordinary arguments.
		t := n.Fun.Type()
		args := n.Args

		for _, p := range params.InParams() {
			ACArgs = append(ACArgs, p.Type)
		}

		// If this call is in the entry block and the function has open
		// defers, split off a new block first: openDeferSave splices ops
		// into the entry block's memory chain, which must not interleave
		// with this call's argument evaluation.
		if s.curBlock.ID == s.f.Entry.ID && s.hasOpenDefers {
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			curb := s.f.NewBlock(ssa.BlockPlain)
			b.AddEdgeTo(curb)
			s.startBlock(curb)
		}

		for i, n := range args {
			callArgs = append(callArgs, s.putArg(n, t.Param(i).Type))
		}

		callArgs = append(callArgs, s.mem())

		// Pick the call target/op.
		switch {
		case k == callDefer:
			sym := ir.Syms.Deferproc
			if dextra != nil {
				sym = ir.Syms.Deferprocat
			}
			aux := ssa.StaticAuxCall(sym, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		case k == callGo:
			aux := ssa.StaticAuxCall(ir.Syms.Newproc, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		case closure != nil:
			// Indirect call through a func value: rawLoad of the code
			// pointer (the load from a closure is always safe, and must
			// not be instrumented in a way that clobbers already-placed
			// arguments).
			codeptr = s.rawLoad(types.Types[types.TUINTPTR], closure)
			aux := ssa.ClosureAuxCall(callABI.ABIAnalyzeTypes(ACArgs, ACResults))
			call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, closure)
		case codeptr != nil:
			// Interface call through the itab-loaded code pointer.
			aux := ssa.InterfaceAuxCall(params)
			call = s.newValue1A(ssa.OpInterLECall, aux.LateExpansionResultType(), aux, codeptr)
			if k == callTail {
				call.Op = ssa.OpTailLECallInter
				stksize = 0
			}
		case calleeLSym != nil:
			aux := ssa.StaticAuxCall(calleeLSym, params)
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
			if k == callTail {
				call.Op = ssa.OpTailLECall
				stksize = 0
			}
		default:
			s.Fatalf("bad call type %v %v", n.Op(), n)
		}
		call.AddArgs(callArgs...)
		call.AuxInt = stksize // the call carries its argument stack size
	}
	s.prevCall = call
	s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(ACResults)), call)
	// Keep requested variables alive across the call.
	for _, v := range n.KeepAlive {
		if !v.Addrtaken() {
			s.Fatalf("KeepAlive variable %v must have Addrtaken set", v)
		}
		switch v.Class {
		case ir.PAUTO, ir.PPARAM, ir.PPARAMOUT:
		default:
			s.Fatalf("KeepAlive variable %v must be Auto or Arg", v)
		}
		s.vars[memVar] = s.newValue1A(ssa.OpVarLive, types.TypeMem, v, s.mem())
	}

	// Defer-like calls end the block with BlockDefer, which has two
	// successors: the normal continuation and a shared deferreturn/exit
	// block used when a recover happens.
	if k == callDefer || k == callDeferStack || isCallDeferRangeFunc {
		b := s.endBlock()
		b.Kind = ssa.BlockDefer
		b.SetControl(call)
		bNext := s.f.NewBlock(ssa.BlockPlain)
		b.AddEdgeTo(bNext)
		r := s.f.DeferReturn
		if r == nil {
			// Lazily create the single shared exit block for recovers.
			r = s.f.NewBlock(ssa.BlockPlain)
			s.startBlock(r)
			s.exit()
			s.f.DeferReturn = r
		}
		b.AddEdgeTo(r)
		b.Likely = ssa.BranchLikely // most calls do not recover
		s.startBlock(bNext)
	}

	if len(res) == 0 || k != callNormal {
		// No result to return.
		return nil
	}
	fp := res[0]
	if returnResultAddr {
		return s.resultAddrOfCall(call, 0, fp.Type)
	}
	return s.newValue1I(ssa.OpSelectN, fp.Type, 0, call)
}
5194
5195
5196
// maybeNilCheckClosure emits an explicit nil check of a closure value in
// the architecture-dependent cases that require one. Note the condition
// groups as Wasm || (aix && k != callGo): every closure is checked on
// Wasm, and on AIX all except go statements are checked — NOTE(review):
// presumably the runtime itself reports "go of nil func value" there;
// confirm.
func (s *state) maybeNilCheckClosure(closure *ssa.Value, k callKind) {
	if Arch.LinkArch.Family == sys.Wasm || buildcfg.GOOS == "aix" && k != callGo {


		s.nilCheck(closure)
	}
}
5204
5205
5206
// getClosureAndRcvr returns, for an interface method selection fn (x.M),
// the address of the method's entry in the itab's Fun array (usable as a
// closure for the call) and the receiver, i.e. the interface's data word.
// The itab is nil-checked, so a nil interface panics here.
func (s *state) getClosureAndRcvr(fn *ir.SelectorExpr) (*ssa.Value, *ssa.Value) {
	i := s.expr(fn.X)
	itab := s.newValue1(ssa.OpITab, types.Types[types.TUINTPTR], i)
	s.nilCheck(itab)
	// Offset of the method's slot within the itab's Fun array.
	itabidx := fn.Offset() + rttype.ITab.OffsetOf("Fun")
	closure := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.UintptrPtr, itabidx, itab)
	rcvr := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, i)
	return closure, rcvr
}
5216
5217
5218
5219 func etypesign(e types.Kind) int8 {
5220 switch e {
5221 case types.TINT8, types.TINT16, types.TINT32, types.TINT64, types.TINT:
5222 return -1
5223 case types.TUINT8, types.TUINT16, types.TUINT32, types.TUINT64, types.TUINT, types.TUINTPTR, types.TUNSAFEPTR:
5224 return +1
5225 }
5226 return 0
5227 }
5228
5229
5230
// addr converts the address of the expression n to SSA and returns the
// resulting pointer value. It emits any checks required to form the
// address (nil checks for dereferences, bounds checks for indexing).
func (s *state) addr(n ir.Node) *ssa.Value {
	if n.Op() != ir.ONAME {
		s.pushLine(n.Pos())
		defer s.popLine()
	}

	if s.canSSA(n) {
		// NOTE(review): an SSA-able value has no stable memory address;
		// this returns the address of the runtime zerobase symbol as a
		// placeholder — presumably this path is only reachable where the
		// address is never actually dereferenced. Confirm with callers.
		return s.newValue1A(ssa.OpAddr, n.Type().PtrTo(), ir.Syms.Zerobase, s.sb)
	}

	t := types.NewPtr(n.Type())
	// linksymOffset forms the address of global symbol lsym plus offset,
	// as entry-block values so they are available everywhere.
	linksymOffset := func(lsym *obj.LSym, offset int64) *ssa.Value {
		v := s.entryNewValue1A(ssa.OpAddr, t, lsym, s.sb)

		if offset != 0 {
			v = s.entryNewValue1I(ssa.OpOffPtr, v.Type, offset, v)
		}
		return v
	}
	switch n.Op() {
	case ir.OLINKSYMOFFSET:
		// Address of a known linker symbol + constant offset.
		no := n.(*ir.LinksymOffsetExpr)
		return linksymOffset(no.Linksym, no.Offset_)
	case ir.ONAME:
		n := n.(*ir.Name)
		if n.Heapaddr != nil {
			// Heap-escaped variable: its address lives in Heapaddr.
			return s.expr(n.Heapaddr)
		}
		switch n.Class {
		case ir.PEXTERN:
			// Global variable.
			return linksymOffset(n.Linksym(), 0)
		case ir.PPARAM:
			// Parameter slot; address was precomputed in s.decladdrs.
			v := s.decladdrs[n]
			if v != nil {
				return v
			}
			s.Fatalf("addr of undeclared ONAME %v. declared: %v", n, s.decladdrs)
			return nil
		case ir.PAUTO:
			return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), !ir.IsAutoTmp(n))

		case ir.PPARAMOUT:
			// Result slot: always a wanted (symbol-effect) LocalAddr.
			return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), true)
		default:
			s.Fatalf("variable address class %v not implemented", n.Class)
			return nil
		}
	case ir.ORESULT:
		// Address of a result slot of the previous call.
		n := n.(*ir.ResultExpr)
		return s.resultAddrOfCall(s.prevCall, n.Index, n.Type())
	case ir.OINDEX:
		n := n.(*ir.IndexExpr)
		if n.X.Type().IsSlice() {
			// Slice indexing: bounds-check against the dynamic length,
			// then index off the slice's data pointer.
			a := s.expr(n.X)
			i := s.expr(n.Index)
			len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], a)
			i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
			p := s.newValue1(ssa.OpSlicePtr, t, a)
			return s.newValue2(ssa.OpPtrIndex, t, p, i)
		} else {
			// Array indexing: bounds-check against the constant length,
			// then index off the array's address.
			a := s.addr(n.X)
			i := s.expr(n.Index)
			len := s.constInt(types.Types[types.TINT], n.X.Type().NumElem())
			i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
			return s.newValue2(ssa.OpPtrIndex, types.NewPtr(n.X.Type().Elem()), a, i)
		}
	case ir.ODEREF:
		n := n.(*ir.StarExpr)
		return s.exprPtr(n.X, n.Bounded(), n.Pos())
	case ir.ODOT:
		// Field of an addressable value: base address + field offset.
		n := n.(*ir.SelectorExpr)
		p := s.addr(n.X)
		return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
	case ir.ODOTPTR:
		// Field through a pointer: nil-checked pointer + field offset.
		n := n.(*ir.SelectorExpr)
		p := s.exprPtr(n.X, n.Bounded(), n.Pos())
		return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
	case ir.OCONVNOP:
		n := n.(*ir.ConvExpr)
		if n.Type() == n.X.Type() {
			return s.addr(n.X)
		}
		// Same layout, different type: retype the address with OpCopy.
		addr := s.addr(n.X)
		return s.newValue1(ssa.OpCopy, t, addr)
	case ir.OCALLFUNC, ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		return s.callAddr(n, callNormal)
	case ir.ODOTTYPE, ir.ODYNAMICDOTTYPE:
		// The assertion result was materialized as a load; its address
		// is the load's address operand, valid only while the memory
		// state is unchanged since that load.
		var v *ssa.Value
		if n.Op() == ir.ODOTTYPE {
			v, _ = s.dottype(n.(*ir.TypeAssertExpr), false)
		} else {
			v, _ = s.dynamicDottype(n.(*ir.DynamicTypeAssertExpr), false)
		}
		if v.Op != ssa.OpLoad {
			s.Fatalf("dottype of non-load")
		}
		if v.Args[1] != s.mem() {
			s.Fatalf("memory no longer live from dottype load")
		}
		return v.Args[0]
	default:
		s.Fatalf("unhandled addr %v", n.Op())
		return nil
	}
}
5350
5351
5352
5353 func (s *state) canSSA(n ir.Node) bool {
5354 if base.Flag.N != 0 {
5355 return false
5356 }
5357 for {
5358 nn := n
5359 if nn.Op() == ir.ODOT {
5360 nn := nn.(*ir.SelectorExpr)
5361 n = nn.X
5362 continue
5363 }
5364 if nn.Op() == ir.OINDEX {
5365 nn := nn.(*ir.IndexExpr)
5366 if nn.X.Type().IsArray() {
5367 n = nn.X
5368 continue
5369 }
5370 }
5371 break
5372 }
5373 if n.Op() != ir.ONAME {
5374 return false
5375 }
5376 return s.canSSAName(n.(*ir.Name)) && ssa.CanSSA(n.Type())
5377 }
5378
5379 func (s *state) canSSAName(name *ir.Name) bool {
5380 if name.Addrtaken() || !name.OnStack() {
5381 return false
5382 }
5383 switch name.Class {
5384 case ir.PPARAMOUT:
5385 if s.hasdefer {
5386
5387
5388
5389
5390
5391 return false
5392 }
5393 if s.cgoUnsafeArgs {
5394
5395
5396 return false
5397 }
5398 }
5399 return true
5400
5401 }
5402
5403
5404 func (s *state) exprPtr(n ir.Node, bounded bool, lineno src.XPos) *ssa.Value {
5405 p := s.expr(n)
5406 if bounded || n.NonNil() {
5407 if s.f.Frontend().Debug_checknil() && lineno.Line() > 1 {
5408 s.f.Warnl(lineno, "removed nil check")
5409 }
5410 return p
5411 }
5412 p = s.nilCheck(p)
5413 return p
5414 }
5415
5416
5417
5418
5419
5420
// nilCheck generates nil-pointer-checking code for ptr, unless checks are
// disabled globally (debug flag) or for the current function. It returns
// the checked value (the OpNilCheck result), which callers should use in
// place of ptr.
func (s *state) nilCheck(ptr *ssa.Value) *ssa.Value {
	if base.Debug.DisableNil != 0 || s.curfn.NilCheckDisabled() {
		return ptr
	}
	return s.newValue2(ssa.OpNilCheck, ptr.Type, ptr, s.mem())
}
5427
5428
5429
5430
5431
5432
5433
// boundsCheck generates bounds-checking code for an index or slice
// operation: 0 <= idx < len for index kinds, 0 <= idx <= len for slice
// kinds, branching to a panic block on failure. idx is first extended to
// the platform int width. When bounded is true (the caller has already
// established the index is in range) or bounds checks are disabled via
// -B, no check is emitted. Returns the (extended, and possibly
// Spectre-masked) index to use.
func (s *state) boundsCheck(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
	idx = s.extendIndex(idx, len, kind, bounded)

	if bounded || base.Flag.B != 0 {
		// No check needed; note that in this case the index is also not
		// Spectre-masked below.
		return idx
	}

	bNext := s.f.NewBlock(ssa.BlockPlain)
	bPanic := s.f.NewBlock(ssa.BlockExit)

	// For an unsigned index, switch to the unsigned variant of the bounds
	// kind so the runtime panic message reports the value correctly.
	if !idx.Type.IsSigned() {
		switch kind {
		case ssa.BoundsIndex:
			kind = ssa.BoundsIndexU
		case ssa.BoundsSliceAlen:
			kind = ssa.BoundsSliceAlenU
		case ssa.BoundsSliceAcap:
			kind = ssa.BoundsSliceAcapU
		case ssa.BoundsSliceB:
			kind = ssa.BoundsSliceBU
		case ssa.BoundsSlice3Alen:
			kind = ssa.BoundsSlice3AlenU
		case ssa.BoundsSlice3Acap:
			kind = ssa.BoundsSlice3AcapU
		case ssa.BoundsSlice3B:
			kind = ssa.BoundsSlice3BU
		case ssa.BoundsSlice3C:
			kind = ssa.BoundsSlice3CU
		}
	}

	// Index kinds check idx < len; slice kinds check idx <= len.
	var cmp *ssa.Value
	if kind == ssa.BoundsIndex || kind == ssa.BoundsIndexU {
		cmp = s.newValue2(ssa.OpIsInBounds, types.Types[types.TBOOL], idx, len)
	} else {
		cmp = s.newValue2(ssa.OpIsSliceInBounds, types.Types[types.TBOOL], idx, len)
	}
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely // in-bounds is the expected path
	b.AddEdgeTo(bNext)
	b.AddEdgeTo(bPanic)

	s.startBlock(bPanic)
	if Arch.LinkArch.Family == sys.Wasm {
		// Wasm calls the runtime bounds-check function directly instead
		// of using the OpPanicBounds block-exit form.
		s.rtcall(BoundsCheckFunc[kind], false, nil, idx, len)
	} else {
		mem := s.newValue3I(ssa.OpPanicBounds, types.TypeMem, int64(kind), idx, len, s.mem())
		s.endBlock().SetControl(mem)
	}
	s.startBlock(bNext)

	// Under -spectre=index, clamp the index into range so a speculatively
	// executed out-of-bounds access cannot leak data.
	if base.Flag.Cfg.SpectreIndex {
		op := ssa.OpSpectreIndex
		if kind != ssa.BoundsIndex && kind != ssa.BoundsIndexU {
			op = ssa.OpSpectreSliceIndex
		}
		idx = s.newValue2(op, types.Types[types.TINT], idx, len)
	}

	return idx
}
5520
5521
// check generates code that branches to a call of the runtime panic
// function fn if cmp (a boolean SSA value) is false. Panic blocks are
// deduplicated per (function, source position) via s.panics, so repeated
// checks at the same line share one block.
func (s *state) check(cmp *ssa.Value, fn *obj.LSym) {
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely // the non-panicking path is expected
	bNext := s.f.NewBlock(ssa.BlockPlain)
	line := s.peekPos()
	pos := base.Ctxt.PosTable.Pos(line)
	fl := funcLine{f: fn, base: pos.Base(), line: pos.Line()}
	bPanic := s.panics[fl]
	if bPanic == nil {
		bPanic = s.f.NewBlock(ssa.BlockPlain)
		s.panics[fl] = bPanic
		s.startBlock(bPanic)
		// rtcall with returns=false marks the panic block as an exit
		// block; control does not return.
		s.rtcall(fn, false, nil)
	}
	b.AddEdgeTo(bNext)
	b.AddEdgeTo(bPanic)
	s.startBlock(bNext)
}
5544
5545 func (s *state) intDivide(n ir.Node, a, b *ssa.Value) *ssa.Value {
5546 needcheck := true
5547 switch b.Op {
5548 case ssa.OpConst8, ssa.OpConst16, ssa.OpConst32, ssa.OpConst64:
5549 if b.AuxInt != 0 {
5550 needcheck = false
5551 }
5552 }
5553 if needcheck {
5554
5555 cmp := s.newValue2(s.ssaOp(ir.ONE, n.Type()), types.Types[types.TBOOL], b, s.zeroVal(n.Type()))
5556 s.check(cmp, ir.Syms.Panicdivide)
5557 }
5558 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
5559 }
5560
5561
5562
5563
5564
// rtcall issues a call to the given runtime function fn with the listed
// args, using the default ABI, and returns one SSA value per entry in
// results. If returns is false the call does not return (panic helpers):
// the block becomes an exit block, results must be empty, and nil is
// returned.
func (s *state) rtcall(fn *obj.LSym, returns bool, results []*types.Type, args ...*ssa.Value) []*ssa.Value {
	s.prevCall = nil
	// off accumulates the ABI0-style stack space the arguments would
	// occupy; with late call expansion the args are passed as value
	// operands, and off only feeds the call's AuxInt below.
	off := base.Ctxt.Arch.FixedFrameSize
	var callArgs []*ssa.Value
	var callArgTypes []*types.Type

	for _, arg := range args {
		t := arg.Type
		off = types.RoundUp(off, t.Alignment())
		size := t.Size()
		callArgs = append(callArgs, arg)
		callArgTypes = append(callArgTypes, t)
		off += size
	}
	off = types.RoundUp(off, int64(types.RegSize))

	// Issue the call as a static late-expanded call.
	var call *ssa.Value
	aux := ssa.StaticAuxCall(fn, s.f.ABIDefault.ABIAnalyzeTypes(callArgTypes, results))
	callArgs = append(callArgs, s.mem())
	call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
	call.AddArgs(callArgs...)
	s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(results)), call)

	if !returns {
		// Finish the block as an exit block; control does not return.
		b := s.endBlock()
		b.Kind = ssa.BlockExit
		b.SetControl(call)
		call.AuxInt = off - base.Ctxt.Arch.FixedFrameSize
		if len(results) > 0 {
			s.Fatalf("panic call can't have results")
		}
		return nil
	}

	// Project out the results.
	res := make([]*ssa.Value, len(results))
	for i, t := range results {
		off = types.RoundUp(off, t.Alignment())
		res[i] = s.resultOfCall(call, int64(i), t)
		off += t.Size()
	}
	off = types.RoundUp(off, int64(types.PtrSize))

	// Record the total argument+result stack size on the call.
	call.AuxInt = off

	return res
}
5616
5617
// storeType stores value right of type t to the address left, emitting
// write barriers where needed. skip lets callers omit parts of a
// string/slice store that are already known unchanged; leftIsStmt marks
// the store as the statement boundary at its position.
func (s *state) storeType(t *types.Type, left, right *ssa.Value, skip skipMask, leftIsStmt bool) {
	s.instrument(t, left, instrumentWrite)

	if skip == 0 && (!t.HasPointers() || ssa.IsStackAddr(left)) {
		// No write barrier needed (no pointers, or stack destination):
		// store the whole value at once.
		s.vars[memVar] = s.newValue3Apos(ssa.OpStore, types.TypeMem, t, left, right, s.mem(), leftIsStmt)
		return
	}

	// Store scalar fields first, so the write-barrier stores for pointer
	// fields can be grouped together and scalar values don't need to stay
	// live across the write-barrier call.
	s.storeTypeScalars(t, left, right, skip)
	if skip&skipPtr == 0 && t.HasPointers() {
		s.storeTypePtrs(t, left, right)
	}
}
5637
5638
// storeTypeScalars stores the non-pointer parts of value right (of type t)
// to the address left; the pointer parts are handled separately by
// storeTypePtrs so they can get write barriers. skip omits len/cap stores
// the caller knows are unchanged.
func (s *state) storeTypeScalars(t *types.Type, left, right *ssa.Value, skip skipMask) {
	switch {
	case t.IsBoolean() || t.IsInteger() || t.IsFloat() || t.IsComplex() || t.IsSIMD():
		s.store(t, left, right)
	case t.IsPtrShaped():
		// Pointers are handled in storeTypePtrs, except not-in-heap
		// pointers, which need no write barrier and are stored here.
		if t.IsPtr() && t.Elem().NotInHeap() {
			s.store(t, left, right)
		}

	case t.IsString():
		// Only the length word is a scalar; the data pointer is stored
		// by storeTypePtrs.
		if skip&skipLen != 0 {
			return
		}
		len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], right)
		lenAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, s.config.PtrSize, left)
		s.store(types.Types[types.TINT], lenAddr, len)
	case t.IsSlice():
		// Length and capacity words; the data pointer is stored by
		// storeTypePtrs.
		if skip&skipLen == 0 {
			len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], right)
			lenAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, s.config.PtrSize, left)
			s.store(types.Types[types.TINT], lenAddr, len)
		}
		if skip&skipCap == 0 {
			cap := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], right)
			capAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, 2*s.config.PtrSize, left)
			s.store(types.Types[types.TINT], capAddr, cap)
		}
	case t.IsInterface():
		// The itab word is stored as a scalar (uintptr); the data word
		// is stored by storeTypePtrs.
		itab := s.newValue1(ssa.OpITab, s.f.Config.Types.BytePtr, right)
		s.store(types.Types[types.TUINTPTR], left, itab)
	case isStructNotSIMD(t):
		// Recurse field by field.
		n := t.NumFields()
		for i := 0; i < n; i++ {
			ft := t.FieldType(i)
			addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
			val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
			s.storeTypeScalars(ft, addr, val, 0)
		}
	case t.IsArray() && t.Size() == 0:
		// Nothing to store for a zero-sized array.
	case t.IsArray() && t.NumElem() == 1:
		s.storeTypeScalars(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right), 0)
	default:
		s.Fatalf("bad write barrier type %v", t)
	}
}
5686
5687
// storeTypePtrs stores the pointer parts of value right (of type t) to the
// address left; these stores go through the normal store path so the
// write-barrier pass can apply barriers to them. Scalar parts are handled
// by storeTypeScalars.
func (s *state) storeTypePtrs(t *types.Type, left, right *ssa.Value) {
	switch {
	case t.IsPtrShaped():
		// Not-in-heap pointers need no barrier and were stored by
		// storeTypeScalars.
		if t.IsPtr() && t.Elem().NotInHeap() {
			break
		}
		s.store(t, left, right)
	case t.IsString():
		ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, right)
		s.store(s.f.Config.Types.BytePtr, left, ptr)
	case t.IsSlice():
		elType := types.NewPtr(t.Elem())
		ptr := s.newValue1(ssa.OpSlicePtr, elType, right)
		s.store(elType, left, ptr)
	case t.IsInterface():
		// The data word is the pointer part; the itab word was stored by
		// storeTypeScalars.
		idata := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, right)
		idataAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.BytePtrPtr, s.config.PtrSize, left)
		s.store(s.f.Config.Types.BytePtr, idataAddr, idata)
	case isStructNotSIMD(t):
		// Recurse into only those fields that contain pointers.
		n := t.NumFields()
		for i := 0; i < n; i++ {
			ft := t.FieldType(i)
			if !ft.HasPointers() {
				continue
			}
			addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
			val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
			s.storeTypePtrs(ft, addr, val)
		}
	case t.IsArray() && t.Size() == 0:
		// Nothing to store for a zero-sized array.
	case t.IsArray() && t.NumElem() == 1:
		s.storeTypePtrs(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right))
	default:
		s.Fatalf("bad write barrier type %v", t)
	}
}
5726
5727
5728 func (s *state) putArg(n ir.Node, t *types.Type) *ssa.Value {
5729 var a *ssa.Value
5730 if !ssa.CanSSA(t) {
5731 a = s.newValue2(ssa.OpDereference, t, s.addr(n), s.mem())
5732 } else {
5733 a = s.expr(n)
5734 }
5735 return a
5736 }
5737
5738
5739
5740
// slice computes the slice v[i:j:k] and returns the ptr, len, and cap of
// the result. v may be a slice, a string, or a pointer to an array; i, j,
// or k may be nil, in which case they take their default values (0, len,
// cap respectively). bounded suppresses the bounds checks.
func (s *state) slice(v, i, j, k *ssa.Value, bounded bool) (p, l, c *ssa.Value) {
	t := v.Type
	var ptr, len, cap *ssa.Value
	switch {
	case t.IsSlice():
		ptr = s.newValue1(ssa.OpSlicePtr, types.NewPtr(t.Elem()), v)
		len = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], v)
		cap = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], v)
	case t.IsString():
		ptr = s.newValue1(ssa.OpStringPtr, types.NewPtr(types.Types[types.TUINT8]), v)
		len = s.newValue1(ssa.OpStringLen, types.Types[types.TINT], v)
		cap = len // strings have no separate capacity
	case t.IsPtr():
		if !t.Elem().IsArray() {
			s.Fatalf("bad ptr to array in slice %v\n", t)
		}
		nv := s.nilCheck(v)
		ptr = s.newValue1(ssa.OpCopy, types.NewPtr(t.Elem().Elem()), nv)
		len = s.constInt(types.Types[types.TINT], t.Elem().NumElem())
		cap = len
	default:
		s.Fatalf("bad type in slice %v\n", t)
	}

	// Set default values for omitted indices.
	if i == nil {
		i = s.constInt(types.Types[types.TINT], 0)
	}
	if j == nil {
		j = len
	}
	three := true
	if k == nil {
		three = false
		k = cap
	}

	// Panic if the slice indices are not in bounds. Checks run
	// outermost-first (k against cap, then j against k, then i against j)
	// so each index is validated against an already-checked upper bound.
	if three {
		if k != cap {
			kind := ssa.BoundsSlice3Alen
			if t.IsSlice() {
				kind = ssa.BoundsSlice3Acap
			}
			k = s.boundsCheck(k, cap, kind, bounded)
		}
		if j != k {
			j = s.boundsCheck(j, k, ssa.BoundsSlice3B, bounded)
		}
		i = s.boundsCheck(i, j, ssa.BoundsSlice3C, bounded)
	} else {
		if j != k {
			kind := ssa.BoundsSliceAlen
			if t.IsSlice() {
				kind = ssa.BoundsSliceAcap
			}
			j = s.boundsCheck(j, k, kind, bounded)
		}
		i = s.boundsCheck(i, j, ssa.BoundsSliceB, bounded)
	}

	// Word-sized integer operations for the arithmetic below.
	subOp := s.ssaOp(ir.OSUB, types.Types[types.TINT])
	mulOp := s.ssaOp(ir.OMUL, types.Types[types.TINT])
	andOp := s.ssaOp(ir.OAND, types.Types[types.TINT])

	// Compute the length and capacity of the result:
	//   rlen = j - i
	//   rcap = k - i (strings and trivially full slices keep rcap = rlen)
	rlen := s.newValue2(subOp, types.Types[types.TINT], j, i)
	rcap := rlen
	if j != k && !t.IsString() {
		rcap = s.newValue2(subOp, types.Types[types.TINT], k, i)
	}

	// If i is statically zero, the pointer is unchanged.
	if (i.Op == ssa.OpConst64 || i.Op == ssa.OpConst32) && i.AuxInt == 0 {

		return ptr, rlen, rcap
	}

	// Compute the new base pointer: ptr + i*elemsize. When the resulting
	// capacity is zero, the offset is masked to zero (via OpSlicemask) so
	// the pointer does not point past the end of the original allocation,
	// which would confuse the garbage collector.
	stride := s.constInt(types.Types[types.TINT], ptr.Type.Elem().Size())

	// delta = number of bytes to offset the pointer by.
	delta := s.newValue2(mulOp, types.Types[types.TINT], i, stride)

	// mask is all ones when rcap != 0, all zeros when rcap == 0;
	// ANDing zeroes the delta in the empty-result case.
	mask := s.newValue1(ssa.OpSlicemask, types.Types[types.TINT], rcap)
	delta = s.newValue2(andOp, types.Types[types.TINT], delta, mask)

	// Compute rptr = ptr + delta.
	rptr := s.newValue2(ssa.OpAddPtr, ptr.Type, ptr, delta)

	return rptr, rlen, rcap
}
5853
// u642fcvtTab holds the SSA opcodes needed to lower a uint64-to-float
// conversion for one target float width (used by uint64Tofloat).
type u642fcvtTab struct {
	leq, cvt2F, and, rsh, or, add ssa.Op
	one func(*state, *types.Type, int64) *ssa.Value
}

// u64_f64 lowers uint64 -> float64.
var u64_f64 = u642fcvtTab{
	leq: ssa.OpLeq64,
	cvt2F: ssa.OpCvt64to64F,
	and: ssa.OpAnd64,
	rsh: ssa.OpRsh64Ux64,
	or: ssa.OpOr64,
	add: ssa.OpAdd64F,
	one: (*state).constInt64,
}

// u64_f32 lowers uint64 -> float32.
var u64_f32 = u642fcvtTab{
	leq: ssa.OpLeq64,
	cvt2F: ssa.OpCvt64to32F,
	and: ssa.OpAnd64,
	rsh: ssa.OpRsh64Ux64,
	or: ssa.OpOr64,
	add: ssa.OpAdd32F,
	one: (*state).constInt64,
}
5878
5879 func (s *state) uint64Tofloat64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
5880 return s.uint64Tofloat(&u64_f64, n, x, ft, tt)
5881 }
5882
5883 func (s *state) uint64Tofloat32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
5884 return s.uint64Tofloat(&u64_f32, n, x, ft, tt)
5885 }
5886
// uint64Tofloat lowers a uint64-to-float conversion using the opcodes in
// cvttab, branching on the sign bit of x:
//
//	if x >= 0 {
//	   result = (floatY) x
//	} else {
//	   y = x & 1
//	   z = x >> 1
//	   z = z | y
//	   result = floatY(z)
//	   result = result + result
//	}
//
// A large 64-bit "unsigned" value looks negative to the hardware's
// signed integer-to-float conversion. Because the float mantissa is at
// most 63 bits we can halve (unsigned shift right), convert, and double.
// Before halving, the low bit is preserved and OR'd (not ADDed) back in
// so a discarded 1 cannot change the rounding outcome.
func (s *state) uint64Tofloat(cvttab *u642fcvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	cmp := s.newValue2(cvttab.leq, types.Types[types.TBOOL], s.zeroVal(ft), x)

	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// x >= 0: direct conversion.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvt2F, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// x < 0 (top bit set): halve, convert, double.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	one := cvttab.one(s, ft, 1)
	y := s.newValue2(cvttab.and, ft, x, one)
	z := s.newValue2(cvttab.rsh, ft, x, one)
	z = s.newValue2(cvttab.or, ft, z, y)
	a := s.newValue1(cvttab.cvt2F, tt, z)
	a1 := s.newValue2(cvttab.add, tt, a, a)
	s.vars[n] = a1
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
5946
// u322fcvtTab holds the SSA opcodes needed to lower a uint32-to-float
// conversion for one target float width (used by uint32Tofloat).
type u322fcvtTab struct {
	cvtI2F, cvtF2F ssa.Op
}

// u32_f64 lowers uint32 -> float64.
var u32_f64 = u322fcvtTab{
	cvtI2F: ssa.OpCvt32to64F,
	cvtF2F: ssa.OpCopy,
}

// u32_f32 lowers uint32 -> float32 (converts via float64, then narrows).
var u32_f32 = u322fcvtTab{
	cvtI2F: ssa.OpCvt32to32F,
	cvtF2F: ssa.OpCvt64Fto32F,
}
5960
5961 func (s *state) uint32Tofloat64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
5962 return s.uint32Tofloat(&u32_f64, n, x, ft, tt)
5963 }
5964
5965 func (s *state) uint32Tofloat32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
5966 return s.uint32Tofloat(&u32_f32, n, x, ft, tt)
5967 }
5968
// uint32Tofloat lowers a uint32-to-float conversion:
//
//	if x >= 0 {
//		result = floatY(x)
//	} else {
//		result = floatY(float64(x) + (1<<32))
//	}
//
// A uint32 with the top bit set looks negative to the signed 32-bit
// convert, so the else branch compensates by adding 2^32 in float64.
func (s *state) uint32Tofloat(cvttab *u322fcvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	cmp := s.newValue2(ssa.OpLeq32, types.Types[types.TBOOL], s.zeroVal(ft), x)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// x >= 0: direct conversion.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvtI2F, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// x < 0 (top bit set): convert as signed, add 2^32, narrow if needed.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	a1 := s.newValue1(ssa.OpCvt32to64F, types.Types[types.TFLOAT64], x)
	twoToThe32 := s.constFloat64(types.Types[types.TFLOAT64], float64(1<<32))
	a2 := s.newValue2(ssa.OpAdd64F, types.Types[types.TFLOAT64], a1, twoToThe32)
	a3 := s.newValue1(cvttab.cvtF2F, tt, a2)

	s.vars[n] = a3
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
6006
6007
// referenceTypeBuiltin generates code for the len/cap builtins for maps and
// channels. Only the combinations not rejected below are inlined here;
// the rejected ones must be lowered to runtime calls elsewhere.
func (s *state) referenceTypeBuiltin(n *ir.UnaryExpr, x *ssa.Value) *ssa.Value {
	if !n.X.Type().IsMap() && !n.X.Type().IsChan() {
		s.Fatalf("node must be a map or a channel")
	}
	if n.X.Type().IsChan() && n.Op() == ir.OLEN {
		s.Fatalf("cannot inline len(chan)")
	}
	if n.X.Type().IsChan() && n.Op() == ir.OCAP {
		s.Fatalf("cannot inline cap(chan)")
	}
	if n.X.Type().IsMap() && n.Op() == ir.OCAP {
		s.Fatalf("cannot inline cap(map)")
	}
	// if n == nil {
	//   return 0
	// } else {
	//   // len
	//   return *((*int)n)
	//   // cap
	//   return *(((*int)n)+1)
	// }
	lenType := n.Type()
	nilValue := s.constNil(types.Types[types.TUINTPTR])
	cmp := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], x, nilValue)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchUnlikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// length/capacity of a nil map/chan is zero
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	s.vars[n] = s.zeroVal(lenType)
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	switch n.Op() {
	case ir.OLEN:
		if n.X.Type().IsMap() {
			// length is stored in the first field of the map header;
			// convert from that field's type to the result int type.
			loadType := reflectdata.MapType().Field(0).Type
			load := s.load(loadType, x)
			s.vars[n] = s.conv(nil, load, loadType, lenType)
		} else {
			// length is stored in the first word.
			s.vars[n] = s.load(lenType, x)
		}
	case ir.OCAP:
		// capacity is stored in the second word.
		sw := s.newValue1I(ssa.OpOffPtr, lenType.PtrTo(), lenType.Size(), x)
		s.vars[n] = s.load(lenType, sw)
	default:
		s.Fatalf("op must be OLEN or OCAP")
	}
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, lenType)
}
6074
// f2uCvtTab holds the SSA opcodes and constants needed to lower a
// float-to-unsigned-integer conversion for one (float width, int width)
// pair (used by floatToUint). cutoff is 2^(intWidth-1), the first value
// that does not fit in the corresponding signed conversion.
type f2uCvtTab struct {
	ltf, cvt2U, subf, or ssa.Op
	floatValue func(*state, *types.Type, float64) *ssa.Value
	intValue func(*state, *types.Type, int64) *ssa.Value
	cutoff uint64
}

// f32_u64 lowers float32 -> uint64.
var f32_u64 = f2uCvtTab{
	ltf: ssa.OpLess32F,
	cvt2U: ssa.OpCvt32Fto64,
	subf: ssa.OpSub32F,
	or: ssa.OpOr64,
	floatValue: (*state).constFloat32,
	intValue: (*state).constInt64,
	cutoff: 1 << 63,
}

// f64_u64 lowers float64 -> uint64.
var f64_u64 = f2uCvtTab{
	ltf: ssa.OpLess64F,
	cvt2U: ssa.OpCvt64Fto64,
	subf: ssa.OpSub64F,
	or: ssa.OpOr64,
	floatValue: (*state).constFloat64,
	intValue: (*state).constInt64,
	cutoff: 1 << 63,
}

// f32_u32 lowers float32 -> uint32.
var f32_u32 = f2uCvtTab{
	ltf: ssa.OpLess32F,
	cvt2U: ssa.OpCvt32Fto32,
	subf: ssa.OpSub32F,
	or: ssa.OpOr32,
	floatValue: (*state).constFloat32,
	intValue: func(s *state, t *types.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
	cutoff: 1 << 31,
}

// f64_u32 lowers float64 -> uint32.
var f64_u32 = f2uCvtTab{
	ltf: ssa.OpLess64F,
	cvt2U: ssa.OpCvt64Fto32,
	subf: ssa.OpSub64F,
	or: ssa.OpOr32,
	floatValue: (*state).constFloat64,
	intValue: func(s *state, t *types.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
	cutoff: 1 << 31,
}
6121
6122 func (s *state) float32ToUint64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
6123 return s.floatToUint(&f32_u64, n, x, ft, tt)
6124 }
6125 func (s *state) float64ToUint64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
6126 return s.floatToUint(&f64_u64, n, x, ft, tt)
6127 }
6128
6129 func (s *state) float32ToUint32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
6130 return s.floatToUint(&f32_u32, n, x, ft, tt)
6131 }
6132
6133 func (s *state) float64ToUint32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
6134 return s.floatToUint(&f64_u32, n, x, ft, tt)
6135 }
6136
// floatToUint lowers a float-to-unsigned-integer conversion:
//
//	cutoff := 1 << (intY_Size - 1)
//	if x < floatX(cutoff) {
//		result = uintY(x)        // in-range (signed-convertible) case
//	} else {
//		y = x - floatX(cutoff)
//		z = uintY(y)
//		result = z | -(cutoff)   // set the top bit back
//	}
//
// When the conversion-hash gate below selects the new behavior, the
// in-range branch additionally maps x < 0 to 0 via the bZero block.
func (s *state) floatToUint(cvttab *f2uCvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	cutoff := cvttab.floatValue(s, ft, float64(cvttab.cutoff))
	cmp := s.newValueOrSfCall2(cvttab.ltf, types.Types[types.TBOOL], x, cutoff)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	var bThen, bZero *ssa.Block
	// Debug-hash gate: selects whether the extra negative-input check is emitted.
	newConversion := base.ConvertHash.MatchPosWithInfo(n.Pos(), "U", nil)
	if newConversion {
		bZero = s.f.NewBlock(ssa.BlockPlain)
		bThen = s.f.NewBlock(ssa.BlockIf)
	} else {
		bThen = s.f.NewBlock(ssa.BlockPlain)
	}

	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// x < cutoff: plain conversion.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValueOrSfCall1(cvttab.cvt2U, tt, x)
	s.vars[n] = a0

	if newConversion {
		// New behavior: additionally branch on x < 0 and force the
		// result to 0 in that case.
		cmpz := s.newValueOrSfCall2(cvttab.ltf, types.Types[types.TBOOL], x, cvttab.floatValue(s, ft, 0.0))
		s.endBlock()
		bThen.SetControl(cmpz)
		bThen.AddEdgeTo(bZero)
		bThen.Likely = ssa.BranchUnlikely
		bThen.AddEdgeTo(bAfter)

		s.startBlock(bZero)
		s.vars[n] = cvttab.intValue(s, tt, 0)
		s.endBlock()
		bZero.AddEdgeTo(bAfter)
	} else {
		s.endBlock()
		bThen.AddEdgeTo(bAfter)
	}

	// x >= cutoff: subtract cutoff, convert, then OR the top bit back in.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	y := s.newValueOrSfCall2(cvttab.subf, ft, x, cutoff)
	y = s.newValueOrSfCall1(cvttab.cvt2U, tt, y)
	z := cvttab.intValue(s, tt, int64(-cvttab.cutoff))
	a1 := s.newValue2(cvttab.or, tt, y, z)
	s.vars[n] = a1
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
6206
6207
6208
6209
// dottype generates SSA for a type assertion node.
// commaok indicates whether to panic (false) or return a bool (true).
// If commaok is false, resok will be nil.
func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
	iface := s.expr(n.X)              // input interface
	target := s.reflectType(n.Type()) // target type descriptor
	var targetItab *ssa.Value
	if n.ITab != nil {
		targetItab = s.expr(n.ITab)
	}

	if n.UseNilPanic {
		// UseNilPanic mode: fail on a nil interface with a nil-check
		// panic rather than a type-assertion panic. Only supported for
		// the non-commaok, non-interface-target form.
		if commaok {
			base.Fatalf("unexpected *ir.TypeAssertExpr with UseNilPanic == true && commaok == true")
		}
		if n.Type().IsInterface() {
			// Currently we do not expect UseNilPanic asserts to an
			// interface type; guard against that combination.
			base.Fatalf("unexpected *ir.TypeAssertExpr with UseNilPanic == true && Type().IsInterface() == true")
		}
		typs := s.f.Config.Types
		// Rebuild the interface with a nil-checked type word so a nil
		// input panics here with a nil dereference.
		iface = s.newValue2(
			ssa.OpIMake,
			iface.Type,
			s.nilCheck(s.newValue1(ssa.OpITab, typs.BytePtr, iface)),
			s.newValue1(ssa.OpIData, typs.BytePtr, iface),
		)
	}

	return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, nil, target, targetItab, commaok, n.Descriptor)
}
6238
// dynamicDottype generates SSA for a dynamic type assertion node, where
// the target type is not known at compile time (e.g. a type parameter).
func (s *state) dynamicDottype(n *ir.DynamicTypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
	iface := s.expr(n.X)
	var source, target, targetItab *ssa.Value
	if n.SrcRType != nil {
		source = s.expr(n.SrcRType)
	}
	if !n.X.Type().IsEmptyInterface() && !n.Type().IsInterface() {
		byteptr := s.f.Config.Types.BytePtr
		targetItab = s.expr(n.ITab)
		// Recover the target type descriptor by loading the Type field
		// out of the itab, instead of materializing n.RType separately.
		target = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), targetItab))
	} else {
		target = s.expr(n.RType)
	}
	return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, source, target, targetItab, commaok, nil)
}
6256
6257
6258
6259
6260
6261
6262
6263
6264
6265 func (s *state) dottype1(pos src.XPos, src, dst *types.Type, iface, source, target, targetItab *ssa.Value, commaok bool, descriptor *obj.LSym) (res, resok *ssa.Value) {
6266 typs := s.f.Config.Types
6267 byteptr := typs.BytePtr
6268 if dst.IsInterface() {
6269 if dst.IsEmptyInterface() {
6270
6271
6272 if base.Debug.TypeAssert > 0 {
6273 base.WarnfAt(pos, "type assertion inlined")
6274 }
6275
6276
6277 itab := s.newValue1(ssa.OpITab, byteptr, iface)
6278
6279 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
6280
6281 if src.IsEmptyInterface() && commaok {
6282
6283 return iface, cond
6284 }
6285
6286
6287 b := s.endBlock()
6288 b.Kind = ssa.BlockIf
6289 b.SetControl(cond)
6290 b.Likely = ssa.BranchLikely
6291 bOk := s.f.NewBlock(ssa.BlockPlain)
6292 bFail := s.f.NewBlock(ssa.BlockPlain)
6293 b.AddEdgeTo(bOk)
6294 b.AddEdgeTo(bFail)
6295
6296 if !commaok {
6297
6298 s.startBlock(bFail)
6299 s.rtcall(ir.Syms.Panicnildottype, false, nil, target)
6300
6301
6302 s.startBlock(bOk)
6303 if src.IsEmptyInterface() {
6304 res = iface
6305 return
6306 }
6307
6308 off := s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab)
6309 typ := s.load(byteptr, off)
6310 idata := s.newValue1(ssa.OpIData, byteptr, iface)
6311 res = s.newValue2(ssa.OpIMake, dst, typ, idata)
6312 return
6313 }
6314
6315 s.startBlock(bOk)
6316
6317
6318 off := s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab)
6319 s.vars[typVar] = s.load(byteptr, off)
6320 s.endBlock()
6321
6322
6323 s.startBlock(bFail)
6324 s.vars[typVar] = itab
6325 s.endBlock()
6326
6327
6328 bEnd := s.f.NewBlock(ssa.BlockPlain)
6329 bOk.AddEdgeTo(bEnd)
6330 bFail.AddEdgeTo(bEnd)
6331 s.startBlock(bEnd)
6332 idata := s.newValue1(ssa.OpIData, byteptr, iface)
6333 res = s.newValue2(ssa.OpIMake, dst, s.variable(typVar, byteptr), idata)
6334 resok = cond
6335 delete(s.vars, typVar)
6336 return
6337 }
6338
6339 if base.Debug.TypeAssert > 0 {
6340 base.WarnfAt(pos, "type assertion not inlined")
6341 }
6342
6343 itab := s.newValue1(ssa.OpITab, byteptr, iface)
6344 data := s.newValue1(ssa.OpIData, types.Types[types.TUNSAFEPTR], iface)
6345
6346
6347 bNil := s.f.NewBlock(ssa.BlockPlain)
6348 bNonNil := s.f.NewBlock(ssa.BlockPlain)
6349 bMerge := s.f.NewBlock(ssa.BlockPlain)
6350 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
6351 b := s.endBlock()
6352 b.Kind = ssa.BlockIf
6353 b.SetControl(cond)
6354 b.Likely = ssa.BranchLikely
6355 b.AddEdgeTo(bNonNil)
6356 b.AddEdgeTo(bNil)
6357
6358 s.startBlock(bNil)
6359 if commaok {
6360 s.vars[typVar] = itab
6361 b := s.endBlock()
6362 b.AddEdgeTo(bMerge)
6363 } else {
6364
6365 s.rtcall(ir.Syms.Panicnildottype, false, nil, target)
6366 }
6367
6368
6369 s.startBlock(bNonNil)
6370 typ := itab
6371 if !src.IsEmptyInterface() {
6372 typ = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab))
6373 }
6374
6375
6376 var d *ssa.Value
6377 if descriptor != nil {
6378 d = s.newValue1A(ssa.OpAddr, byteptr, descriptor, s.sb)
6379 if base.Flag.N == 0 && rtabi.UseInterfaceSwitchCache(Arch.LinkArch.Family) {
6380
6381
6382 if intrinsics.lookup(Arch.LinkArch.Arch, "internal/runtime/atomic", "Loadp") == nil {
6383 s.Fatalf("atomic load not available")
6384 }
6385
6386 var mul, and, add, zext ssa.Op
6387 if s.config.PtrSize == 4 {
6388 mul = ssa.OpMul32
6389 and = ssa.OpAnd32
6390 add = ssa.OpAdd32
6391 zext = ssa.OpCopy
6392 } else {
6393 mul = ssa.OpMul64
6394 and = ssa.OpAnd64
6395 add = ssa.OpAdd64
6396 zext = ssa.OpZeroExt32to64
6397 }
6398
6399 loopHead := s.f.NewBlock(ssa.BlockPlain)
6400 loopBody := s.f.NewBlock(ssa.BlockPlain)
6401 cacheHit := s.f.NewBlock(ssa.BlockPlain)
6402 cacheMiss := s.f.NewBlock(ssa.BlockPlain)
6403
6404
6405
6406 atomicLoad := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(typs.BytePtr, types.TypeMem), d, s.mem())
6407 cache := s.newValue1(ssa.OpSelect0, typs.BytePtr, atomicLoad)
6408 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, atomicLoad)
6409
6410
6411 var hash *ssa.Value
6412 if src.IsEmptyInterface() {
6413 hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, rttype.Type.OffsetOf("Hash"), typ), s.mem())
6414 } else {
6415 hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, rttype.ITab.OffsetOf("Hash"), itab), s.mem())
6416 }
6417 hash = s.newValue1(zext, typs.Uintptr, hash)
6418 s.vars[hashVar] = hash
6419
6420 mask := s.newValue2(ssa.OpLoad, typs.Uintptr, cache, s.mem())
6421
6422 b := s.endBlock()
6423 b.AddEdgeTo(loopHead)
6424
6425
6426
6427 s.startBlock(loopHead)
6428 idx := s.newValue2(and, typs.Uintptr, s.variable(hashVar, typs.Uintptr), mask)
6429 idx = s.newValue2(mul, typs.Uintptr, idx, s.uintptrConstant(uint64(2*s.config.PtrSize)))
6430 idx = s.newValue2(add, typs.Uintptr, idx, s.uintptrConstant(uint64(s.config.PtrSize)))
6431 e := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, cache, idx)
6432
6433 s.vars[hashVar] = s.newValue2(add, typs.Uintptr, s.variable(hashVar, typs.Uintptr), s.uintptrConstant(1))
6434
6435
6436
6437 eTyp := s.newValue2(ssa.OpLoad, typs.Uintptr, e, s.mem())
6438 cmp1 := s.newValue2(ssa.OpEqPtr, typs.Bool, typ, eTyp)
6439 b = s.endBlock()
6440 b.Kind = ssa.BlockIf
6441 b.SetControl(cmp1)
6442 b.AddEdgeTo(cacheHit)
6443 b.AddEdgeTo(loopBody)
6444
6445
6446
6447 s.startBlock(loopBody)
6448 cmp2 := s.newValue2(ssa.OpEqPtr, typs.Bool, eTyp, s.constNil(typs.BytePtr))
6449 b = s.endBlock()
6450 b.Kind = ssa.BlockIf
6451 b.SetControl(cmp2)
6452 b.AddEdgeTo(cacheMiss)
6453 b.AddEdgeTo(loopHead)
6454
6455
6456
6457 s.startBlock(cacheHit)
6458 eItab := s.newValue2(ssa.OpLoad, typs.BytePtr, s.newValue1I(ssa.OpOffPtr, typs.BytePtrPtr, s.config.PtrSize, e), s.mem())
6459 s.vars[typVar] = eItab
6460 b = s.endBlock()
6461 b.AddEdgeTo(bMerge)
6462
6463
6464 s.startBlock(cacheMiss)
6465 }
6466 }
6467
6468
6469 if descriptor != nil {
6470 itab = s.rtcall(ir.Syms.TypeAssert, true, []*types.Type{byteptr}, d, typ)[0]
6471 } else {
6472 var fn *obj.LSym
6473 if commaok {
6474 fn = ir.Syms.AssertE2I2
6475 } else {
6476 fn = ir.Syms.AssertE2I
6477 }
6478 itab = s.rtcall(fn, true, []*types.Type{byteptr}, target, typ)[0]
6479 }
6480 s.vars[typVar] = itab
6481 b = s.endBlock()
6482 b.AddEdgeTo(bMerge)
6483
6484
6485 s.startBlock(bMerge)
6486 itab = s.variable(typVar, byteptr)
6487 var ok *ssa.Value
6488 if commaok {
6489 ok = s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
6490 }
6491 return s.newValue2(ssa.OpIMake, dst, itab, data), ok
6492 }
6493
6494 if base.Debug.TypeAssert > 0 {
6495 base.WarnfAt(pos, "type assertion inlined")
6496 }
6497
6498
6499 direct := types.IsDirectIface(dst)
6500 itab := s.newValue1(ssa.OpITab, byteptr, iface)
6501 if base.Debug.TypeAssert > 0 {
6502 base.WarnfAt(pos, "type assertion inlined")
6503 }
6504 var wantedFirstWord *ssa.Value
6505 if src.IsEmptyInterface() {
6506
6507 wantedFirstWord = target
6508 } else {
6509
6510 wantedFirstWord = targetItab
6511 }
6512
6513 var tmp ir.Node
6514 var addr *ssa.Value
6515 if commaok && !ssa.CanSSA(dst) {
6516
6517
6518 tmp, addr = s.temp(pos, dst)
6519 }
6520
6521 cond := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], itab, wantedFirstWord)
6522 b := s.endBlock()
6523 b.Kind = ssa.BlockIf
6524 b.SetControl(cond)
6525 b.Likely = ssa.BranchLikely
6526
6527 bOk := s.f.NewBlock(ssa.BlockPlain)
6528 bFail := s.f.NewBlock(ssa.BlockPlain)
6529 b.AddEdgeTo(bOk)
6530 b.AddEdgeTo(bFail)
6531
6532 if !commaok {
6533
6534 s.startBlock(bFail)
6535 taddr := source
6536 if taddr == nil {
6537 taddr = s.reflectType(src)
6538 }
6539 if src.IsEmptyInterface() {
6540 s.rtcall(ir.Syms.PanicdottypeE, false, nil, itab, target, taddr)
6541 } else {
6542 s.rtcall(ir.Syms.PanicdottypeI, false, nil, itab, target, taddr)
6543 }
6544
6545
6546 s.startBlock(bOk)
6547 if direct {
6548 return s.newValue1(ssa.OpIData, dst, iface), nil
6549 }
6550 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6551 return s.load(dst, p), nil
6552 }
6553
6554
6555
6556 bEnd := s.f.NewBlock(ssa.BlockPlain)
6557
6558
6559 valVar := ssaMarker("val")
6560
6561
6562 s.startBlock(bOk)
6563 if tmp == nil {
6564 if direct {
6565 s.vars[valVar] = s.newValue1(ssa.OpIData, dst, iface)
6566 } else {
6567 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6568 s.vars[valVar] = s.load(dst, p)
6569 }
6570 } else {
6571 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6572 s.move(dst, addr, p)
6573 }
6574 s.vars[okVar] = s.constBool(true)
6575 s.endBlock()
6576 bOk.AddEdgeTo(bEnd)
6577
6578
6579 s.startBlock(bFail)
6580 if tmp == nil {
6581 s.vars[valVar] = s.zeroVal(dst)
6582 } else {
6583 s.zero(dst, addr)
6584 }
6585 s.vars[okVar] = s.constBool(false)
6586 s.endBlock()
6587 bFail.AddEdgeTo(bEnd)
6588
6589
6590 s.startBlock(bEnd)
6591 if tmp == nil {
6592 res = s.variable(valVar, dst)
6593 delete(s.vars, valVar)
6594 } else {
6595 res = s.load(dst, addr)
6596 }
6597 resok = s.variable(okVar, types.Types[types.TBOOL])
6598 delete(s.vars, okVar)
6599 return res, resok
6600 }
6601
6602
6603 func (s *state) temp(pos src.XPos, t *types.Type) (*ir.Name, *ssa.Value) {
6604 tmp := typecheck.TempAt(pos, s.curfn, t)
6605 if t.HasPointers() || (ssa.IsMergeCandidate(tmp) && t != deferstruct()) {
6606 s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, tmp, s.mem())
6607 }
6608 addr := s.addr(tmp)
6609 return tmp, addr
6610 }
6611
6612
6613 func (s *state) variable(n ir.Node, t *types.Type) *ssa.Value {
6614 v := s.vars[n]
6615 if v != nil {
6616 return v
6617 }
6618 v = s.fwdVars[n]
6619 if v != nil {
6620 return v
6621 }
6622
6623 if s.curBlock == s.f.Entry {
6624
6625 s.f.Fatalf("value %v (%v) incorrectly live at entry", n, v)
6626 }
6627
6628
6629 v = s.newValue0A(ssa.OpFwdRef, t, fwdRefAux{N: n})
6630 s.fwdVars[n] = v
6631 if n.Op() == ir.ONAME {
6632 s.addNamedValue(n.(*ir.Name), v)
6633 }
6634 return v
6635 }
6636
// mem returns the current memory state of the function being built.
func (s *state) mem() *ssa.Value {
	return s.variable(memVar, types.TypeMem)
}
6640
6641 func (s *state) addNamedValue(n *ir.Name, v *ssa.Value) {
6642 if n.Class == ir.Pxxx {
6643
6644 return
6645 }
6646 if ir.IsAutoTmp(n) {
6647
6648 return
6649 }
6650 if n.Class == ir.PPARAMOUT {
6651
6652
6653 return
6654 }
6655 loc := ssa.LocalSlot{N: n, Type: n.Type(), Off: 0}
6656 values, ok := s.f.NamedValues[loc]
6657 if !ok {
6658 s.f.Names = append(s.f.Names, &loc)
6659 s.f.CanonicalLocalSlots[loc] = &loc
6660 }
6661 s.f.NamedValues[loc] = append(values, v)
6662 }
6663
6664
// Branch is an unresolved branch: a branch instruction whose machine-level
// target is patched in once the start Prog of every block is known.
type Branch struct {
	P *obj.Prog // branch instruction
	B *ssa.Block // target block
}
6669
6670
// State contains state needed during Prog generation.
type State struct {
	ABI obj.ABI

	pp *objw.Progs

	// Branches remembers all the branch instructions we've seen
	// and where they would like to go.
	Branches []Branch

	// JumpTables remembers all the jump tables we've seen.
	JumpTables []*ssa.Block

	// bstart remembers where each block starts (indexed by block ID).
	bstart []*obj.Prog

	maxarg int64 // largest frame size for arguments to calls made by the function

	// Map from GC safe points to liveness index, generated by
	// liveness analysis.
	livenessMap liveness.Map

	// partLiveArgs includes arguments that may be partially live, for which we
	// need to generate instructions that spill the argument registers.
	partLiveArgs map[*ir.Name]bool

	// lineRunStart records the beginning of the current run of instructions
	// within a single block sharing the same line number.
	// Used to move statement marks to the beginning of such runs.
	lineRunStart *obj.Prog

	// wasm: The number of values on the WebAssembly stack. This is only used as a safeguard.
	OnWasmStackSkipped int
}
6704
// FuncInfo returns the obj.FuncInfo of the function currently being compiled.
func (s *State) FuncInfo() *obj.FuncInfo {
	return s.pp.CurFunc.LSym.Func()
}
6708
6709
// Prog appends a new Prog.
func (s *State) Prog(as obj.As) *obj.Prog {
	p := s.pp.Prog(as)
	if objw.LosesStmtMark(as) {
		return p
	}
	// Float a statement start to the beginning of any same-line run:
	// if this Prog starts a new line, it begins a run; otherwise, if it
	// carries a statement mark, move the mark to the run's first Prog.
	if s.lineRunStart == nil || s.lineRunStart.Pos.Line() != p.Pos.Line() {
		s.lineRunStart = p
	} else if p.Pos.IsStmt() == src.PosIsStmt {
		s.lineRunStart.Pos = s.lineRunStart.Pos.WithIsStmt()
		p.Pos = p.Pos.WithNotStmt()
	}
	return p
}
6725
6726
// Pc returns the current Prog (the next instruction slot).
func (s *State) Pc() *obj.Prog {
	return s.pp.Next
}
6730
6731
// SetPos sets the current source position.
func (s *State) SetPos(pos src.XPos) {
	s.pp.Pos = pos
}
6735
6736
6737
6738
6739 func (s *State) Br(op obj.As, target *ssa.Block) *obj.Prog {
6740 p := s.Prog(op)
6741 p.To.Type = obj.TYPE_BRANCH
6742 s.Branches = append(s.Branches, Branch{P: p, B: target})
6743 return p
6744 }
6745
6746
6747
6748
6749
6750
// DebugFriendlySetPosFrom adjusts the current position based on v's
// position, subject to heuristics that reduce "jumpy" line number churn
// when debugging. Spill/fill/copy instructions from the register
// allocator and phi functions are examples of instructions that can
// cause churn.
func (s *State) DebugFriendlySetPosFrom(v *ssa.Value) {
	switch v.Op {
	case ssa.OpPhi, ssa.OpCopy, ssa.OpLoadReg, ssa.OpStoreReg:
		// These are not statements.
		s.SetPos(v.Pos.WithNotStmt())
	default:
		p := v.Pos
		if p != src.NoXPos {
			// If the position is defined, update the position.
			// Also convert default IsStmt to NotStmt; only
			// explicit statement boundaries should appear
			// in the generated code.
			if p.IsStmt() != src.PosIsStmt {
				if s.pp.Pos.IsStmt() == src.PosIsStmt && s.pp.Pos.SameFileAndLine(p) {
					// If s.pp.Pos already has a statement mark, then it was set here (below) for
					// the previous value. If an actual instruction had been emitted for that
					// value, the statement mark would have been reset. Since it was not reset,
					// this file/line still needs a statement mark on an instruction, so leave
					// the pending mark in place for whatever instruction comes next.
					return
				}
				p = p.WithNotStmt()
				// Calls use the pos attached to v, but copy the statement mark from State.
			}
			s.SetPos(p)
		} else {
			s.SetPos(s.pp.Pos.WithNotStmt())
		}
	}
}
6789
6790
// emitArgInfo emits argument info (locations on stack) of the function
// being compiled, for use by traceback. No-op for functions with no
// receiver and no parameters.
func emitArgInfo(e *ssafn, f *ssa.Func, pp *objw.Progs) {
	ft := e.curfn.Type()
	if ft.NumRecvs() == 0 && ft.NumParams() == 0 {
		return
	}

	x := EmitArgInfo(e.curfn, f.OwnAux.ABIInfo())
	x.Set(obj.AttrContentAddressable, true)
	e.curfn.LSym.Func().ArgInfo = x

	// Emit a funcdata record pointing at the arg info data.
	p := pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(rtabi.FUNCDATA_ArgInfo)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = x
}
6808
6809
// EmitArgInfo emits the argument info (locations on stack) for traceback.
// The encoding is a sequence of byte-sized "instructions" (offset/size
// pairs and the special markers defined in internal/abi as TraceArgs*).
func EmitArgInfo(f *ir.Func, abiInfo *abi.ABIParamResultInfo) *obj.LSym {
	x := base.Ctxt.Lookup(fmt.Sprintf("%s.arginfo%d", f.LSym.Name, f.ABI))
	x.Align = 1 // byte stream, no alignment needed

	PtrSize := int64(types.PtrSize)
	uintptrTyp := types.Types[types.TUINTPTR]

	// isAggregate reports whether t is encoded as a composite
	// (StartAgg ... EndAgg) rather than a single offset/size pair.
	isAggregate := func(t *types.Type) bool {
		return isStructNotSIMD(t) || t.IsArray() || t.IsComplex() || t.IsInterface() || t.IsString() || t.IsSlice()
	}

	wOff := 0 // write offset into the symbol's data
	n := 0    // number of components written so far
	writebyte := func(o uint8) { wOff = objw.Uint8(x, wOff, o) }

	// write1 writes one non-aggregate arg/field/element as an
	// (offset, size) byte pair, or the too-large marker.
	write1 := func(sz, offset int64) {
		if offset >= rtabi.TraceArgsSpecial {
			writebyte(rtabi.TraceArgsOffsetTooLarge)
		} else {
			writebyte(uint8(offset))
			writebyte(uint8(sz))
		}
		n++
	}

	// visitType writes the encoding of t at baseOffset, recursing into
	// aggregates up to TraceArgsMaxDepth. It returns whether the caller
	// should continue visiting further components (false once the
	// component limit has been reached).
	var visitType func(baseOffset int64, t *types.Type, depth int) bool
	visitType = func(baseOffset int64, t *types.Type, depth int) bool {
		if n >= rtabi.TraceArgsLimit {
			writebyte(rtabi.TraceArgsDotdotdot)
			return false
		}
		if !isAggregate(t) {
			write1(t.Size(), baseOffset)
			return true
		}
		writebyte(rtabi.TraceArgsStartAgg)
		depth++
		if depth >= rtabi.TraceArgsMaxDepth {
			// Too deeply nested: elide the contents.
			writebyte(rtabi.TraceArgsDotdotdot)
			writebyte(rtabi.TraceArgsEndAgg)
			n++
			return true
		}
		switch {
		case t.IsInterface(), t.IsString():
			// Two pointer-sized words.
			_ = visitType(baseOffset, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize, uintptrTyp, depth)
		case t.IsSlice():
			// Three pointer-sized words.
			_ = visitType(baseOffset, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize*2, uintptrTyp, depth)
		case t.IsComplex():
			// Real and imaginary halves.
			_ = visitType(baseOffset, types.FloatForComplex(t), depth) &&
				visitType(baseOffset+t.Size()/2, types.FloatForComplex(t), depth)
		case t.IsArray():
			if t.NumElem() == 0 {
				n++ // an empty aggregate still counts as one component
				break
			}
			for i := int64(0); i < t.NumElem(); i++ {
				if !visitType(baseOffset, t.Elem(), depth) {
					break
				}
				baseOffset += t.Elem().Size()
			}
		case isStructNotSIMD(t):
			if t.NumFields() == 0 {
				n++ // an empty aggregate still counts as one component
				break
			}
			for _, field := range t.Fields() {
				if !visitType(baseOffset+field.Offset, field.Type, depth) {
					break
				}
			}
		}
		writebyte(rtabi.TraceArgsEndAgg)
		return true
	}

	start := 0
	if strings.Contains(f.LSym.Name, "[") {
		// Skip the dictionary argument of instantiated generic functions;
		// it is implicit and the user doesn't need to see it.
		start = 1
	}

	for _, a := range abiInfo.InParams()[start:] {
		if !visitType(a.FrameOffset(abiInfo), a.Type, 0) {
			break
		}
	}
	writebyte(rtabi.TraceArgsEndSeq)
	if wOff > rtabi.TraceArgsMaxLen {
		base.Fatalf("ArgInfo too large")
	}

	return x
}
6914
6915
// emitWrappedFuncInfo emits the info of the wrapped function (its symbol)
// for a wrapper function, so the runtime can resolve it during traceback.
// No-op when the current function is not a wrapper.
func emitWrappedFuncInfo(e *ssafn, pp *objw.Progs) {
	if base.Ctxt.Flag_linkshared {
		// Skipped in linkshared mode; the relative symbol reference
		// emitted below is not supported there.
		return
	}

	wfn := e.curfn.WrappedFunc
	if wfn == nil {
		return
	}

	wsym := wfn.Linksym()
	x := base.Ctxt.LookupInit(fmt.Sprintf("%s.wrapinfo", wsym.Name), func(x *obj.LSym) {
		objw.SymPtrOff(x, 0, wsym)
		x.Set(obj.AttrContentAddressable, true)
		x.Align = 4
	})
	e.curfn.LSym.Func().WrapInfo = x

	// Emit a funcdata record pointing at the wrap info data.
	p := pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(rtabi.FUNCDATA_WrapInfo)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = x
}
6943
6944
6945 func genssa(f *ssa.Func, pp *objw.Progs) {
6946 var s State
6947 s.ABI = f.OwnAux.Fn.ABI()
6948
6949 e := f.Frontend().(*ssafn)
6950
6951 gatherPrintInfo := f.PrintOrHtmlSSA || ssa.GenssaDump[f.Name]
6952
6953 var lv *liveness.Liveness
6954 s.livenessMap, s.partLiveArgs, lv = liveness.Compute(e.curfn, f, e.stkptrsize, pp, gatherPrintInfo)
6955 emitArgInfo(e, f, pp)
6956 argLiveBlockMap, argLiveValueMap := liveness.ArgLiveness(e.curfn, f, pp)
6957
6958 openDeferInfo := e.curfn.LSym.Func().OpenCodedDeferInfo
6959 if openDeferInfo != nil {
6960
6961
6962 p := pp.Prog(obj.AFUNCDATA)
6963 p.From.SetConst(rtabi.FUNCDATA_OpenCodedDeferInfo)
6964 p.To.Type = obj.TYPE_MEM
6965 p.To.Name = obj.NAME_EXTERN
6966 p.To.Sym = openDeferInfo
6967 }
6968
6969 emitWrappedFuncInfo(e, pp)
6970
6971
6972 s.bstart = make([]*obj.Prog, f.NumBlocks())
6973 s.pp = pp
6974 var progToValue map[*obj.Prog]*ssa.Value
6975 var progToBlock map[*obj.Prog]*ssa.Block
6976 var valueToProgAfter []*obj.Prog
6977 if gatherPrintInfo {
6978 progToValue = make(map[*obj.Prog]*ssa.Value, f.NumValues())
6979 progToBlock = make(map[*obj.Prog]*ssa.Block, f.NumBlocks())
6980 f.Logf("genssa %s\n", f.Name)
6981 progToBlock[s.pp.Next] = f.Blocks[0]
6982 }
6983
6984 if base.Ctxt.Flag_locationlists {
6985 if cap(f.Cache.ValueToProgAfter) < f.NumValues() {
6986 f.Cache.ValueToProgAfter = make([]*obj.Prog, f.NumValues())
6987 }
6988 valueToProgAfter = f.Cache.ValueToProgAfter[:f.NumValues()]
6989 clear(valueToProgAfter)
6990 }
6991
6992
6993
6994 firstPos := src.NoXPos
6995 for _, v := range f.Entry.Values {
6996 if v.Pos.IsStmt() == src.PosIsStmt && v.Op != ssa.OpArg && v.Op != ssa.OpArgIntReg && v.Op != ssa.OpArgFloatReg && v.Op != ssa.OpLoadReg && v.Op != ssa.OpStoreReg {
6997 firstPos = v.Pos
6998 v.Pos = firstPos.WithDefaultStmt()
6999 break
7000 }
7001 }
7002
7003
7004
7005
7006 var inlMarks map[*obj.Prog]int32
7007 var inlMarkList []*obj.Prog
7008
7009
7010
7011 var inlMarksByPos map[src.XPos][]*obj.Prog
7012
7013 var argLiveIdx int = -1
7014
7015
7016
7017
7018
7019 var hotAlign, hotRequire int64
7020
7021 if base.Debug.AlignHot > 0 {
7022 switch base.Ctxt.Arch.Name {
7023
7024
7025
7026
7027
7028 case "amd64", "386":
7029
7030
7031
7032 hotAlign = 64
7033 hotRequire = 31
7034 }
7035 }
7036
7037
7038 for i, b := range f.Blocks {
7039
7040 s.lineRunStart = nil
7041 s.SetPos(s.pp.Pos.WithNotStmt())
7042
7043 if hotAlign > 0 && b.Hotness&ssa.HotPgoInitial == ssa.HotPgoInitial {
7044
7045
7046
7047
7048
7049 p := s.pp.Prog(obj.APCALIGNMAX)
7050 p.From.SetConst(hotAlign)
7051 p.To.SetConst(hotRequire)
7052 }
7053
7054 s.bstart[b.ID] = s.pp.Next
7055
7056 if idx, ok := argLiveBlockMap[b.ID]; ok && idx != argLiveIdx {
7057 argLiveIdx = idx
7058 p := s.pp.Prog(obj.APCDATA)
7059 p.From.SetConst(rtabi.PCDATA_ArgLiveIndex)
7060 p.To.SetConst(int64(idx))
7061 }
7062
7063
7064 Arch.SSAMarkMoves(&s, b)
7065 for _, v := range b.Values {
7066 x := s.pp.Next
7067 s.DebugFriendlySetPosFrom(v)
7068
7069 if v.Op.ResultInArg0() && v.ResultReg() != v.Args[0].Reg() {
7070 v.Fatalf("input[0] and output not in same register %s", v.LongString())
7071 }
7072
7073 switch v.Op {
7074 case ssa.OpInitMem:
7075
7076 case ssa.OpArg:
7077
7078 case ssa.OpSP, ssa.OpSB:
7079
7080 case ssa.OpSelect0, ssa.OpSelect1, ssa.OpSelectN, ssa.OpMakeResult:
7081
7082 case ssa.OpGetG:
7083
7084
7085 case ssa.OpVarDef, ssa.OpVarLive, ssa.OpKeepAlive, ssa.OpWBend:
7086
7087 case ssa.OpPhi:
7088 CheckLoweredPhi(v)
7089 case ssa.OpConvert:
7090
7091 if v.Args[0].Reg() != v.Reg() {
7092 v.Fatalf("OpConvert should be a no-op: %s; %s", v.Args[0].LongString(), v.LongString())
7093 }
7094 case ssa.OpInlMark:
7095 p := Arch.Ginsnop(s.pp)
7096 if inlMarks == nil {
7097 inlMarks = map[*obj.Prog]int32{}
7098 inlMarksByPos = map[src.XPos][]*obj.Prog{}
7099 }
7100 inlMarks[p] = v.AuxInt32()
7101 inlMarkList = append(inlMarkList, p)
7102 pos := v.Pos.AtColumn1()
7103 inlMarksByPos[pos] = append(inlMarksByPos[pos], p)
7104 firstPos = src.NoXPos
7105
7106 default:
7107
7108 if firstPos != src.NoXPos && v.Op != ssa.OpArgIntReg && v.Op != ssa.OpArgFloatReg && v.Op != ssa.OpLoadReg && v.Op != ssa.OpStoreReg {
7109 s.SetPos(firstPos)
7110 firstPos = src.NoXPos
7111 }
7112
7113
7114 s.pp.NextLive = s.livenessMap.Get(v)
7115 s.pp.NextUnsafe = s.livenessMap.GetUnsafe(v)
7116
7117
7118 Arch.SSAGenValue(&s, v)
7119 }
7120
7121 if idx, ok := argLiveValueMap[v.ID]; ok && idx != argLiveIdx {
7122 argLiveIdx = idx
7123 p := s.pp.Prog(obj.APCDATA)
7124 p.From.SetConst(rtabi.PCDATA_ArgLiveIndex)
7125 p.To.SetConst(int64(idx))
7126 }
7127
7128 if base.Ctxt.Flag_locationlists {
7129 valueToProgAfter[v.ID] = s.pp.Next
7130 }
7131
7132 if gatherPrintInfo {
7133 for ; x != s.pp.Next; x = x.Link {
7134 progToValue[x] = v
7135 }
7136 }
7137 }
7138
7139 if s.bstart[b.ID] == s.pp.Next && len(b.Succs) == 1 && b.Succs[0].Block() == b {
7140 p := Arch.Ginsnop(s.pp)
7141 p.Pos = p.Pos.WithIsStmt()
7142 if b.Pos == src.NoXPos {
7143 b.Pos = p.Pos
7144 if b.Pos == src.NoXPos {
7145 b.Pos = s.pp.Text.Pos
7146 }
7147 }
7148 b.Pos = b.Pos.WithBogusLine()
7149 }
7150
7151
7152
7153
7154
7155 s.pp.NextUnsafe = s.livenessMap.GetUnsafeBlock(b)
7156
7157
7158 var next *ssa.Block
7159 if i < len(f.Blocks)-1 && base.Flag.N == 0 {
7160
7161
7162
7163
7164 next = f.Blocks[i+1]
7165 }
7166 x := s.pp.Next
7167 s.SetPos(b.Pos)
7168 Arch.SSAGenBlock(&s, b, next)
7169 if gatherPrintInfo {
7170 for ; x != s.pp.Next; x = x.Link {
7171 progToBlock[x] = b
7172 }
7173 }
7174 }
7175 if f.Blocks[len(f.Blocks)-1].Kind == ssa.BlockExit {
7176
7177
7178
7179
7180 Arch.Ginsnop(s.pp)
7181 }
7182 if openDeferInfo != nil {
7183
7184
7185
7186
7187
7188
7189
7190
7191 s.pp.NextLive = s.livenessMap.DeferReturn
7192 p := s.pp.Prog(obj.ACALL)
7193 p.To.Type = obj.TYPE_MEM
7194 p.To.Name = obj.NAME_EXTERN
7195 p.To.Sym = ir.Syms.Deferreturn
7196
7197
7198
7199
7200
7201 for _, o := range f.OwnAux.ABIInfo().OutParams() {
7202 n := o.Name
7203 rts, offs := o.RegisterTypesAndOffsets()
7204 for i := range o.Registers {
7205 Arch.LoadRegResult(&s, f, rts[i], ssa.ObjRegForAbiReg(o.Registers[i], f.Config), n, offs[i])
7206 }
7207 }
7208
7209 s.pp.Prog(obj.ARET)
7210 }
7211
7212 if inlMarks != nil {
7213 hasCall := false
7214
7215
7216
7217
7218 for p := s.pp.Text; p != nil; p = p.Link {
7219 if p.As == obj.ANOP || p.As == obj.AFUNCDATA || p.As == obj.APCDATA || p.As == obj.ATEXT ||
7220 p.As == obj.APCALIGN || p.As == obj.APCALIGNMAX || Arch.LinkArch.Family == sys.Wasm {
7221
7222
7223
7224
7225
7226 continue
7227 }
7228 if _, ok := inlMarks[p]; ok {
7229
7230
7231 continue
7232 }
7233 if p.As == obj.ACALL || p.As == obj.ADUFFCOPY || p.As == obj.ADUFFZERO {
7234 hasCall = true
7235 }
7236 pos := p.Pos.AtColumn1()
7237 marks := inlMarksByPos[pos]
7238 if len(marks) == 0 {
7239 continue
7240 }
7241 for _, m := range marks {
7242
7243
7244
7245 p.Pos = p.Pos.WithIsStmt()
7246 s.pp.CurFunc.LSym.Func().AddInlMark(p, inlMarks[m])
7247
7248 m.As = obj.ANOP
7249 m.Pos = src.NoXPos
7250 m.From = obj.Addr{}
7251 m.To = obj.Addr{}
7252 }
7253 delete(inlMarksByPos, pos)
7254 }
7255
7256 for _, p := range inlMarkList {
7257 if p.As != obj.ANOP {
7258 s.pp.CurFunc.LSym.Func().AddInlMark(p, inlMarks[p])
7259 }
7260 }
7261
7262 if e.stksize == 0 && !hasCall {
7263
7264
7265
7266
7267
7268
7269 for p := s.pp.Text; p != nil; p = p.Link {
7270 if p.As == obj.AFUNCDATA || p.As == obj.APCDATA || p.As == obj.ATEXT || p.As == obj.ANOP {
7271 continue
7272 }
7273 if base.Ctxt.PosTable.Pos(p.Pos).Base().InliningIndex() >= 0 {
7274
7275 nop := Arch.Ginsnop(s.pp)
7276 nop.Pos = e.curfn.Pos().WithIsStmt()
7277
7278
7279
7280
7281
7282 for x := s.pp.Text; x != nil; x = x.Link {
7283 if x.Link == nop {
7284 x.Link = nop.Link
7285 break
7286 }
7287 }
7288
7289 for x := s.pp.Text; x != nil; x = x.Link {
7290 if x.Link == p {
7291 nop.Link = p
7292 x.Link = nop
7293 break
7294 }
7295 }
7296 }
7297 break
7298 }
7299 }
7300 }
7301
7302 if base.Ctxt.Flag_locationlists {
7303 var debugInfo *ssa.FuncDebug
7304 debugInfo = e.curfn.DebugInfo.(*ssa.FuncDebug)
7305
7306
7307 debugInfo.EntryID = f.Entry.ID
7308 if e.curfn.ABI == obj.ABIInternal && base.Flag.N != 0 {
7309 ssa.BuildFuncDebugNoOptimized(base.Ctxt, f, base.Debug.LocationLists > 1, StackOffset, debugInfo)
7310 } else {
7311 ssa.BuildFuncDebug(base.Ctxt, f, base.Debug.LocationLists, StackOffset, debugInfo)
7312 }
7313 bstart := s.bstart
7314 idToIdx := make([]int, f.NumBlocks())
7315 for i, b := range f.Blocks {
7316 idToIdx[b.ID] = i
7317 }
7318
7319
7320
7321 debugInfo.GetPC = func(b, v ssa.ID) int64 {
7322 switch v {
7323 case ssa.BlockStart.ID:
7324 if b == f.Entry.ID {
7325 return 0
7326
7327 }
7328 return bstart[b].Pc
7329 case ssa.BlockEnd.ID:
7330 blk := f.Blocks[idToIdx[b]]
7331 nv := len(blk.Values)
7332 return valueToProgAfter[blk.Values[nv-1].ID].Pc
7333 case ssa.FuncEnd.ID:
7334 return e.curfn.LSym.Size
7335 default:
7336 return valueToProgAfter[v].Pc
7337 }
7338 }
7339 }
7340
7341
7342 for _, br := range s.Branches {
7343 br.P.To.SetTarget(s.bstart[br.B.ID])
7344 if br.P.Pos.IsStmt() != src.PosIsStmt {
7345 br.P.Pos = br.P.Pos.WithNotStmt()
7346 } else if v0 := br.B.FirstPossibleStmtValue(); v0 != nil && v0.Pos.Line() == br.P.Pos.Line() && v0.Pos.IsStmt() == src.PosIsStmt {
7347 br.P.Pos = br.P.Pos.WithNotStmt()
7348 }
7349
7350 }
7351
7352
7353 for _, jt := range s.JumpTables {
7354
7355 targets := make([]*obj.Prog, len(jt.Succs))
7356 for i, e := range jt.Succs {
7357 targets[i] = s.bstart[e.Block().ID]
7358 }
7359
7360
7361
7362 fi := s.pp.CurFunc.LSym.Func()
7363 fi.JumpTables = append(fi.JumpTables, obj.JumpTable{Sym: jt.Aux.(*obj.LSym), Targets: targets})
7364 }
7365
7366 if e.log {
7367 filename := ""
7368 for p := s.pp.Text; p != nil; p = p.Link {
7369 if p.Pos.IsKnown() && p.InnermostFilename() != filename {
7370 filename = p.InnermostFilename()
7371 f.Logf("# %s\n", filename)
7372 }
7373
7374 var s string
7375 if v, ok := progToValue[p]; ok {
7376 s = v.String()
7377 } else if b, ok := progToBlock[p]; ok {
7378 s = b.String()
7379 } else {
7380 s = " "
7381 }
7382 f.Logf(" %-6s\t%.5d (%s)\t%s\n", s, p.Pc, p.InnermostLineNumber(), p.InstructionString())
7383 }
7384 }
7385 if f.HTMLWriter != nil {
7386 var buf strings.Builder
7387 buf.WriteString("<code>")
7388 buf.WriteString("<dl class=\"ssa-gen\">")
7389 filename := ""
7390
7391 liveness := lv.Format(nil)
7392 if liveness != "" {
7393 buf.WriteString("<dt class=\"ssa-prog-src\"></dt><dd class=\"ssa-prog\">")
7394 buf.WriteString(html.EscapeString("# " + liveness))
7395 buf.WriteString("</dd>")
7396 }
7397
7398 for p := s.pp.Text; p != nil; p = p.Link {
7399
7400
7401 if p.Pos.IsKnown() && p.InnermostFilename() != filename {
7402 filename = p.InnermostFilename()
7403 buf.WriteString("<dt class=\"ssa-prog-src\"></dt><dd class=\"ssa-prog\">")
7404 buf.WriteString(html.EscapeString("# " + filename))
7405 buf.WriteString("</dd>")
7406 }
7407
7408 buf.WriteString("<dt class=\"ssa-prog-src\">")
7409 if v, ok := progToValue[p]; ok {
7410
7411
7412 if p.As != obj.APCDATA {
7413 if liveness := lv.Format(v); liveness != "" {
7414
7415 buf.WriteString("</dt><dd class=\"ssa-prog\">")
7416 buf.WriteString(html.EscapeString("# " + liveness))
7417 buf.WriteString("</dd>")
7418
7419 buf.WriteString("<dt class=\"ssa-prog-src\">")
7420 }
7421 }
7422
7423 buf.WriteString(v.HTML())
7424 } else if b, ok := progToBlock[p]; ok {
7425 buf.WriteString("<b>" + b.HTML() + "</b>")
7426 }
7427 buf.WriteString("</dt>")
7428 buf.WriteString("<dd class=\"ssa-prog\">")
7429 fmt.Fprintf(&buf, "%.5d <span class=\"l%v line-number\">(%s)</span> %s", p.Pc, p.InnermostLineNumber(), p.InnermostLineNumberHTML(), html.EscapeString(p.InstructionString()))
7430 buf.WriteString("</dd>")
7431 }
7432 buf.WriteString("</dl>")
7433 buf.WriteString("</code>")
7434 f.HTMLWriter.WriteColumn("genssa", "genssa", "ssa-prog", buf.String())
7435 }
7436 if ssa.GenssaDump[f.Name] {
7437 fi := f.DumpFileForPhase("genssa")
7438 if fi != nil {
7439
7440
7441 inliningDiffers := func(a, b []src.Pos) bool {
7442 if len(a) != len(b) {
7443 return true
7444 }
7445 for i := range a {
7446 if a[i].Filename() != b[i].Filename() {
7447 return true
7448 }
7449 if i != len(a)-1 && a[i].Line() != b[i].Line() {
7450 return true
7451 }
7452 }
7453 return false
7454 }
7455
7456 var allPosOld []src.Pos
7457 var allPos []src.Pos
7458
7459 for p := s.pp.Text; p != nil; p = p.Link {
7460 if p.Pos.IsKnown() {
7461 allPos = allPos[:0]
7462 p.Ctxt.AllPos(p.Pos, func(pos src.Pos) { allPos = append(allPos, pos) })
7463 if inliningDiffers(allPos, allPosOld) {
7464 for _, pos := range allPos {
7465 fmt.Fprintf(fi, "# %s:%d\n", pos.Filename(), pos.Line())
7466 }
7467 allPos, allPosOld = allPosOld, allPos
7468 }
7469 }
7470
7471 var s string
7472 if v, ok := progToValue[p]; ok {
7473 s = v.String()
7474 } else if b, ok := progToBlock[p]; ok {
7475 s = b.String()
7476 } else {
7477 s = " "
7478 }
7479 fmt.Fprintf(fi, " %-6s\t%.5d %s\t%s\n", s, p.Pc, ssa.StmtString(p.Pos), p.InstructionString())
7480 }
7481 fi.Close()
7482 }
7483 }
7484
7485 defframe(&s, e, f)
7486
7487 f.HTMLWriter.Close()
7488 f.HTMLWriter = nil
7489 }
7490
// defframe finalizes the function's TEXT pseudo-instruction with the
// computed argument and frame sizes, eagerly spills pointer-carrying
// parts of partially-live register arguments, and zero-initializes all
// stack slots marked Needzero, coalescing adjacent slots into as few
// Arch.ZeroRange calls as possible.
func defframe(s *State, e *ssafn, f *ssa.Func) {
	pp := s.pp

	// Frame size: outgoing-argument area rounded up to the stack
	// alignment, plus the local-slot area, plus any arch-specific pad.
	s.maxarg = types.RoundUp(s.maxarg, e.stkalign)
	frame := s.maxarg + e.stksize
	if Arch.PadFrame != nil {
		frame = Arch.PadFrame(frame)
	}

	// Fill in argument size and frame size in the TEXT instruction.
	pp.Text.To.Type = obj.TYPE_TEXTSIZE
	pp.Text.To.Val = int32(types.RoundUp(f.OwnAux.ArgWidth(), int64(types.RegSize)))
	pp.Text.To.Offset = frame

	// Prologue instructions below are inserted after p (the TEXT op).
	p := pp.Text

	// With register-based calling and optimization on, arguments that
	// are partially live (tracked in s.partLiveArgs) may have only some
	// of their registers spilled by the entry block. Spill the
	// pointer-holding parts here so the whole argument is visible to
	// stack scanning.
	if f.OwnAux.ABIInfo().InRegistersUsed() != 0 && base.Flag.N == 0 {
		// First, record which (name, offset) pairs the entry block
		// already spills before its first call.
		type nameOff struct {
			n   *ir.Name
			off int64
		}
		partLiveArgsSpilled := make(map[nameOff]bool)
		for _, v := range f.Entry.Values {
			if v.Op.IsCall() {
				// Stop at the first call; spills after it don't count.
				break
			}
			if v.Op != ssa.OpStoreReg || v.Args[0].Op != ssa.OpArgIntReg {
				continue
			}
			n, off := ssa.AutoVar(v)
			if n.Class != ir.PPARAM || n.Addrtaken() || !ssa.CanSSA(n.Type()) || !s.partLiveArgs[n] {
				continue
			}
			partLiveArgsSpilled[nameOff{n, off}] = true
		}

		// Then emit spills for pointer-typed register pieces of
		// multi-register, partially-live arguments not already spilled.
		for _, a := range f.OwnAux.ABIInfo().InParams() {
			n := a.Name
			if n == nil || n.Addrtaken() || !ssa.CanSSA(n.Type()) || !s.partLiveArgs[n] || len(a.Registers) <= 1 {
				continue
			}
			rts, offs := a.RegisterTypesAndOffsets()
			for i := range a.Registers {
				if !rts[i].HasPointers() {
					// Non-pointer pieces need no eager spill.
					continue
				}
				if partLiveArgsSpilled[nameOff{n, offs[i]}] {
					continue // already spilled by the entry block
				}
				reg := ssa.ObjRegForAbiReg(a.Registers[i], f.Config)
				p = Arch.SpillArgReg(pp, p, f, rts[i], reg, n, offs[i])
			}
		}
	}

	// Zero ambiguously-live (Needzero) autos.
	// [lo, hi) is the current pending range of frame offsets to zero;
	// state is scratch for Arch.ZeroRange across calls.
	var lo, hi int64
	var state uint32

	// NOTE(review): the merge logic below only extends lo downward, so
	// it appears to assume e.curfn.Dcl visits Needzero autos in
	// decreasing frame-offset order — confirm against the frame layout
	// code elsewhere in the package.
	for _, n := range e.curfn.Dcl {
		if !n.Needzero() {
			continue
		}
		if n.Class != ir.PAUTO {
			e.Fatalf(n.Pos(), "needzero class %d", n.Class)
		}
		// Needzero slots must be pointer-aligned, pointer-sized
		// multiples, and non-empty.
		if n.Type().Size()%int64(types.PtrSize) != 0 || n.FrameOffset()%int64(types.PtrSize) != 0 || n.Type().Size() == 0 {
			e.Fatalf(n.Pos(), "var %L has size %d offset %d", n, n.Type().Size(), n.Offset_)
		}

		if lo != hi && n.FrameOffset()+n.Type().Size() >= lo-int64(2*types.RegSize) {
			// Close enough to the pending range (gap < 2 registers):
			// merge rather than emitting a separate zeroing sequence.
			lo = n.FrameOffset()
			continue
		}

		// Flush the pending range.
		p = Arch.ZeroRange(pp, p, frame+lo, hi-lo, &state)

		// Start a new range at this slot.
		lo = n.FrameOffset()
		hi = lo + n.Type().Size()
	}

	// Flush the final pending range (zero-length ranges are harmless).
	Arch.ZeroRange(pp, p, frame+lo, hi-lo, &state)
}
7597
7598
// IndexJump describes one conditional branch used by CombJump:
// the branch instruction to emit, and the index into b.Succs of the
// successor block it targets.
type IndexJump struct {
	Jump  obj.As
	Index int
}
7603
7604 func (s *State) oneJump(b *ssa.Block, jump *IndexJump) {
7605 p := s.Br(jump.Jump, b.Succs[jump.Index].Block())
7606 p.Pos = b.Pos
7607 }
7608
7609
7610
7611 func (s *State) CombJump(b, next *ssa.Block, jumps *[2][2]IndexJump) {
7612 switch next {
7613 case b.Succs[0].Block():
7614 s.oneJump(b, &jumps[0][0])
7615 s.oneJump(b, &jumps[0][1])
7616 case b.Succs[1].Block():
7617 s.oneJump(b, &jumps[1][0])
7618 s.oneJump(b, &jumps[1][1])
7619 default:
7620 var q *obj.Prog
7621 if b.Likely != ssa.BranchUnlikely {
7622 s.oneJump(b, &jumps[1][0])
7623 s.oneJump(b, &jumps[1][1])
7624 q = s.Br(obj.AJMP, b.Succs[1].Block())
7625 } else {
7626 s.oneJump(b, &jumps[0][0])
7627 s.oneJump(b, &jumps[0][1])
7628 q = s.Br(obj.AJMP, b.Succs[0].Block())
7629 }
7630 q.Pos = b.Pos
7631 }
7632 }
7633
7634
// AddAux adds the offset and symbol information carried in v's Aux and
// AuxInt fields to the address a. Shorthand for AddAux2(a, v, v.AuxInt).
func AddAux(a *obj.Addr, v *ssa.Value) {
	AddAux2(a, v, v.AuxInt)
}
7638 func AddAux2(a *obj.Addr, v *ssa.Value, offset int64) {
7639 if a.Type != obj.TYPE_MEM && a.Type != obj.TYPE_ADDR {
7640 v.Fatalf("bad AddAux addr %v", a)
7641 }
7642
7643 a.Offset += offset
7644
7645
7646 if v.Aux == nil {
7647 return
7648 }
7649
7650 switch n := v.Aux.(type) {
7651 case *ssa.AuxCall:
7652 a.Name = obj.NAME_EXTERN
7653 a.Sym = n.Fn
7654 case *obj.LSym:
7655 a.Name = obj.NAME_EXTERN
7656 a.Sym = n
7657 case *ir.Name:
7658 if n.Class == ir.PPARAM || (n.Class == ir.PPARAMOUT && !n.IsOutputParamInRegisters()) {
7659 a.Name = obj.NAME_PARAM
7660 } else {
7661 a.Name = obj.NAME_AUTO
7662 }
7663 a.Sym = n.Linksym()
7664 a.Offset += n.FrameOffset()
7665 default:
7666 v.Fatalf("aux in %s not implemented %#v", v, v.Aux)
7667 }
7668 }
7669
7670
7671
// extendIndex extends idx to the machine int width for use as an index.
// When idx is wider than a pointer (a 64-bit index on a 32-bit target)
// it emits a runtime check that the value fits, panicking with the
// given bounds kind otherwise; len is passed along to the panic for
// error reporting. bounded means the caller has already proven the
// index is in range, so no check is needed.
func (s *state) extendIndex(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
	size := idx.Type.Size()
	if size == s.config.PtrSize {
		// Already int-width; use as is.
		return idx
	}
	if size > s.config.PtrSize {
		// 64-bit index on a 32-bit target: take the low word as the
		// index and verify the high word is zero. (For a signed index,
		// a nonzero high word also covers the negative case.)
		var lo *ssa.Value
		if idx.Type.IsSigned() {
			lo = s.newValue1(ssa.OpInt64Lo, types.Types[types.TINT], idx)
		} else {
			lo = s.newValue1(ssa.OpInt64Lo, types.Types[types.TUINT], idx)
		}
		if bounded || base.Flag.B != 0 {
			// Checks proven unnecessary or disabled with -B.
			return lo
		}
		bNext := s.f.NewBlock(ssa.BlockPlain)
		bPanic := s.f.NewBlock(ssa.BlockExit)
		hi := s.newValue1(ssa.OpInt64Hi, types.Types[types.TUINT32], idx)
		cmp := s.newValue2(ssa.OpEq32, types.Types[types.TBOOL], hi, s.constInt32(types.Types[types.TUINT32], 0))
		if !idx.Type.IsSigned() {
			// Switch to the unsigned variant of the bounds kind so
			// the failure is reported with unsigned semantics.
			switch kind {
			case ssa.BoundsIndex:
				kind = ssa.BoundsIndexU
			case ssa.BoundsSliceAlen:
				kind = ssa.BoundsSliceAlenU
			case ssa.BoundsSliceAcap:
				kind = ssa.BoundsSliceAcapU
			case ssa.BoundsSliceB:
				kind = ssa.BoundsSliceBU
			case ssa.BoundsSlice3Alen:
				kind = ssa.BoundsSlice3AlenU
			case ssa.BoundsSlice3Acap:
				kind = ssa.BoundsSlice3AcapU
			case ssa.BoundsSlice3B:
				kind = ssa.BoundsSlice3BU
			case ssa.BoundsSlice3C:
				kind = ssa.BoundsSlice3CU
			}
		}
		// if hi == 0 { goto bNext } else { goto bPanic }
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cmp)
		b.Likely = ssa.BranchLikely
		b.AddEdgeTo(bNext)
		b.AddEdgeTo(bPanic)

		s.startBlock(bPanic)
		mem := s.newValue4I(ssa.OpPanicExtend, types.TypeMem, int64(kind), hi, lo, len, s.mem())
		s.endBlock().SetControl(mem)
		s.startBlock(bNext)

		return lo
	}

	// Index is narrower than int: sign- or zero-extend it.
	// The switch key encodes (index size, pointer size) as
	// 10*size + PtrSize, e.g. 14 = 1-byte index, 4-byte pointer.
	var op ssa.Op
	if idx.Type.IsSigned() {
		switch 10*size + s.config.PtrSize {
		case 14:
			op = ssa.OpSignExt8to32
		case 18:
			op = ssa.OpSignExt8to64
		case 24:
			op = ssa.OpSignExt16to32
		case 28:
			op = ssa.OpSignExt16to64
		case 48:
			op = ssa.OpSignExt32to64
		default:
			s.Fatalf("bad signed index extension %s", idx.Type)
		}
	} else {
		switch 10*size + s.config.PtrSize {
		case 14:
			op = ssa.OpZeroExt8to32
		case 18:
			op = ssa.OpZeroExt8to64
		case 24:
			op = ssa.OpZeroExt16to32
		case 28:
			op = ssa.OpZeroExt16to64
		case 48:
			op = ssa.OpZeroExt32to64
		default:
			s.Fatalf("bad unsigned index extension %s", idx.Type)
		}
	}
	return s.newValue1(op, types.Types[types.TINT], idx)
}
7763
7764
7765
7766 func CheckLoweredPhi(v *ssa.Value) {
7767 if v.Op != ssa.OpPhi {
7768 v.Fatalf("CheckLoweredPhi called with non-phi value: %v", v.LongString())
7769 }
7770 if v.Type.IsMemory() {
7771 return
7772 }
7773 f := v.Block.Func
7774 loc := f.RegAlloc[v.ID]
7775 for _, a := range v.Args {
7776 if aloc := f.RegAlloc[a.ID]; aloc != loc {
7777 v.Fatalf("phi arg at different location than phi: %v @ %s, but arg %v @ %s\n%s\n", v, loc, a, aloc, v.Block.Func)
7778 }
7779 }
7780 }
7781
7782
7783
7784
7785
7786 func CheckLoweredGetClosurePtr(v *ssa.Value) {
7787 entry := v.Block.Func.Entry
7788 if entry != v.Block {
7789 base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
7790 }
7791 for _, w := range entry.Values {
7792 if w == v {
7793 break
7794 }
7795 switch w.Op {
7796 case ssa.OpArgIntReg, ssa.OpArgFloatReg:
7797
7798 default:
7799 base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
7800 }
7801 }
7802 }
7803
7804
// CheckArgReg verifies that v (an ArgIntReg/ArgFloatReg pseudo-op)
// lives in the function's entry block; it fatals otherwise.
func CheckArgReg(v *ssa.Value) {
	entry := v.Block.Func.Entry
	if entry != v.Block {
		base.Fatalf("in %s, badly placed ArgIReg or ArgFReg: %v %v", v.Block.Func.Name, v.Block, v)
	}
}
7811
7812 func AddrAuto(a *obj.Addr, v *ssa.Value) {
7813 n, off := ssa.AutoVar(v)
7814 a.Type = obj.TYPE_MEM
7815 a.Sym = n.Linksym()
7816 a.Reg = int16(Arch.REGSP)
7817 a.Offset = n.FrameOffset() + off
7818 if n.Class == ir.PPARAM || (n.Class == ir.PPARAMOUT && !n.IsOutputParamInRegisters()) {
7819 a.Name = obj.NAME_PARAM
7820 } else {
7821 a.Name = obj.NAME_AUTO
7822 }
7823 }
7824
7825
7826
7827 func (s *State) Call(v *ssa.Value) *obj.Prog {
7828 pPosIsStmt := s.pp.Pos.IsStmt()
7829 s.PrepareCall(v)
7830
7831 p := s.Prog(obj.ACALL)
7832 if pPosIsStmt == src.PosIsStmt {
7833 p.Pos = v.Pos.WithIsStmt()
7834 } else {
7835 p.Pos = v.Pos.WithNotStmt()
7836 }
7837 if sym, ok := v.Aux.(*ssa.AuxCall); ok && sym.Fn != nil {
7838 p.To.Type = obj.TYPE_MEM
7839 p.To.Name = obj.NAME_EXTERN
7840 p.To.Sym = sym.Fn
7841 } else {
7842
7843 switch Arch.LinkArch.Family {
7844 case sys.AMD64, sys.I386, sys.PPC64, sys.RISCV64, sys.S390X, sys.Wasm:
7845 p.To.Type = obj.TYPE_REG
7846 case sys.ARM, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64:
7847 p.To.Type = obj.TYPE_MEM
7848 default:
7849 base.Fatalf("unknown indirect call family")
7850 }
7851 p.To.Reg = v.Args[0].Reg()
7852 }
7853 return p
7854 }
7855
7856
7857
// TailCall emits a tail-call instruction for the SSA value v: it builds
// the instruction exactly like Call and then rewrites the opcode to
// ARET. NOTE(review): the obj back ends appear to treat a RET with a
// call target as a tail jump — confirm per architecture.
func (s *State) TailCall(v *ssa.Value) *obj.Prog {
	p := s.Call(v)
	p.As = obj.ARET
	return p
}
7863
7864
7865
7866
7867 func (s *State) PrepareCall(v *ssa.Value) {
7868 idx := s.livenessMap.Get(v)
7869 if !idx.StackMapValid() {
7870
7871 if sym, ok := v.Aux.(*ssa.AuxCall); !ok || !(sym.Fn == ir.Syms.WBZero || sym.Fn == ir.Syms.WBMove) {
7872 base.Fatalf("missing stack map index for %v", v.LongString())
7873 }
7874 }
7875
7876 call, ok := v.Aux.(*ssa.AuxCall)
7877
7878 if ok {
7879
7880
7881 if nowritebarrierrecCheck != nil {
7882 nowritebarrierrecCheck.recordCall(s.pp.CurFunc, call.Fn, v.Pos)
7883 }
7884 }
7885
7886 if s.maxarg < v.AuxInt {
7887 s.maxarg = v.AuxInt
7888 }
7889 }
7890
7891
7892
7893 func (s *State) UseArgs(n int64) {
7894 if s.maxarg < n {
7895 s.maxarg = n
7896 }
7897 }
7898
7899
7900 func fieldIdx(n *ir.SelectorExpr) int {
7901 t := n.X.Type()
7902 if !isStructNotSIMD(t) {
7903 panic("ODOT's LHS is not a struct")
7904 }
7905
7906 for i, f := range t.Fields() {
7907 if f.Sym == n.Sel {
7908 if f.Offset != n.Offset() {
7909 panic("field offset doesn't match")
7910 }
7911 return i
7912 }
7913 }
7914 panic(fmt.Sprintf("can't find field in expr %v\n", n))
7915
7916
7917
7918 }
7919
7920
7921
// ssafn is the frontend state handed to the SSA backend for one
// function; it also implements the compiler-service callbacks
// (StringData, Logf, Fatalf, Syslook, ...) the backend uses.
type ssafn struct {
	curfn   *ir.Func             // function being compiled
	strings map[string]*obj.LSym // cache: constant string -> data symbol (see StringData)
	// stksize/stkptrsize: frame sizes consumed by defframe.
	// NOTE(review): stkptrsize is not read in this excerpt — presumably
	// the pointer-containing portion of the frame; confirm where it is set.
	stksize    int64
	stkptrsize int64

	// stkalign: alignment for the outgoing-argument area
	// (defframe rounds maxarg up to this).
	stkalign int64

	log bool // whether Logf output is enabled (see Log/Logf)
}
7936
7937
7938
7939 func (e *ssafn) StringData(s string) *obj.LSym {
7940 if aux, ok := e.strings[s]; ok {
7941 return aux
7942 }
7943 if e.strings == nil {
7944 e.strings = make(map[string]*obj.LSym)
7945 }
7946 data := staticdata.StringSym(e.curfn.Pos(), s)
7947 e.strings[s] = data
7948 return data
7949 }
7950
7951
7952 func (e *ssafn) SplitSlot(parent *ssa.LocalSlot, suffix string, offset int64, t *types.Type) ssa.LocalSlot {
7953 node := parent.N
7954
7955 if node.Class != ir.PAUTO || node.Addrtaken() {
7956
7957 return ssa.LocalSlot{N: node, Type: t, Off: parent.Off + offset}
7958 }
7959
7960 sym := &types.Sym{Name: node.Sym().Name + suffix, Pkg: types.LocalPkg}
7961 n := e.curfn.NewLocal(parent.N.Pos(), sym, t)
7962 n.SetUsed(true)
7963 n.SetEsc(ir.EscNever)
7964 types.CalcSize(t)
7965 return ssa.LocalSlot{N: n, Type: t, Off: 0, SplitOf: parent, SplitOffset: offset}
7966 }
7967
7968
7969 func (e *ssafn) Logf(msg string, args ...any) {
7970 if e.log {
7971 fmt.Printf(msg, args...)
7972 }
7973 }
7974
// Log reports whether SSA debug logging is enabled for this function.
func (e *ssafn) Log() bool {
	return e.log
}
7978
7979
7980 func (e *ssafn) Fatalf(pos src.XPos, msg string, args ...any) {
7981 base.Pos = pos
7982 nargs := append([]any{ir.FuncName(e.curfn)}, args...)
7983 base.Fatalf("'%s': "+msg, nargs...)
7984 }
7985
7986
7987
// Warnl emits a compiler warning at the given position.
func (e *ssafn) Warnl(pos src.XPos, fmt_ string, args ...any) {
	base.WarnfAt(pos, fmt_, args...)
}
7991
// Debug_checknil reports whether nil-check debugging (-d=nil) is on.
func (e *ssafn) Debug_checknil() bool {
	return base.Debug.Nil != 0
}
7995
// UseWriteBarrier reports whether write barriers are enabled
// (the -wb flag).
func (e *ssafn) UseWriteBarrier() bool {
	return base.Flag.WB
}
7999
// Syslook returns the linker symbol for the named runtime support
// function. Only the names listed here are valid; any other name is a
// compiler bug and fatals.
func (e *ssafn) Syslook(name string) *obj.LSym {
	switch name {
	case "goschedguarded":
		return ir.Syms.Goschedguarded
	case "writeBarrier":
		return ir.Syms.WriteBarrier
	case "wbZero":
		return ir.Syms.WBZero
	case "wbMove":
		return ir.Syms.WBMove
	case "cgoCheckMemmove":
		return ir.Syms.CgoCheckMemmove
	case "cgoCheckPtrWrite":
		return ir.Syms.CgoCheckPtrWrite
	}
	e.Fatalf(src.NoXPos, "unknown Syslook func %v", name)
	return nil // not reached: Fatalf aborts compilation
}
8018
// Func returns the function being compiled.
func (e *ssafn) Func() *ir.Func {
	return e.curfn
}
8022
8023 func clobberBase(n ir.Node) ir.Node {
8024 if n.Op() == ir.ODOT {
8025 n := n.(*ir.SelectorExpr)
8026 if n.X.Type().NumFields() == 1 {
8027 return clobberBase(n.X)
8028 }
8029 }
8030 if n.Op() == ir.OINDEX {
8031 n := n.(*ir.IndexExpr)
8032 if n.X.Type().IsArray() && n.X.Type().NumElem() == 1 {
8033 return clobberBase(n.X)
8034 }
8035 }
8036 return n
8037 }
8038
8039
8040 func callTargetLSym(callee *ir.Name) *obj.LSym {
8041 if callee.Func == nil {
8042
8043
8044
8045 return callee.Linksym()
8046 }
8047
8048 return callee.LinksymABI(callee.Func.ABI)
8049 }
8050
8051
// deferStructFnField is the index of the "fn" field in the struct built
// by deferstruct; deferstruct fatals if the two ever disagree.
const deferStructFnField = 4

// deferType caches the singleton type built by deferstruct.
var deferType *types.Type
8055
8056
8057
// deferstruct builds (once, then caches in deferType) a named struct
// type mirroring the runtime's _defer record, for stack-allocated
// defers. NOTE(review): field order/types presumably must stay in sync
// with the runtime's _defer definition — confirm before changing.
func deferstruct() *types.Type {
	if deferType != nil {
		return deferType
	}

	makefield := func(name string, t *types.Type) *types.Field {
		// Fields get package-less symbols.
		sym := (*types.Pkg)(nil).Lookup(name)
		return types.NewField(src.NoXPos, sym, t)
	}

	// NOTE(review): pointer-ish runtime fields (fn, link, head) are
	// declared here as uintptr — presumably to keep this type free of
	// pointers for GC purposes; confirm against the runtime definition.
	fields := []*types.Field{
		makefield("heap", types.Types[types.TBOOL]),
		makefield("rangefunc", types.Types[types.TBOOL]),
		makefield("sp", types.Types[types.TUINTPTR]),
		makefield("pc", types.Types[types.TUINTPTR]),
		makefield("fn", types.Types[types.TUINTPTR]),
		makefield("link", types.Types[types.TUINTPTR]),
		makefield("head", types.Types[types.TUINTPTR]),
	}
	// Cross-check the deferStructFnField constant against the layout.
	if name := fields[deferStructFnField].Sym.Name; name != "fn" {
		base.Fatalf("deferStructFnField is %q, not fn", name)
	}

	// Declare the named type runtime._defer around the struct.
	n := ir.NewDeclNameAt(src.NoXPos, ir.OTYPE, ir.Pkgs.Runtime.Lookup("_defer"))
	typ := types.NewNamed(n)
	n.SetType(typ)
	n.SetTypecheck(1)

	// Attach the field list and compute the layout.
	typ.SetUnderlying(types.NewStruct(fields))
	types.CalcStructSize(typ)

	deferType = typ
	return typ
}
8096
8097
8098
8099
8100
8101 func SpillSlotAddr(spill ssa.Spill, baseReg int16, extraOffset int64) obj.Addr {
8102 return obj.Addr{
8103 Name: obj.NAME_NONE,
8104 Type: obj.TYPE_MEM,
8105 Reg: baseReg,
8106 Offset: spill.Offset + extraOffset,
8107 }
8108 }
8109
8110 func isStructNotSIMD(t *types.Type) bool {
8111 return t.IsStruct() && !t.IsSIMD()
8112 }
8113
// BoundsCheckFunc holds the runtime bounds-failure entry points,
// indexed by ssa.BoundsKind. NOTE(review): populated outside this
// excerpt — presumably during backend initialization; confirm.
var BoundsCheckFunc [ssa.BoundsKindCount]*obj.LSym
8115
// (removed stray go.dev page chrome: "View as plain text")