// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package walk

import (
	"go/constant"
	"internal/abi"

	"cmd/compile/internal/base"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/reflectdata"
	"cmd/compile/internal/typecheck"
	"cmd/compile/internal/types"
	"cmd/internal/src"
)
    18  
// walkAssign walks an OAS (AssignExpr) or OASOP (AssignOpExpr) node.
func walkAssign(init *ir.Nodes, n ir.Node) ir.Node {
	init.Append(ir.TakeInit(n)...)

	// Pull out the destination and source operands; both statement
	// kinds carry them in the X and Y fields.
	var left, right ir.Node
	switch n.Op() {
	case ir.OAS:
		n := n.(*ir.AssignStmt)
		left, right = n.X, n.Y
	case ir.OASOP:
		n := n.(*ir.AssignOpStmt)
		left, right = n.X, n.Y
	}

	// Recognize m[k] = append(m[k], ...) so we can reuse
	// the mapassign call.
	var mapAppend *ir.CallExpr
	if left.Op() == ir.OINDEXMAP && right.Op() == ir.OAPPEND {
		left := left.(*ir.IndexExpr)
		mapAppend = right.(*ir.CallExpr)
		if !ir.SameSafeExpr(left, mapAppend.Args[0]) {
			base.Fatalf("not same expressions: %v != %v", left, mapAppend.Args[0])
		}
	}

	// Walk the destination once and make it safe to evaluate repeatedly,
	// so that both the m[k] inside the recognized append call (if any)
	// and the OASOP rewrite below reuse the already-walked expression.
	left = walkExpr(left, init)
	left = safeExpr(left, init)
	if mapAppend != nil {
		mapAppend.Args[0] = left
	}

	if n.Op() == ir.OASOP {
		// Rewrite x op= y into x = x op y.
		n = ir.NewAssignStmt(base.Pos, left, typecheck.Expr(ir.NewBinaryExpr(base.Pos, n.(*ir.AssignOpStmt).AsOp, left, right)))
	} else {
		n.(*ir.AssignStmt).X = left
	}
	as := n.(*ir.AssignStmt)

	// oaslit may lower a composite-literal assignment entirely into init;
	// if so, replace the statement with an empty block.
	if oaslit(as, init) {
		return ir.NewBlockStmt(as.Pos(), nil)
	}

	if as.Y == nil {
		// TODO(austin): Check all "implicit zeroing"
		return as
	}

	// Assigning a zero value needs no further lowering unless the build
	// is instrumenting (e.g. race detector) and must see the access.
	if !base.Flag.Cfg.Instrumenting && ir.IsZero(as.Y) {
		return as
	}

	switch as.Y.Op() {
	default:
		as.Y = walkExpr(as.Y, init)

	case ir.ORECV:
		// x = <-c; as.Left is x, as.Right.Left is c.
		// order.stmt made sure x is addressable.
		recv := as.Y.(*ir.UnaryExpr)
		recv.X = walkExpr(recv.X, init)

		n1 := typecheck.NodAddr(as.X)
		r := recv.X // the channel
		return mkcall1(chanfn("chanrecv1", 2, r.Type()), nil, init, r, n1)

	case ir.OAPPEND:
		// x = append(...)
		call := as.Y.(*ir.CallExpr)
		if call.Type().Elem().NotInHeap() {
			base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", call.Type().Elem())
		}
		// Pick a lowering strategy for the append.
		var r ir.Node
		switch {
		case isAppendOfMake(call):
			// x = append(y, make([]T, y)...)
			r = extendSlice(call, init)
		case call.IsDDD:
			r = appendSlice(call, init) // also works for append(slice, string).
		default:
			r = walkAppend(call, init, as)
		}
		as.Y = r
		if r.Op() == ir.OAPPEND {
			r := r.(*ir.CallExpr)
			// Left in place for back end.
			// Do not add a new write barrier.
			// Set up address of type for back end.
			r.Fun = reflectdata.AppendElemRType(base.Pos, r)
			return as
		}
		// Otherwise, lowered for race detector.
		// Treat as ordinary assignment.
	}

	if as.X != nil && as.Y != nil {
		return convas(as, init)
	}
	return as
}
   119  
   120  // walkAssignDotType walks an OAS2DOTTYPE node.
   121  func walkAssignDotType(n *ir.AssignListStmt, init *ir.Nodes) ir.Node {
   122  	walkExprListSafe(n.Lhs, init)
   123  	n.Rhs[0] = walkExpr(n.Rhs[0], init)
   124  	return n
   125  }
   126  
   127  // walkAssignFunc walks an OAS2FUNC node.
   128  func walkAssignFunc(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
   129  	init.Append(ir.TakeInit(n)...)
   130  
   131  	r := n.Rhs[0]
   132  	walkExprListSafe(n.Lhs, init)
   133  	r = walkExpr(r, init)
   134  
   135  	if ir.IsIntrinsicCall(r.(*ir.CallExpr)) {
   136  		n.Rhs = []ir.Node{r}
   137  		return n
   138  	}
   139  	init.Append(r)
   140  
   141  	ll := ascompatet(n.Lhs, r.Type())
   142  	return ir.NewBlockStmt(src.NoXPos, ll)
   143  }
   144  
   145  // walkAssignList walks an OAS2 node.
   146  func walkAssignList(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
   147  	init.Append(ir.TakeInit(n)...)
   148  	return ir.NewBlockStmt(src.NoXPos, ascompatee(ir.OAS, n.Lhs, n.Rhs))
   149  }
   150  
// walkAssignMapRead walks an OAS2MAPR node (a, b = m[k]), lowering the
// map read to a call of one of the mapaccess2* runtime helpers.
func walkAssignMapRead(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
	init.Append(ir.TakeInit(n)...)

	r := n.Rhs[0].(*ir.IndexExpr)
	walkExprListSafe(n.Lhs, init)

	r.X = walkExpr(r.X, init)
	r.Index = walkExpr(r.Index, init)
	map_ := r.X
	t := r.X.Type()
	fast := mapfast(t)
	key := mapKeyArg(fast, r, r.Index, false)
	args := []ir.Node{reflectdata.IndexMapRType(base.Pos, r), map_, key}

	// from:
	//   a,b = m[i]
	// to:
	//   var,b = mapaccess2*(t, m, i)
	//   a = *var
	a := n.Lhs[0]

	// Elements larger than abi.ZeroValSize use the _fat variant, which
	// additionally takes the address of a zero value to return when the
	// key is absent.
	var mapFn ir.Node
	if t.Elem().Size() > abi.ZeroValSize {
		args = append(args, reflectdata.ZeroAddr(t.Elem().Size()))
		mapFn = mapfn("mapaccess2_fat", t, true)
	} else {
		mapFn = mapfn(mapaccess[fast], t, false)
	}
	call := mkcall1(mapFn, mapFn.Type().ResultsTuple(), init, args...)

	// mapaccess2* returns a typed bool, but due to spec changes,
	// the boolean result of i.(T) is now untyped so we make it the
	// same type as the variable on the lhs.
	if ok := n.Lhs[1]; !ir.IsBlank(ok) && ok.Type().IsBoolean() {
		call.Type().Field(1).Type = ok.Type()
	}
	n.Rhs = []ir.Node{call}
	n.SetOp(ir.OAS2FUNC)

	// don't generate a = *var if a is _
	if ir.IsBlank(a) {
		return walkExpr(typecheck.Stmt(n), init)
	}

	// Receive the result through a temporary pointer, then load the
	// element from it into a.
	var_ := typecheck.TempAt(base.Pos, ir.CurFunc, types.NewPtr(t.Elem()))
	var_.SetTypecheck(1)
	var_.MarkNonNil() // mapaccess always returns a non-nil pointer

	n.Lhs[0] = var_
	init.Append(walkExpr(n, init))

	as := ir.NewAssignStmt(base.Pos, a, ir.NewStarExpr(base.Pos, var_))
	return walkExpr(typecheck.Stmt(as), init)
}
   206  
   207  // walkAssignRecv walks an OAS2RECV node.
   208  func walkAssignRecv(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
   209  	init.Append(ir.TakeInit(n)...)
   210  
   211  	r := n.Rhs[0].(*ir.UnaryExpr) // recv
   212  	walkExprListSafe(n.Lhs, init)
   213  	r.X = walkExpr(r.X, init)
   214  	var n1 ir.Node
   215  	if ir.IsBlank(n.Lhs[0]) {
   216  		n1 = typecheck.NodNil()
   217  	} else {
   218  		n1 = typecheck.NodAddr(n.Lhs[0])
   219  	}
   220  	fn := chanfn("chanrecv2", 2, r.X.Type())
   221  	ok := n.Lhs[1]
   222  	call := mkcall1(fn, types.Types[types.TBOOL], init, r.X, n1)
   223  	return walkAssign(init, typecheck.Stmt(ir.NewAssignStmt(base.Pos, ok, call)))
   224  }
   225  
   226  // walkReturn walks an ORETURN node.
   227  func walkReturn(n *ir.ReturnStmt) ir.Node {
   228  	fn := ir.CurFunc
   229  
   230  	fn.NumReturns++
   231  	if len(n.Results) == 0 {
   232  		return n
   233  	}
   234  
   235  	results := fn.Type().Results()
   236  	dsts := make([]ir.Node, len(results))
   237  	for i, v := range results {
   238  		// TODO(mdempsky): typecheck should have already checked the result variables.
   239  		dsts[i] = typecheck.AssignExpr(v.Nname.(*ir.Name))
   240  	}
   241  
   242  	n.Results = ascompatee(n.Op(), dsts, n.Results)
   243  	return n
   244  }
   245  
   246  // check assign type list to
   247  // an expression list. called in
   248  //
   249  //	expr-list = func()
   250  func ascompatet(nl ir.Nodes, nr *types.Type) []ir.Node {
   251  	if len(nl) != nr.NumFields() {
   252  		base.Fatalf("ascompatet: assignment count mismatch: %d = %d", len(nl), nr.NumFields())
   253  	}
   254  
   255  	var nn ir.Nodes
   256  	for i, l := range nl {
   257  		if ir.IsBlank(l) {
   258  			continue
   259  		}
   260  		r := nr.Field(i)
   261  
   262  		// Order should have created autotemps of the appropriate type for
   263  		// us to store results into.
   264  		if tmp, ok := l.(*ir.Name); !ok || !tmp.AutoTemp() || !types.Identical(tmp.Type(), r.Type) {
   265  			base.FatalfAt(l.Pos(), "assigning %v to %+v", r.Type, l)
   266  		}
   267  
   268  		res := ir.NewResultExpr(base.Pos, nil, types.BADWIDTH)
   269  		res.Index = int64(i)
   270  		res.SetType(r.Type)
   271  		res.SetTypecheck(1)
   272  
   273  		nn.Append(ir.NewAssignStmt(base.Pos, l, res))
   274  	}
   275  	return nn
   276  }
   277  
// check assign expression list to
// an expression list. called in
//
//	expr-list = expr-list
func ascompatee(op ir.Op, nl, nr []ir.Node) []ir.Node {
	// cannot happen: should have been rejected during type checking
	if len(nl) != len(nr) {
		base.Fatalf("assignment operands mismatch: %+v / %+v", ir.Nodes(nl), ir.Nodes(nr))
	}

	// assigned tracks plain local variables written so far;
	// memWrite records that some assignment may have written through
	// memory (so any memory read could now be stale).
	var assigned ir.NameSet
	var memWrite, deferResultWrite bool

	// affected reports whether expression n could be affected by
	// the assignments applied so far.
	affected := func(n ir.Node) bool {
		if deferResultWrite {
			return true
		}
		return ir.Any(n, func(n ir.Node) bool {
			if n.Op() == ir.ONAME && assigned.Has(n.(*ir.Name)) {
				return true
			}
			if memWrite && readsMemory(n) {
				return true
			}
			return false
		})
	}

	// If a needed expression may be affected by an
	// earlier assignment, make an early copy of that
	// expression and use the copy instead.
	var early ir.Nodes
	save := func(np *ir.Node) {
		if n := *np; affected(n) {
			*np = copyExpr(n, n.Type(), &early)
		}
	}

	// late accumulates the actual assignments; early accumulates the
	// protective copies (and init statements) that must run first.
	var late ir.Nodes
	for i, lorig := range nl {
		l, r := lorig, nr[i]

		// Do not generate 'x = x' during return. See issue 4014.
		if op == ir.ORETURN && ir.SameSafeExpr(l, r) {
			continue
		}

		// Save subexpressions needed on left side.
		// Drill through non-dereferences.
		for {
			// If an expression has init statements, they must be evaluated
			// before any of its saved sub-operands (#45706).
			// TODO(mdempsky): Disallow init statements on lvalues.
			init := ir.TakeInit(l)
			walkStmtList(init)
			early.Append(init...)

			switch ll := l.(type) {
			case *ir.IndexExpr:
				if ll.X.Type().IsArray() {
					// Assigning to an array element writes to the array
					// variable itself, so keep drilling into ll.X.
					save(&ll.Index)
					l = ll.X
					continue
				}
			case *ir.ParenExpr:
				l = ll.X
				continue
			case *ir.SelectorExpr:
				if ll.Op() == ir.ODOT {
					l = ll.X
					continue
				}
			}
			break
		}

		// Classify the innermost lvalue and save any subexpressions it
		// evaluates, since a prior assignment could change them.
		var name *ir.Name
		switch l.Op() {
		default:
			base.Fatalf("unexpected lvalue %v", l.Op())
		case ir.ONAME:
			name = l.(*ir.Name)
		case ir.OINDEX, ir.OINDEXMAP:
			l := l.(*ir.IndexExpr)
			save(&l.X)
			save(&l.Index)
		case ir.ODEREF:
			l := l.(*ir.StarExpr)
			save(&l.X)
		case ir.ODOTPTR:
			l := l.(*ir.SelectorExpr)
			save(&l.X)
		}

		// Save expression on right side.
		save(&r)

		appendWalkStmt(&late, convas(ir.NewAssignStmt(base.Pos, lorig, r), &late))

		// Check for reasons why we may need to compute later expressions
		// before this assignment happens.

		if name == nil {
			// Not a direct assignment to a declared variable.
			// Conservatively assume any memory access might alias.
			memWrite = true
			continue
		}

		if name.Class == ir.PPARAMOUT && ir.CurFunc.HasDefer() {
			// Assignments to a result parameter in a function with defers
			// becomes visible early if evaluation of any later expression
			// panics (#43835).
			deferResultWrite = true
			continue
		}

		if ir.IsBlank(name) {
			// We can ignore assignments to blank or anonymous result parameters.
			// These can't appear in expressions anyway.
			continue
		}

		if name.Addrtaken() || !name.OnStack() {
			// Global variable, heap escaped, or just addrtaken.
			// Conservatively assume any memory access might alias.
			memWrite = true
			continue
		}

		// Local, non-addrtaken variable.
		// Assignments can only alias with direct uses of this variable.
		assigned.Add(name)
	}

	// All protective copies run first, then the assignments themselves.
	early.Append(late.Take()...)
	return early
}
   418  
   419  // readsMemory reports whether the evaluation n directly reads from
   420  // memory that might be written to indirectly.
   421  func readsMemory(n ir.Node) bool {
   422  	switch n.Op() {
   423  	case ir.ONAME:
   424  		n := n.(*ir.Name)
   425  		if n.Class == ir.PFUNC {
   426  			return false
   427  		}
   428  		return n.Addrtaken() || !n.OnStack()
   429  
   430  	case ir.OADD,
   431  		ir.OAND,
   432  		ir.OANDAND,
   433  		ir.OANDNOT,
   434  		ir.OBITNOT,
   435  		ir.OCONV,
   436  		ir.OCONVIFACE,
   437  		ir.OCONVNOP,
   438  		ir.ODIV,
   439  		ir.ODOT,
   440  		ir.ODOTTYPE,
   441  		ir.OLITERAL,
   442  		ir.OLSH,
   443  		ir.OMOD,
   444  		ir.OMUL,
   445  		ir.ONEG,
   446  		ir.ONIL,
   447  		ir.OOR,
   448  		ir.OOROR,
   449  		ir.OPAREN,
   450  		ir.OPLUS,
   451  		ir.ORSH,
   452  		ir.OSUB,
   453  		ir.OXOR:
   454  		return false
   455  	}
   456  
   457  	// Be conservative.
   458  	return true
   459  }
   460  
// expand append(l1, l2...) to
//
//	init {
//	  s := l1
//	  newLen := s.len + l2.len
//	  // Compare as uint so growslice can panic on overflow.
//	  if uint(newLen) <= uint(s.cap) {
//	    s = s[:newLen]
//	  } else {
//	    s = growslice(s.ptr, s.len, s.cap, l2.len, T)
//	  }
//	  memmove(&s[s.len-l2.len], &l2[0], l2.len*sizeof(T))
//	}
//	s
//
// l2 is allowed to be a string.
func appendSlice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
	walkAppendArgs(n, init)

	l1 := n.Args[0]
	l2 := n.Args[1]
	// l2 is evaluated more than once below (length, pointer); make it cheap.
	l2 = cheapExpr(l2, init)
	n.Args[1] = l2

	var nodes ir.Nodes

	// var s []T
	s := typecheck.TempAt(base.Pos, ir.CurFunc, l1.Type())
	nodes.Append(ir.NewAssignStmt(base.Pos, s, l1)) // s = l1

	elemtype := s.Type().Elem()

	// Decompose slice.
	oldPtr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, s)
	oldLen := ir.NewUnaryExpr(base.Pos, ir.OLEN, s)
	oldCap := ir.NewUnaryExpr(base.Pos, ir.OCAP, s)

	// Number of elements we are adding
	num := ir.NewUnaryExpr(base.Pos, ir.OLEN, l2)

	// newLen := oldLen + num
	newLen := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TINT])
	nodes.Append(ir.NewAssignStmt(base.Pos, newLen, ir.NewBinaryExpr(base.Pos, ir.OADD, oldLen, num)))

	// if uint(newLen) <= uint(oldCap)
	nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
	nuint := typecheck.Conv(newLen, types.Types[types.TUINT])
	scapuint := typecheck.Conv(oldCap, types.Types[types.TUINT])
	nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLE, nuint, scapuint)
	nif.Likely = true

	// then { s = s[:newLen] }
	slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, nil, newLen, nil)
	slice.SetBounded(true)
	nif.Body = []ir.Node{ir.NewAssignStmt(base.Pos, s, slice)}

	// else { s = growslice(oldPtr, newLen, oldCap, num, T) }
	call := walkGrowslice(s, nif.PtrInit(), oldPtr, newLen, oldCap, num)
	nif.Else = []ir.Node{ir.NewAssignStmt(base.Pos, s, call)}

	nodes.Append(nif)

	// Index to start copying into s.
	//   idx = newLen - len(l2)
	// We use this expression instead of oldLen because it avoids
	// a spill/restore of oldLen.
	// Note: this doesn't work optimally currently because
	// the compiler optimizer undoes this arithmetic.
	idx := ir.NewBinaryExpr(base.Pos, ir.OSUB, newLen, ir.NewUnaryExpr(base.Pos, ir.OLEN, l2))

	// Choose among three ways to copy the appended elements:
	// pointer elements need write barriers (typedslicecopy),
	// instrumented builds go through slicecopy for visibility,
	// and the common case is a raw memmove.
	var ncopy ir.Node
	if elemtype.HasPointers() {
		// copy(s[idx:], l2)
		slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, idx, nil, nil)
		slice.SetType(s.Type())
		slice.SetBounded(true)

		ir.CurFunc.SetWBPos(n.Pos())

		// instantiate typedslicecopy(typ *type, dstPtr *any, dstLen int, srcPtr *any, srcLen int) int
		fn := typecheck.LookupRuntime("typedslicecopy", l1.Type().Elem(), l2.Type().Elem())
		ptr1, len1 := backingArrayPtrLen(cheapExpr(slice, &nodes))
		ptr2, len2 := backingArrayPtrLen(l2)
		ncopy = mkcall1(fn, types.Types[types.TINT], &nodes, reflectdata.AppendElemRType(base.Pos, n), ptr1, len1, ptr2, len2)
	} else if base.Flag.Cfg.Instrumenting && !base.Flag.CompilingRuntime {
		// rely on runtime to instrument:
		//  copy(s[idx:], l2)
		// l2 can be a slice or string.
		slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, idx, nil, nil)
		slice.SetType(s.Type())
		slice.SetBounded(true)

		ptr1, len1 := backingArrayPtrLen(cheapExpr(slice, &nodes))
		ptr2, len2 := backingArrayPtrLen(l2)

		fn := typecheck.LookupRuntime("slicecopy", ptr1.Type().Elem(), ptr2.Type().Elem())
		ncopy = mkcall1(fn, types.Types[types.TINT], &nodes, ptr1, len1, ptr2, len2, ir.NewInt(base.Pos, elemtype.Size()))
	} else {
		// memmove(&s[idx], &l2[0], len(l2)*sizeof(T))
		ix := ir.NewIndexExpr(base.Pos, s, idx)
		ix.SetBounded(true)
		addr := typecheck.NodAddr(ix)

		sptr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, l2)

		nwid := cheapExpr(typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OLEN, l2), types.Types[types.TUINTPTR]), &nodes)
		nwid = ir.NewBinaryExpr(base.Pos, ir.OMUL, nwid, ir.NewInt(base.Pos, elemtype.Size()))

		// instantiate func memmove(to *any, frm *any, length uintptr)
		fn := typecheck.LookupRuntime("memmove", elemtype, elemtype)
		ncopy = mkcall1(fn, nil, &nodes, addr, sptr, nwid)
	}
	ln := append(nodes, ncopy)

	// Typecheck and walk the generated statements, then emit them.
	typecheck.Stmts(ln)
	walkStmtList(ln)
	init.Append(ln...)
	return s
}
   580  
   581  // isAppendOfMake reports whether n is of the form append(x, make([]T, y)...).
   582  // isAppendOfMake assumes n has already been typechecked.
   583  func isAppendOfMake(n ir.Node) bool {
   584  	if base.Flag.N != 0 || base.Flag.Cfg.Instrumenting {
   585  		return false
   586  	}
   587  
   588  	if n.Typecheck() == 0 {
   589  		base.Fatalf("missing typecheck: %+v", n)
   590  	}
   591  
   592  	if n.Op() != ir.OAPPEND {
   593  		return false
   594  	}
   595  	call := n.(*ir.CallExpr)
   596  	if !call.IsDDD || len(call.Args) != 2 || call.Args[1].Op() != ir.OMAKESLICE {
   597  		return false
   598  	}
   599  
   600  	mk := call.Args[1].(*ir.MakeExpr)
   601  	if mk.Cap != nil {
   602  		return false
   603  	}
   604  
   605  	// y must be either an integer constant or the largest possible positive value
   606  	// of variable y needs to fit into a uint.
   607  
   608  	// typecheck made sure that constant arguments to make are not negative and fit into an int.
   609  
   610  	// The care of overflow of the len argument to make will be handled by an explicit check of int(len) < 0 during runtime.
   611  	y := mk.Len
   612  	if !ir.IsConst(y, constant.Int) && y.Type().Size() > types.Types[types.TUINT].Size() {
   613  		return false
   614  	}
   615  
   616  	return true
   617  }
   618  
// extendSlice rewrites append(l1, make([]T, l2)...) to
//
//	init {
//	  if l2 >= 0 { // Empty if block here for more meaningful node.SetLikely(true)
//	  } else {
//	    panicmakeslicelen()
//	  }
//	  s := l1
//	  if l2 != 0 {
//	    n := len(s) + l2
//	    // Compare n and s as uint so growslice can panic on overflow of len(s) + l2.
//	    // cap is a positive int and n can become negative when len(s) + l2
//	    // overflows int. Interpreting n when negative as uint makes it larger
//	    // than cap(s). growslice will check the int n arg and panic if n is
//	    // negative. This prevents the overflow from being undetected.
//	    if uint(n) <= uint(cap(s)) {
//	      s = s[:n]
//	    } else {
//	      s = growslice(T, s.ptr, n, s.cap, l2, T)
//	    }
//	    // clear the new portion of the underlying array.
//	    hp := &s[len(s)-l2]
//	    hn := l2 * sizeof(T)
//	    memclr(hp, hn)
//	  }
//	}
//	s
//
//	if T has pointers, the final memclr can go inside the "then" branch, as
//	growslice will have done the clearing for us.
func extendSlice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
	// isAppendOfMake made sure all possible positive values of l2 fit into a uint.
	// The case of l2 overflow when converting from e.g. uint to int is handled by an explicit
	// check of l2 < 0 at runtime which is generated below.
	l2 := typecheck.Conv(n.Args[1].(*ir.MakeExpr).Len, types.Types[types.TINT])
	l2 = typecheck.Expr(l2)
	n.Args[1] = l2 // walkAppendArgs expects l2 in n.List.Second().

	walkAppendArgs(n, init)

	l1 := n.Args[0]
	l2 = n.Args[1] // re-read l2, as it may have been updated by walkAppendArgs

	var nodes []ir.Node

	// if l2 >= 0 (likely happens), do nothing
	nifneg := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OGE, l2, ir.NewInt(base.Pos, 0)), nil, nil)
	nifneg.Likely = true

	// else panicmakeslicelen()
	nifneg.Else = []ir.Node{mkcall("panicmakeslicelen", nil, init)}
	nodes = append(nodes, nifneg)

	// s := l1
	s := typecheck.TempAt(base.Pos, ir.CurFunc, l1.Type())
	nodes = append(nodes, ir.NewAssignStmt(base.Pos, s, l1))

	// if l2 != 0 {
	// Avoid work if we're not appending anything. But more importantly,
	// avoid allowing hp to be a past-the-end pointer when clearing. See issue 67255.
	nifnz := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.ONE, l2, ir.NewInt(base.Pos, 0)), nil, nil)
	nifnz.Likely = true
	nodes = append(nodes, nifnz)

	elemtype := s.Type().Elem()

	// n := s.len + l2
	nn := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TINT])
	nifnz.Body = append(nifnz.Body, ir.NewAssignStmt(base.Pos, nn, ir.NewBinaryExpr(base.Pos, ir.OADD, ir.NewUnaryExpr(base.Pos, ir.OLEN, s), l2)))

	// if uint(n) <= uint(s.cap)
	nuint := typecheck.Conv(nn, types.Types[types.TUINT])
	capuint := typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OCAP, s), types.Types[types.TUINT])
	nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLE, nuint, capuint), nil, nil)
	nif.Likely = true

	// then { s = s[:n] }
	nt := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, nil, nn, nil)
	nt.SetBounded(true)
	nif.Body = []ir.Node{ir.NewAssignStmt(base.Pos, s, nt)}

	// else { s = growslice(s.ptr, n, s.cap, l2, T) }
	nif.Else = []ir.Node{
		ir.NewAssignStmt(base.Pos, s, walkGrowslice(s, nif.PtrInit(),
			ir.NewUnaryExpr(base.Pos, ir.OSPTR, s),
			nn,
			ir.NewUnaryExpr(base.Pos, ir.OCAP, s),
			l2)),
	}

	nifnz.Body = append(nifnz.Body, nif)

	// hp := &s[s.len - l2]
	// TODO: &s[s.len] - hn?
	ix := ir.NewIndexExpr(base.Pos, s, ir.NewBinaryExpr(base.Pos, ir.OSUB, ir.NewUnaryExpr(base.Pos, ir.OLEN, s), l2))
	ix.SetBounded(true)
	hp := typecheck.ConvNop(typecheck.NodAddr(ix), types.Types[types.TUNSAFEPTR])

	// hn := l2 * sizeof(elem(s))
	hn := typecheck.Conv(ir.NewBinaryExpr(base.Pos, ir.OMUL, l2, ir.NewInt(base.Pos, elemtype.Size())), types.Types[types.TUINTPTR])

	// Pointer elements require the write-barrier-aware clear helper.
	clrname := "memclrNoHeapPointers"
	hasPointers := elemtype.HasPointers()
	if hasPointers {
		clrname = "memclrHasPointers"
		ir.CurFunc.SetWBPos(n.Pos())
	}

	var clr ir.Nodes
	clrfn := mkcall(clrname, nil, &clr, hp, hn)
	clr.Append(clrfn)
	if hasPointers {
		// growslice will have cleared the new entries, so only
		// if growslice isn't called do we need to do the zeroing ourselves.
		nif.Body = append(nif.Body, clr...)
	} else {
		nifnz.Body = append(nifnz.Body, clr...)
	}

	// Typecheck and walk the generated statements, then emit them.
	typecheck.Stmts(nodes)
	walkStmtList(nodes)
	init.Append(nodes...)
	return s
}
   744  
