Source file src/cmd/compile/internal/ssa/sccp.go

     1  // Copyright 2023 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package ssa
     6  
     7  // ----------------------------------------------------------------------------
     8  // Sparse Conditional Constant Propagation
     9  //
    10  // Described in
    11  // Mark N. Wegman, F. Kenneth Zadeck: Constant Propagation with Conditional Branches.
    12  // TOPLAS 1991.
    13  //
// This algorithm uses a three-level lattice for each SSA value
    15  //
    16  //      Top        undefined
    17  //     / | \
    18  // .. 1  2  3 ..   constant
    19  //     \ | /
    20  //     Bottom      not constant
    21  //
    22  // It starts with optimistically assuming that all SSA values are initially Top
    23  // and then propagates constant facts only along reachable control flow paths.
// Since some basic blocks have not been visited yet, the corresponding inputs
// of a phi are Top; we use meet(phi) to compute its lattice.
    26  //
    27  // 	  Top ∩ any = any
    28  // 	  Bottom ∩ any = Bottom
    29  // 	  ConstantA ∩ ConstantA = ConstantA
    30  // 	  ConstantA ∩ ConstantB = Bottom
    31  //
// Each lattice value is lowered at most twice (Top to Constant, Constant to Bottom)
    33  // due to lattice depth, resulting in a fast convergence speed of the algorithm.
    34  // In this way, sccp can discover optimization opportunities that cannot be found
    35  // by just combining constant folding and constant propagation and dead code
    36  // elimination separately.
    37  
// Three level lattice holds compile time knowledge about SSA value
const (
	top      int8 = iota // undefined: optimistic initial state, not yet visited
	constant             // constant: value is known at compile time
	bottom               // not a constant: proven unfoldable, final state
)
    44  
// lattice is the compile-time knowledge about a single SSA value: its
// position in the three-level lattice plus, for constants, the folded value.
type lattice struct {
	tag int8   // lattice type: one of top, constant, bottom
	val *Value // constant value; meaningful only when tag == constant
}
    49  
// worklist holds the per-function working state of the SCCP algorithm:
// the two worklists (CFG edges and values to re-visit), the visited sets,
// the lattice cells, and the def-use/def-block chains built up front.
type worklist struct {
	f            *Func               // the target function to be optimized out
	edges        []Edge              // propagate constant facts through edges
	inUses       *sparseSet          // IDs already in uses, for duplicate check
	uses         []*Value            // re-visiting set
	visited      map[Edge]bool       // visited edges
	latticeCells map[*Value]lattice  // constant lattices
	defUse       map[*Value][]*Value // def-use chains for some values
	defBlock     map[*Value][]*Block // use blocks of def
	visitedBlock []bool              // visited block
}
    61  
    62  // sccp stands for sparse conditional constant propagation, it propagates constants
    63  // through CFG conditionally and applies constant folding, constant replacement and
    64  // dead code elimination all together.
    65  func sccp(f *Func) {
    66  	var t worklist
    67  	t.f = f
    68  	t.edges = make([]Edge, 0)
    69  	t.visited = make(map[Edge]bool)
    70  	t.edges = append(t.edges, Edge{f.Entry, 0})
    71  	t.defUse = make(map[*Value][]*Value)
    72  	t.defBlock = make(map[*Value][]*Block)
    73  	t.latticeCells = make(map[*Value]lattice)
    74  	t.visitedBlock = f.Cache.allocBoolSlice(f.NumBlocks())
    75  	t.inUses = f.newSparseSet(f.NumValues())
    76  	defer f.retSparseSet(t.inUses)
    77  	defer f.Cache.freeBoolSlice(t.visitedBlock)
    78  
    79  	// build it early since we rely heavily on the def-use chain later
    80  	t.buildDefUses()
    81  
    82  	// pick up either an edge or SSA value from worklist, process it
    83  	for {
    84  		if len(t.edges) > 0 {
    85  			edge := t.edges[0]
    86  			t.edges = t.edges[1:]
    87  			if _, exist := t.visited[edge]; !exist {
    88  				dest := edge.b
    89  				destVisited := t.visitedBlock[dest.ID]
    90  
    91  				// mark edge as visited
    92  				t.visited[edge] = true
    93  				t.visitedBlock[dest.ID] = true
    94  				for _, val := range dest.Values {
    95  					if val.Op == OpPhi || !destVisited {
    96  						t.visitValue(val)
    97  					}
    98  				}
    99  				// propagates constants facts through CFG, taking condition test
   100  				// into account
   101  				if !destVisited {
   102  					t.propagate(dest)
   103  				}
   104  			}
   105  			continue
   106  		}
   107  		if len(t.uses) > 0 {
   108  			use := t.uses[0]
   109  			t.uses = t.uses[1:]
   110  			t.inUses.remove(use.ID)
   111  			t.visitValue(use)
   112  			continue
   113  		}
   114  		break
   115  	}
   116  
   117  	// apply optimizations based on discovered constants
   118  	constCnt, rewireCnt := t.replaceConst()
   119  	if f.pass.debug > 0 {
   120  		if constCnt > 0 || rewireCnt > 0 {
   121  			f.Warnl(f.Entry.Pos, "Phase SCCP for %v : %v constants, %v dce", f.Name, constCnt, rewireCnt)
   122  		}
   123  	}
   124  }
   125  
   126  func equals(a, b lattice) bool {
   127  	if a == b {
   128  		// fast path
   129  		return true
   130  	}
   131  	if a.tag != b.tag {
   132  		return false
   133  	}
   134  	if a.tag == constant {
   135  		// The same content of const value may be different, we should
   136  		// compare with auxInt instead
   137  		v1 := a.val
   138  		v2 := b.val
   139  		if v1.Op == v2.Op && v1.AuxInt == v2.AuxInt {
   140  			return true
   141  		} else {
   142  			return false
   143  		}
   144  	}
   145  	return true
   146  }
   147  
// possibleConst checks if Value can be folded to const. For those Values that can
// never become constants(e.g. StaticCall), we don't make futile efforts.
//
// Note: the op lists below must be kept in sync with the corresponding case
// lists in visitValue; an op accepted here but unhandled there would stay Top
// forever.
func possibleConst(val *Value) bool {
	if isConst(val) {
		return true
	}
	switch val.Op {
	case OpCopy:
		return true
	case OpPhi:
		return true
	case
		// negate
		OpNeg8, OpNeg16, OpNeg32, OpNeg64, OpNeg32F, OpNeg64F,
		OpCom8, OpCom16, OpCom32, OpCom64,
		// math
		OpFloor, OpCeil, OpTrunc, OpRoundToEven, OpSqrt,
		// conversion
		OpTrunc16to8, OpTrunc32to8, OpTrunc32to16, OpTrunc64to8,
		OpTrunc64to16, OpTrunc64to32, OpCvt32to32F, OpCvt32to64F,
		OpCvt64to32F, OpCvt64to64F, OpCvt32Fto32, OpCvt32Fto64,
		OpCvt64Fto32, OpCvt64Fto64, OpCvt32Fto64F, OpCvt64Fto32F,
		OpCvtBoolToUint8,
		OpZeroExt8to16, OpZeroExt8to32, OpZeroExt8to64, OpZeroExt16to32,
		OpZeroExt16to64, OpZeroExt32to64, OpSignExt8to16, OpSignExt8to32,
		OpSignExt8to64, OpSignExt16to32, OpSignExt16to64, OpSignExt32to64,
		// bit
		OpCtz8, OpCtz16, OpCtz32, OpCtz64,
		// mask
		OpSlicemask,
		// safety check
		OpIsNonNil,
		// not
		OpNot:
		return true
	case
		// add
		OpAdd64, OpAdd32, OpAdd16, OpAdd8,
		OpAdd32F, OpAdd64F,
		// sub
		OpSub64, OpSub32, OpSub16, OpSub8,
		OpSub32F, OpSub64F,
		// mul
		OpMul64, OpMul32, OpMul16, OpMul8,
		OpMul32F, OpMul64F,
		// div
		OpDiv32F, OpDiv64F,
		OpDiv8, OpDiv16, OpDiv32, OpDiv64,
		OpDiv8u, OpDiv16u, OpDiv32u, OpDiv64u,
		// mod
		OpMod8, OpMod16, OpMod32, OpMod64,
		OpMod8u, OpMod16u, OpMod32u, OpMod64u,
		// compare
		OpEq64, OpEq32, OpEq16, OpEq8,
		OpEq32F, OpEq64F,
		OpLess64, OpLess32, OpLess16, OpLess8,
		OpLess64U, OpLess32U, OpLess16U, OpLess8U,
		OpLess32F, OpLess64F,
		OpLeq64, OpLeq32, OpLeq16, OpLeq8,
		OpLeq64U, OpLeq32U, OpLeq16U, OpLeq8U,
		OpLeq32F, OpLeq64F,
		OpEqB, OpNeqB,
		// shift
		OpLsh64x64, OpRsh64x64, OpRsh64Ux64, OpLsh32x64,
		OpRsh32x64, OpRsh32Ux64, OpLsh16x64, OpRsh16x64,
		OpRsh16Ux64, OpLsh8x64, OpRsh8x64, OpRsh8Ux64,
		// safety check
		OpIsInBounds, OpIsSliceInBounds,
		// bit
		OpAnd8, OpAnd16, OpAnd32, OpAnd64,
		OpOr8, OpOr16, OpOr32, OpOr64,
		OpXor8, OpXor16, OpXor32, OpXor64:
		return true
	default:
		return false
	}
}
   224  
   225  func (t *worklist) getLatticeCell(val *Value) lattice {
   226  	if !possibleConst(val) {
   227  		// they are always worst
   228  		return lattice{bottom, nil}
   229  	}
   230  	lt, exist := t.latticeCells[val]
   231  	if !exist {
   232  		return lattice{top, nil} // optimistically for un-visited value
   233  	}
   234  	return lt
   235  }
   236  
   237  func isConst(val *Value) bool {
   238  	switch val.Op {
   239  	case OpConst64, OpConst32, OpConst16, OpConst8,
   240  		OpConstBool, OpConst32F, OpConst64F:
   241  		return true
   242  	default:
   243  		return false
   244  	}
   245  }
   246  
   247  // buildDefUses builds def-use chain for some values early, because once the
   248  // lattice of a value is changed, we need to update lattices of use. But we don't
   249  // need all uses of it, only uses that can become constants would be added into
   250  // re-visit worklist since no matter how many times they are revisited, uses which
   251  // can't become constants lattice remains unchanged, i.e. Bottom.
   252  func (t *worklist) buildDefUses() {
   253  	for _, block := range t.f.Blocks {
   254  		for _, val := range block.Values {
   255  			for _, arg := range val.Args {
   256  				// find its uses, only uses that can become constants take into account
   257  				if possibleConst(arg) && possibleConst(val) {
   258  					// Phi may refer to itself as uses, avoid duplicate visits
   259  					if arg == val {
   260  						continue
   261  					}
   262  					if _, exist := t.defUse[arg]; !exist {
   263  						t.defUse[arg] = make([]*Value, 0, arg.Uses)
   264  					}
   265  					t.defUse[arg] = append(t.defUse[arg], val)
   266  				}
   267  			}
   268  		}
   269  		for _, ctl := range block.ControlValues() {
   270  			// for control values that can become constants, find their use blocks
   271  			if possibleConst(ctl) {
   272  				t.defBlock[ctl] = append(t.defBlock[ctl], block)
   273  			}
   274  		}
   275  	}
   276  }
   277  
   278  // addUses finds all uses of value and appends them into work list for further process
   279  func (t *worklist) addUses(val *Value) {
   280  	for _, use := range t.defUse[val] {
   281  		// Provenly not a constant, ignore
   282  		useLt := t.getLatticeCell(use)
   283  		if useLt.tag == bottom {
   284  			continue
   285  		}
   286  		// Avoid duplicate visits
   287  		if !t.inUses.contains(use.ID) {
   288  			t.inUses.add(use.ID)
   289  			t.uses = append(t.uses, use)
   290  		}
   291  	}
   292  	for _, block := range t.defBlock[val] {
   293  		if t.visitedBlock[block.ID] {
   294  			t.propagate(block)
   295  		}
   296  	}
   297  }
   298  
   299  // meet meets all of phi arguments and computes result lattice
   300  func (t *worklist) meet(val *Value) lattice {
   301  	optimisticLt := lattice{top, nil}
   302  	for i := 0; i < len(val.Args); i++ {
   303  		edge := Edge{val.Block, i}
   304  		// If incoming edge for phi is not visited, assume top optimistically.
   305  		// According to rules of meet:
   306  		// 		Top ∩ any = any
   307  		// Top participates in meet() but does not affect the result, so here
   308  		// we will ignore Top and only take other lattices into consideration.
   309  		if _, exist := t.visited[edge]; exist {
   310  			lt := t.getLatticeCell(val.Args[i])
   311  			if lt.tag == constant {
   312  				if optimisticLt.tag == top {
   313  					optimisticLt = lt
   314  				} else {
   315  					if !equals(optimisticLt, lt) {
   316  						// ConstantA ∩ ConstantB = Bottom
   317  						return lattice{bottom, nil}
   318  					}
   319  				}
   320  			} else if lt.tag == bottom {
   321  				// Bottom ∩ any = Bottom
   322  				return lattice{bottom, nil}
   323  			} else {
   324  				// Top ∩ any = any
   325  			}
   326  		} else {
   327  			// Top ∩ any = any
   328  		}
   329  	}
   330  
   331  	// ConstantA ∩ ConstantA = ConstantA or Top ∩ any = any
   332  	return optimisticLt
   333  }
   334  
   335  func computeLattice(f *Func, val *Value, args ...*Value) lattice {
   336  	// In general, we need to perform constant evaluation based on constant args:
   337  	//
   338  	//  res := lattice{constant, nil}
   339  	// 	switch op {
   340  	// 	case OpAdd16:
   341  	//		res.val = newConst(argLt1.val.AuxInt16() + argLt2.val.AuxInt16())
   342  	// 	case OpAdd32:
   343  	// 		res.val = newConst(argLt1.val.AuxInt32() + argLt2.val.AuxInt32())
   344  	//	case OpDiv8:
   345  	//		if !isDivideByZero(argLt2.val.AuxInt8()) {
   346  	//			res.val = newConst(argLt1.val.AuxInt8() / argLt2.val.AuxInt8())
   347  	//		}
   348  	//  ...
   349  	// 	}
   350  	//
   351  	// However, this would create a huge switch for all opcodes that can be
   352  	// evaluated during compile time. Moreover, some operations can be evaluated
   353  	// only if its arguments satisfy additional conditions(e.g. divide by zero).
   354  	// It's fragile and error-prone. We did a trick by reusing the existing rules
   355  	// in generic rules for compile-time evaluation. But generic rules rewrite
   356  	// original value, this behavior is undesired, because the lattice of values
   357  	// may change multiple times, once it was rewritten, we lose the opportunity
   358  	// to change it permanently, which can lead to errors. For example, We cannot
   359  	// change its value immediately after visiting Phi, because some of its input
   360  	// edges may still not be visited at this moment.
   361  	constValue := f.newValue(val.Op, val.Type, f.Entry, val.Pos)
   362  	constValue.AddArgs(args...)
   363  	matched := rewriteValuegeneric(constValue)
   364  	if matched {
   365  		if isConst(constValue) {
   366  			return lattice{constant, constValue}
   367  		}
   368  	}
   369  	// Either we can not match generic rules for given value or it does not
   370  	// satisfy additional constraints(e.g. divide by zero), in these cases, clean
   371  	// up temporary value immediately in case they are not dominated by their args.
   372  	constValue.reset(OpInvalid)
   373  	return lattice{bottom, nil}
   374  }
   375  
// visitValue (re)computes the lattice of val and records it in latticeCells.
// If the lattice changed, all uses of val are queued for re-visiting via the
// deferred hook below. Lattices may only be lowered (Top -> Constant ->
// Bottom), never raised; this monotonicity bounds re-visits and guarantees
// termination, and is enforced by the Fatalf check.
func (t *worklist) visitValue(val *Value) {
	// Impossible to be a constant, fast fail
	if !possibleConst(val) {
		return
	}

	// Provenly not a constant, fast fail
	oldLt := t.getLatticeCell(val)
	if oldLt.tag == bottom {
		return
	}

	// Re-visit all uses of value if its lattice is changed
	defer func() {
		newLt := t.getLatticeCell(val)
		if !equals(newLt, oldLt) {
			// Raising a lattice (tag decreasing) would break monotonicity
			// and could make the algorithm diverge.
			if oldLt.tag > newLt.tag {
				t.f.Fatalf("Must lower lattice\n")
			}
			t.addUses(val)
		}
	}()

	switch val.Op {
	// they are constant values, aren't they?
	case OpConst64, OpConst32, OpConst16, OpConst8,
		OpConstBool, OpConst32F, OpConst64F: //TODO: support ConstNil ConstString etc
		t.latticeCells[val] = lattice{constant, val}
	// lattice value of copy(x) actually means lattice value of (x)
	case OpCopy:
		t.latticeCells[val] = t.getLatticeCell(val.Args[0])
	// phi should be processed specially: meet over all visited incoming edges
	case OpPhi:
		t.latticeCells[val] = t.meet(val)
	// fold 1-input operations:
	case
		// negate
		OpNeg8, OpNeg16, OpNeg32, OpNeg64, OpNeg32F, OpNeg64F,
		OpCom8, OpCom16, OpCom32, OpCom64,
		// math
		OpFloor, OpCeil, OpTrunc, OpRoundToEven, OpSqrt,
		// conversion
		OpTrunc16to8, OpTrunc32to8, OpTrunc32to16, OpTrunc64to8,
		OpTrunc64to16, OpTrunc64to32, OpCvt32to32F, OpCvt32to64F,
		OpCvt64to32F, OpCvt64to64F, OpCvt32Fto32, OpCvt32Fto64,
		OpCvt64Fto32, OpCvt64Fto64, OpCvt32Fto64F, OpCvt64Fto32F,
		OpCvtBoolToUint8,
		OpZeroExt8to16, OpZeroExt8to32, OpZeroExt8to64, OpZeroExt16to32,
		OpZeroExt16to64, OpZeroExt32to64, OpSignExt8to16, OpSignExt8to32,
		OpSignExt8to64, OpSignExt16to32, OpSignExt16to64, OpSignExt32to64,
		// bit
		OpCtz8, OpCtz16, OpCtz32, OpCtz64,
		// mask
		OpSlicemask,
		// safety check
		OpIsNonNil,
		// not
		OpNot:
		lt1 := t.getLatticeCell(val.Args[0])

		if lt1.tag == constant {
			// here we take a shortcut by reusing generic rules to fold constants
			t.latticeCells[val] = computeLattice(t.f, val, lt1.val)
		} else {
			// Propagate the argument's tag (top stays top, bottom stays bottom).
			t.latticeCells[val] = lattice{lt1.tag, nil}
		}
	// fold 2-input operations
	case
		// add
		OpAdd64, OpAdd32, OpAdd16, OpAdd8,
		OpAdd32F, OpAdd64F,
		// sub
		OpSub64, OpSub32, OpSub16, OpSub8,
		OpSub32F, OpSub64F,
		// mul
		OpMul64, OpMul32, OpMul16, OpMul8,
		OpMul32F, OpMul64F,
		// div
		OpDiv32F, OpDiv64F,
		OpDiv8, OpDiv16, OpDiv32, OpDiv64,
		OpDiv8u, OpDiv16u, OpDiv32u, OpDiv64u, //TODO: support div128u
		// mod
		OpMod8, OpMod16, OpMod32, OpMod64,
		OpMod8u, OpMod16u, OpMod32u, OpMod64u,
		// compare
		OpEq64, OpEq32, OpEq16, OpEq8,
		OpEq32F, OpEq64F,
		OpLess64, OpLess32, OpLess16, OpLess8,
		OpLess64U, OpLess32U, OpLess16U, OpLess8U,
		OpLess32F, OpLess64F,
		OpLeq64, OpLeq32, OpLeq16, OpLeq8,
		OpLeq64U, OpLeq32U, OpLeq16U, OpLeq8U,
		OpLeq32F, OpLeq64F,
		OpEqB, OpNeqB,
		// shift
		OpLsh64x64, OpRsh64x64, OpRsh64Ux64, OpLsh32x64,
		OpRsh32x64, OpRsh32Ux64, OpLsh16x64, OpRsh16x64,
		OpRsh16Ux64, OpLsh8x64, OpRsh8x64, OpRsh8Ux64,
		// safety check
		OpIsInBounds, OpIsSliceInBounds,
		// bit
		OpAnd8, OpAnd16, OpAnd32, OpAnd64,
		OpOr8, OpOr16, OpOr32, OpOr64,
		OpXor8, OpXor16, OpXor32, OpXor64:
		lt1 := t.getLatticeCell(val.Args[0])
		lt2 := t.getLatticeCell(val.Args[1])

		if lt1.tag == constant && lt2.tag == constant {
			// here we take a shortcut by reusing generic rules to fold constants
			t.latticeCells[val] = computeLattice(t.f, val, lt1.val, lt2.val)
		} else {
			// Any Bottom argument makes the result Bottom; otherwise it is
			// still (optimistically) Top.
			if lt1.tag == bottom || lt2.tag == bottom {
				t.latticeCells[val] = lattice{bottom, nil}
			} else {
				t.latticeCells[val] = lattice{top, nil}
			}
		}
	default:
		// Any other type of value cannot be a constant, they are always worst(Bottom)
	}
}
   497  
   498  // propagate propagates constants facts through CFG. If the block has single successor,
   499  // add the successor anyway. If the block has multiple successors, only add the
   500  // branch destination corresponding to lattice value of condition value.
   501  func (t *worklist) propagate(block *Block) {
   502  	switch block.Kind {
   503  	case BlockExit, BlockRet, BlockRetJmp, BlockInvalid:
   504  		// control flow ends, do nothing then
   505  		break
   506  	case BlockDefer:
   507  		// we know nothing about control flow, add all branch destinations
   508  		t.edges = append(t.edges, block.Succs...)
   509  	case BlockFirst:
   510  		fallthrough // always takes the first branch
   511  	case BlockPlain:
   512  		t.edges = append(t.edges, block.Succs[0])
   513  	case BlockIf, BlockJumpTable:
   514  		cond := block.ControlValues()[0]
   515  		condLattice := t.getLatticeCell(cond)
   516  		if condLattice.tag == bottom {
   517  			// we know nothing about control flow, add all branch destinations
   518  			t.edges = append(t.edges, block.Succs...)
   519  		} else if condLattice.tag == constant {
   520  			// add branchIdx destinations depends on its condition
   521  			var branchIdx int64
   522  			if block.Kind == BlockIf {
   523  				branchIdx = 1 - condLattice.val.AuxInt
   524  			} else {
   525  				branchIdx = condLattice.val.AuxInt
   526  				if branchIdx < 0 || branchIdx >= int64(len(block.Succs)) {
   527  					// unreachable code, do nothing then
   528  					break
   529  				}
   530  			}
   531  			t.edges = append(t.edges, block.Succs[branchIdx])
   532  		} else {
   533  			// condition value is not visited yet, don't propagate it now
   534  		}
   535  	default:
   536  		t.f.Fatalf("All kind of block should be processed above.")
   537  	}
   538  }
   539  
   540  // rewireSuccessor rewires corresponding successors according to constant value
   541  // discovered by previous analysis. As the result, some successors become unreachable
   542  // and thus can be removed in further deadcode phase
   543  func rewireSuccessor(block *Block, constVal *Value) bool {
   544  	switch block.Kind {
   545  	case BlockIf:
   546  		block.removeEdge(int(constVal.AuxInt))
   547  		block.Kind = BlockPlain
   548  		block.Likely = BranchUnknown
   549  		block.ResetControls()
   550  		return true
   551  	case BlockJumpTable:
   552  		// Remove everything but the known taken branch.
   553  		idx := int(constVal.AuxInt)
   554  		if idx < 0 || idx >= len(block.Succs) {
   555  			// This can only happen in unreachable code,
   556  			// as an invariant of jump tables is that their
   557  			// input index is in range.
   558  			// See issue 64826.
   559  			return false
   560  		}
   561  		block.swapSuccessorsByIdx(0, idx)
   562  		for len(block.Succs) > 1 {
   563  			block.removeEdge(1)
   564  		}
   565  		block.Kind = BlockPlain
   566  		block.Likely = BranchUnknown
   567  		block.ResetControls()
   568  		return true
   569  	default:
   570  		return false
   571  	}
   572  }
   573  
   574  // replaceConst will replace non-constant values that have been proven by sccp
   575  // to be constants.
   576  func (t *worklist) replaceConst() (int, int) {
   577  	constCnt, rewireCnt := 0, 0
   578  	for val, lt := range t.latticeCells {
   579  		if lt.tag == constant {
   580  			if !isConst(val) {
   581  				if t.f.pass.debug > 0 {
   582  					t.f.Warnl(val.Pos, "Replace %v with %v", val.LongString(), lt.val.LongString())
   583  				}
   584  				val.reset(lt.val.Op)
   585  				val.AuxInt = lt.val.AuxInt
   586  				constCnt++
   587  			}
   588  			// If const value controls this block, rewires successors according to its value
   589  			ctrlBlock := t.defBlock[val]
   590  			for _, block := range ctrlBlock {
   591  				if rewireSuccessor(block, lt.val) {
   592  					rewireCnt++
   593  					if t.f.pass.debug > 0 {
   594  						t.f.Warnl(block.Pos, "Rewire %v %v successors", block.Kind, block)
   595  					}
   596  				}
   597  			}
   598  		}
   599  	}
   600  	return constCnt, rewireCnt
   601  }
   602  

View as plain text