Source file src/cmd/compile/internal/ssa/rewriteMIPS64.go

     1  // Code generated from _gen/MIPS64.rules using 'go generate'; DO NOT EDIT.
     2  
     3  package ssa
     4  
     5  import "cmd/compile/internal/types"
     6  
// rewriteValueMIPS64 lowers a single generic SSA value v to its MIPS64
// machine-specific form. Trivial lowerings (a pure 1:1 opcode substitution
// with identical arguments) are performed inline by overwriting v.Op;
// anything that needs argument rewriting, aux/const handling, or matching
// on operand shape is delegated to a per-opcode helper function below.
// It reports whether v was rewritten.
//
// This function is generated from _gen/MIPS64.rules by rulegen; do not
// hand-edit — change the rules file and re-run 'go generate' instead.
func rewriteValueMIPS64(v *Value) bool {
	switch v.Op {
	case OpAbs:
		v.Op = OpMIPS64ABSD
		return true
	case OpAdd16:
		v.Op = OpMIPS64ADDV
		return true
	case OpAdd32:
		v.Op = OpMIPS64ADDV
		return true
	case OpAdd32F:
		v.Op = OpMIPS64ADDF
		return true
	case OpAdd64:
		v.Op = OpMIPS64ADDV
		return true
	case OpAdd64F:
		v.Op = OpMIPS64ADDD
		return true
	case OpAdd8:
		v.Op = OpMIPS64ADDV
		return true
	case OpAddPtr:
		v.Op = OpMIPS64ADDV
		return true
	case OpAddr:
		return rewriteValueMIPS64_OpAddr(v)
	case OpAnd16:
		v.Op = OpMIPS64AND
		return true
	case OpAnd32:
		v.Op = OpMIPS64AND
		return true
	case OpAnd64:
		v.Op = OpMIPS64AND
		return true
	case OpAnd8:
		v.Op = OpMIPS64AND
		return true
	case OpAndB:
		v.Op = OpMIPS64AND
		return true
	case OpAtomicAdd32:
		v.Op = OpMIPS64LoweredAtomicAdd32
		return true
	case OpAtomicAdd64:
		v.Op = OpMIPS64LoweredAtomicAdd64
		return true
	case OpAtomicAnd32:
		v.Op = OpMIPS64LoweredAtomicAnd32
		return true
	case OpAtomicAnd8:
		return rewriteValueMIPS64_OpAtomicAnd8(v)
	case OpAtomicCompareAndSwap32:
		return rewriteValueMIPS64_OpAtomicCompareAndSwap32(v)
	case OpAtomicCompareAndSwap64:
		v.Op = OpMIPS64LoweredAtomicCas64
		return true
	case OpAtomicExchange32:
		v.Op = OpMIPS64LoweredAtomicExchange32
		return true
	case OpAtomicExchange64:
		v.Op = OpMIPS64LoweredAtomicExchange64
		return true
	case OpAtomicLoad32:
		v.Op = OpMIPS64LoweredAtomicLoad32
		return true
	case OpAtomicLoad64:
		v.Op = OpMIPS64LoweredAtomicLoad64
		return true
	case OpAtomicLoad8:
		v.Op = OpMIPS64LoweredAtomicLoad8
		return true
	case OpAtomicLoadPtr:
		// Pointers are 64 bits wide on mips64, so a pointer load is a 64-bit load.
		v.Op = OpMIPS64LoweredAtomicLoad64
		return true
	case OpAtomicOr32:
		v.Op = OpMIPS64LoweredAtomicOr32
		return true
	case OpAtomicOr8:
		return rewriteValueMIPS64_OpAtomicOr8(v)
	case OpAtomicStore32:
		v.Op = OpMIPS64LoweredAtomicStore32
		return true
	case OpAtomicStore64:
		v.Op = OpMIPS64LoweredAtomicStore64
		return true
	case OpAtomicStore8:
		v.Op = OpMIPS64LoweredAtomicStore8
		return true
	case OpAtomicStorePtrNoWB:
		v.Op = OpMIPS64LoweredAtomicStore64
		return true
	case OpAvg64u:
		return rewriteValueMIPS64_OpAvg64u(v)
	case OpClosureCall:
		v.Op = OpMIPS64CALLclosure
		return true
	case OpCom16:
		return rewriteValueMIPS64_OpCom16(v)
	case OpCom32:
		return rewriteValueMIPS64_OpCom32(v)
	case OpCom64:
		return rewriteValueMIPS64_OpCom64(v)
	case OpCom8:
		return rewriteValueMIPS64_OpCom8(v)
	case OpConst16:
		return rewriteValueMIPS64_OpConst16(v)
	case OpConst32:
		return rewriteValueMIPS64_OpConst32(v)
	case OpConst32F:
		return rewriteValueMIPS64_OpConst32F(v)
	case OpConst64:
		return rewriteValueMIPS64_OpConst64(v)
	case OpConst64F:
		return rewriteValueMIPS64_OpConst64F(v)
	case OpConst8:
		return rewriteValueMIPS64_OpConst8(v)
	case OpConstBool:
		return rewriteValueMIPS64_OpConstBool(v)
	case OpConstNil:
		return rewriteValueMIPS64_OpConstNil(v)
	case OpCvt32Fto32:
		v.Op = OpMIPS64TRUNCFW
		return true
	case OpCvt32Fto64:
		v.Op = OpMIPS64TRUNCFV
		return true
	case OpCvt32Fto64F:
		v.Op = OpMIPS64MOVFD
		return true
	case OpCvt32to32F:
		v.Op = OpMIPS64MOVWF
		return true
	case OpCvt32to64F:
		v.Op = OpMIPS64MOVWD
		return true
	case OpCvt64Fto32:
		v.Op = OpMIPS64TRUNCDW
		return true
	case OpCvt64Fto32F:
		v.Op = OpMIPS64MOVDF
		return true
	case OpCvt64Fto64:
		v.Op = OpMIPS64TRUNCDV
		return true
	case OpCvt64to32F:
		v.Op = OpMIPS64MOVVF
		return true
	case OpCvt64to64F:
		v.Op = OpMIPS64MOVVD
		return true
	case OpCvtBoolToUint8:
		// bool is already stored as 0/1 in a register; no instruction needed.
		v.Op = OpCopy
		return true
	case OpDiv16:
		return rewriteValueMIPS64_OpDiv16(v)
	case OpDiv16u:
		return rewriteValueMIPS64_OpDiv16u(v)
	case OpDiv32:
		return rewriteValueMIPS64_OpDiv32(v)
	case OpDiv32F:
		v.Op = OpMIPS64DIVF
		return true
	case OpDiv32u:
		return rewriteValueMIPS64_OpDiv32u(v)
	case OpDiv64:
		return rewriteValueMIPS64_OpDiv64(v)
	case OpDiv64F:
		v.Op = OpMIPS64DIVD
		return true
	case OpDiv64u:
		return rewriteValueMIPS64_OpDiv64u(v)
	case OpDiv8:
		return rewriteValueMIPS64_OpDiv8(v)
	case OpDiv8u:
		return rewriteValueMIPS64_OpDiv8u(v)
	case OpEq16:
		return rewriteValueMIPS64_OpEq16(v)
	case OpEq32:
		return rewriteValueMIPS64_OpEq32(v)
	case OpEq32F:
		return rewriteValueMIPS64_OpEq32F(v)
	case OpEq64:
		return rewriteValueMIPS64_OpEq64(v)
	case OpEq64F:
		return rewriteValueMIPS64_OpEq64F(v)
	case OpEq8:
		return rewriteValueMIPS64_OpEq8(v)
	case OpEqB:
		return rewriteValueMIPS64_OpEqB(v)
	case OpEqPtr:
		return rewriteValueMIPS64_OpEqPtr(v)
	case OpGetCallerPC:
		v.Op = OpMIPS64LoweredGetCallerPC
		return true
	case OpGetCallerSP:
		v.Op = OpMIPS64LoweredGetCallerSP
		return true
	case OpGetClosurePtr:
		v.Op = OpMIPS64LoweredGetClosurePtr
		return true
	case OpHmul32:
		return rewriteValueMIPS64_OpHmul32(v)
	case OpHmul32u:
		return rewriteValueMIPS64_OpHmul32u(v)
	case OpHmul64:
		return rewriteValueMIPS64_OpHmul64(v)
	case OpHmul64u:
		return rewriteValueMIPS64_OpHmul64u(v)
	case OpInterCall:
		v.Op = OpMIPS64CALLinter
		return true
	case OpIsInBounds:
		return rewriteValueMIPS64_OpIsInBounds(v)
	case OpIsNonNil:
		return rewriteValueMIPS64_OpIsNonNil(v)
	case OpIsSliceInBounds:
		return rewriteValueMIPS64_OpIsSliceInBounds(v)
	case OpLeq16:
		return rewriteValueMIPS64_OpLeq16(v)
	case OpLeq16U:
		return rewriteValueMIPS64_OpLeq16U(v)
	case OpLeq32:
		return rewriteValueMIPS64_OpLeq32(v)
	case OpLeq32F:
		return rewriteValueMIPS64_OpLeq32F(v)
	case OpLeq32U:
		return rewriteValueMIPS64_OpLeq32U(v)
	case OpLeq64:
		return rewriteValueMIPS64_OpLeq64(v)
	case OpLeq64F:
		return rewriteValueMIPS64_OpLeq64F(v)
	case OpLeq64U:
		return rewriteValueMIPS64_OpLeq64U(v)
	case OpLeq8:
		return rewriteValueMIPS64_OpLeq8(v)
	case OpLeq8U:
		return rewriteValueMIPS64_OpLeq8U(v)
	case OpLess16:
		return rewriteValueMIPS64_OpLess16(v)
	case OpLess16U:
		return rewriteValueMIPS64_OpLess16U(v)
	case OpLess32:
		return rewriteValueMIPS64_OpLess32(v)
	case OpLess32F:
		return rewriteValueMIPS64_OpLess32F(v)
	case OpLess32U:
		return rewriteValueMIPS64_OpLess32U(v)
	case OpLess64:
		return rewriteValueMIPS64_OpLess64(v)
	case OpLess64F:
		return rewriteValueMIPS64_OpLess64F(v)
	case OpLess64U:
		return rewriteValueMIPS64_OpLess64U(v)
	case OpLess8:
		return rewriteValueMIPS64_OpLess8(v)
	case OpLess8U:
		return rewriteValueMIPS64_OpLess8U(v)
	case OpLoad:
		return rewriteValueMIPS64_OpLoad(v)
	case OpLocalAddr:
		return rewriteValueMIPS64_OpLocalAddr(v)
	case OpLsh16x16:
		return rewriteValueMIPS64_OpLsh16x16(v)
	case OpLsh16x32:
		return rewriteValueMIPS64_OpLsh16x32(v)
	case OpLsh16x64:
		return rewriteValueMIPS64_OpLsh16x64(v)
	case OpLsh16x8:
		return rewriteValueMIPS64_OpLsh16x8(v)
	case OpLsh32x16:
		return rewriteValueMIPS64_OpLsh32x16(v)
	case OpLsh32x32:
		return rewriteValueMIPS64_OpLsh32x32(v)
	case OpLsh32x64:
		return rewriteValueMIPS64_OpLsh32x64(v)
	case OpLsh32x8:
		return rewriteValueMIPS64_OpLsh32x8(v)
	case OpLsh64x16:
		return rewriteValueMIPS64_OpLsh64x16(v)
	case OpLsh64x32:
		return rewriteValueMIPS64_OpLsh64x32(v)
	case OpLsh64x64:
		return rewriteValueMIPS64_OpLsh64x64(v)
	case OpLsh64x8:
		return rewriteValueMIPS64_OpLsh64x8(v)
	case OpLsh8x16:
		return rewriteValueMIPS64_OpLsh8x16(v)
	case OpLsh8x32:
		return rewriteValueMIPS64_OpLsh8x32(v)
	case OpLsh8x64:
		return rewriteValueMIPS64_OpLsh8x64(v)
	case OpLsh8x8:
		return rewriteValueMIPS64_OpLsh8x8(v)
	case OpMIPS64ADDV:
		return rewriteValueMIPS64_OpMIPS64ADDV(v)
	case OpMIPS64ADDVconst:
		return rewriteValueMIPS64_OpMIPS64ADDVconst(v)
	case OpMIPS64AND:
		return rewriteValueMIPS64_OpMIPS64AND(v)
	case OpMIPS64ANDconst:
		return rewriteValueMIPS64_OpMIPS64ANDconst(v)
	case OpMIPS64LoweredAtomicAdd32:
		return rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32(v)
	case OpMIPS64LoweredAtomicAdd64:
		return rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64(v)
	case OpMIPS64LoweredAtomicStore32:
		return rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32(v)
	case OpMIPS64LoweredAtomicStore64:
		return rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64(v)
	case OpMIPS64LoweredPanicBoundsCR:
		return rewriteValueMIPS64_OpMIPS64LoweredPanicBoundsCR(v)
	case OpMIPS64LoweredPanicBoundsRC:
		return rewriteValueMIPS64_OpMIPS64LoweredPanicBoundsRC(v)
	case OpMIPS64LoweredPanicBoundsRR:
		return rewriteValueMIPS64_OpMIPS64LoweredPanicBoundsRR(v)
	case OpMIPS64MOVBUload:
		return rewriteValueMIPS64_OpMIPS64MOVBUload(v)
	case OpMIPS64MOVBUreg:
		return rewriteValueMIPS64_OpMIPS64MOVBUreg(v)
	case OpMIPS64MOVBload:
		return rewriteValueMIPS64_OpMIPS64MOVBload(v)
	case OpMIPS64MOVBreg:
		return rewriteValueMIPS64_OpMIPS64MOVBreg(v)
	case OpMIPS64MOVBstore:
		return rewriteValueMIPS64_OpMIPS64MOVBstore(v)
	case OpMIPS64MOVDF:
		return rewriteValueMIPS64_OpMIPS64MOVDF(v)
	case OpMIPS64MOVDload:
		return rewriteValueMIPS64_OpMIPS64MOVDload(v)
	case OpMIPS64MOVDstore:
		return rewriteValueMIPS64_OpMIPS64MOVDstore(v)
	case OpMIPS64MOVFload:
		return rewriteValueMIPS64_OpMIPS64MOVFload(v)
	case OpMIPS64MOVFstore:
		return rewriteValueMIPS64_OpMIPS64MOVFstore(v)
	case OpMIPS64MOVHUload:
		return rewriteValueMIPS64_OpMIPS64MOVHUload(v)
	case OpMIPS64MOVHUreg:
		return rewriteValueMIPS64_OpMIPS64MOVHUreg(v)
	case OpMIPS64MOVHload:
		return rewriteValueMIPS64_OpMIPS64MOVHload(v)
	case OpMIPS64MOVHreg:
		return rewriteValueMIPS64_OpMIPS64MOVHreg(v)
	case OpMIPS64MOVHstore:
		return rewriteValueMIPS64_OpMIPS64MOVHstore(v)
	case OpMIPS64MOVVload:
		return rewriteValueMIPS64_OpMIPS64MOVVload(v)
	case OpMIPS64MOVVnop:
		return rewriteValueMIPS64_OpMIPS64MOVVnop(v)
	case OpMIPS64MOVVreg:
		return rewriteValueMIPS64_OpMIPS64MOVVreg(v)
	case OpMIPS64MOVVstore:
		return rewriteValueMIPS64_OpMIPS64MOVVstore(v)
	case OpMIPS64MOVWUload:
		return rewriteValueMIPS64_OpMIPS64MOVWUload(v)
	case OpMIPS64MOVWUreg:
		return rewriteValueMIPS64_OpMIPS64MOVWUreg(v)
	case OpMIPS64MOVWload:
		return rewriteValueMIPS64_OpMIPS64MOVWload(v)
	case OpMIPS64MOVWreg:
		return rewriteValueMIPS64_OpMIPS64MOVWreg(v)
	case OpMIPS64MOVWstore:
		return rewriteValueMIPS64_OpMIPS64MOVWstore(v)
	case OpMIPS64NEGV:
		return rewriteValueMIPS64_OpMIPS64NEGV(v)
	case OpMIPS64OR:
		return rewriteValueMIPS64_OpMIPS64OR(v)
	case OpMIPS64ORconst:
		return rewriteValueMIPS64_OpMIPS64ORconst(v)
	case OpMIPS64SGT:
		return rewriteValueMIPS64_OpMIPS64SGT(v)
	case OpMIPS64SGTU:
		return rewriteValueMIPS64_OpMIPS64SGTU(v)
	case OpMIPS64SGTUconst:
		return rewriteValueMIPS64_OpMIPS64SGTUconst(v)
	case OpMIPS64SGTconst:
		return rewriteValueMIPS64_OpMIPS64SGTconst(v)
	case OpMIPS64SLLV:
		return rewriteValueMIPS64_OpMIPS64SLLV(v)
	case OpMIPS64SLLVconst:
		return rewriteValueMIPS64_OpMIPS64SLLVconst(v)
	case OpMIPS64SRAV:
		return rewriteValueMIPS64_OpMIPS64SRAV(v)
	case OpMIPS64SRAVconst:
		return rewriteValueMIPS64_OpMIPS64SRAVconst(v)
	case OpMIPS64SRLV:
		return rewriteValueMIPS64_OpMIPS64SRLV(v)
	case OpMIPS64SRLVconst:
		return rewriteValueMIPS64_OpMIPS64SRLVconst(v)
	case OpMIPS64SUBV:
		return rewriteValueMIPS64_OpMIPS64SUBV(v)
	case OpMIPS64SUBVconst:
		return rewriteValueMIPS64_OpMIPS64SUBVconst(v)
	case OpMIPS64XOR:
		return rewriteValueMIPS64_OpMIPS64XOR(v)
	case OpMIPS64XORconst:
		return rewriteValueMIPS64_OpMIPS64XORconst(v)
	case OpMod16:
		return rewriteValueMIPS64_OpMod16(v)
	case OpMod16u:
		return rewriteValueMIPS64_OpMod16u(v)
	case OpMod32:
		return rewriteValueMIPS64_OpMod32(v)
	case OpMod32u:
		return rewriteValueMIPS64_OpMod32u(v)
	case OpMod64:
		return rewriteValueMIPS64_OpMod64(v)
	case OpMod64u:
		return rewriteValueMIPS64_OpMod64u(v)
	case OpMod8:
		return rewriteValueMIPS64_OpMod8(v)
	case OpMod8u:
		return rewriteValueMIPS64_OpMod8u(v)
	case OpMove:
		return rewriteValueMIPS64_OpMove(v)
	case OpMul16:
		return rewriteValueMIPS64_OpMul16(v)
	case OpMul32:
		return rewriteValueMIPS64_OpMul32(v)
	case OpMul32F:
		v.Op = OpMIPS64MULF
		return true
	case OpMul64:
		return rewriteValueMIPS64_OpMul64(v)
	case OpMul64F:
		v.Op = OpMIPS64MULD
		return true
	case OpMul64uhilo:
		v.Op = OpMIPS64MULVU
		return true
	case OpMul8:
		return rewriteValueMIPS64_OpMul8(v)
	case OpNeg16:
		v.Op = OpMIPS64NEGV
		return true
	case OpNeg32:
		v.Op = OpMIPS64NEGV
		return true
	case OpNeg32F:
		v.Op = OpMIPS64NEGF
		return true
	case OpNeg64:
		v.Op = OpMIPS64NEGV
		return true
	case OpNeg64F:
		v.Op = OpMIPS64NEGD
		return true
	case OpNeg8:
		v.Op = OpMIPS64NEGV
		return true
	case OpNeq16:
		return rewriteValueMIPS64_OpNeq16(v)
	case OpNeq32:
		return rewriteValueMIPS64_OpNeq32(v)
	case OpNeq32F:
		return rewriteValueMIPS64_OpNeq32F(v)
	case OpNeq64:
		return rewriteValueMIPS64_OpNeq64(v)
	case OpNeq64F:
		return rewriteValueMIPS64_OpNeq64F(v)
	case OpNeq8:
		return rewriteValueMIPS64_OpNeq8(v)
	case OpNeqB:
		// Booleans are 0/1, so inequality is exactly XOR.
		v.Op = OpMIPS64XOR
		return true
	case OpNeqPtr:
		return rewriteValueMIPS64_OpNeqPtr(v)
	case OpNilCheck:
		v.Op = OpMIPS64LoweredNilCheck
		return true
	case OpNot:
		return rewriteValueMIPS64_OpNot(v)
	case OpOffPtr:
		return rewriteValueMIPS64_OpOffPtr(v)
	case OpOr16:
		v.Op = OpMIPS64OR
		return true
	case OpOr32:
		v.Op = OpMIPS64OR
		return true
	case OpOr64:
		v.Op = OpMIPS64OR
		return true
	case OpOr8:
		v.Op = OpMIPS64OR
		return true
	case OpOrB:
		v.Op = OpMIPS64OR
		return true
	case OpPanicBounds:
		v.Op = OpMIPS64LoweredPanicBoundsRR
		return true
	case OpPubBarrier:
		v.Op = OpMIPS64LoweredPubBarrier
		return true
	case OpRotateLeft16:
		return rewriteValueMIPS64_OpRotateLeft16(v)
	case OpRotateLeft32:
		return rewriteValueMIPS64_OpRotateLeft32(v)
	case OpRotateLeft64:
		return rewriteValueMIPS64_OpRotateLeft64(v)
	case OpRotateLeft8:
		return rewriteValueMIPS64_OpRotateLeft8(v)
	case OpRound32F:
		v.Op = OpCopy
		return true
	case OpRound64F:
		v.Op = OpCopy
		return true
	case OpRsh16Ux16:
		return rewriteValueMIPS64_OpRsh16Ux16(v)
	case OpRsh16Ux32:
		return rewriteValueMIPS64_OpRsh16Ux32(v)
	case OpRsh16Ux64:
		return rewriteValueMIPS64_OpRsh16Ux64(v)
	case OpRsh16Ux8:
		return rewriteValueMIPS64_OpRsh16Ux8(v)
	case OpRsh16x16:
		return rewriteValueMIPS64_OpRsh16x16(v)
	case OpRsh16x32:
		return rewriteValueMIPS64_OpRsh16x32(v)
	case OpRsh16x64:
		return rewriteValueMIPS64_OpRsh16x64(v)
	case OpRsh16x8:
		return rewriteValueMIPS64_OpRsh16x8(v)
	case OpRsh32Ux16:
		return rewriteValueMIPS64_OpRsh32Ux16(v)
	case OpRsh32Ux32:
		return rewriteValueMIPS64_OpRsh32Ux32(v)
	case OpRsh32Ux64:
		return rewriteValueMIPS64_OpRsh32Ux64(v)
	case OpRsh32Ux8:
		return rewriteValueMIPS64_OpRsh32Ux8(v)
	case OpRsh32x16:
		return rewriteValueMIPS64_OpRsh32x16(v)
	case OpRsh32x32:
		return rewriteValueMIPS64_OpRsh32x32(v)
	case OpRsh32x64:
		return rewriteValueMIPS64_OpRsh32x64(v)
	case OpRsh32x8:
		return rewriteValueMIPS64_OpRsh32x8(v)
	case OpRsh64Ux16:
		return rewriteValueMIPS64_OpRsh64Ux16(v)
	case OpRsh64Ux32:
		return rewriteValueMIPS64_OpRsh64Ux32(v)
	case OpRsh64Ux64:
		return rewriteValueMIPS64_OpRsh64Ux64(v)
	case OpRsh64Ux8:
		return rewriteValueMIPS64_OpRsh64Ux8(v)
	case OpRsh64x16:
		return rewriteValueMIPS64_OpRsh64x16(v)
	case OpRsh64x32:
		return rewriteValueMIPS64_OpRsh64x32(v)
	case OpRsh64x64:
		return rewriteValueMIPS64_OpRsh64x64(v)
	case OpRsh64x8:
		return rewriteValueMIPS64_OpRsh64x8(v)
	case OpRsh8Ux16:
		return rewriteValueMIPS64_OpRsh8Ux16(v)
	case OpRsh8Ux32:
		return rewriteValueMIPS64_OpRsh8Ux32(v)
	case OpRsh8Ux64:
		return rewriteValueMIPS64_OpRsh8Ux64(v)
	case OpRsh8Ux8:
		return rewriteValueMIPS64_OpRsh8Ux8(v)
	case OpRsh8x16:
		return rewriteValueMIPS64_OpRsh8x16(v)
	case OpRsh8x32:
		return rewriteValueMIPS64_OpRsh8x32(v)
	case OpRsh8x64:
		return rewriteValueMIPS64_OpRsh8x64(v)
	case OpRsh8x8:
		return rewriteValueMIPS64_OpRsh8x8(v)
	case OpSelect0:
		return rewriteValueMIPS64_OpSelect0(v)
	case OpSelect1:
		return rewriteValueMIPS64_OpSelect1(v)
	case OpSignExt16to32:
		v.Op = OpMIPS64MOVHreg
		return true
	case OpSignExt16to64:
		v.Op = OpMIPS64MOVHreg
		return true
	case OpSignExt32to64:
		v.Op = OpMIPS64MOVWreg
		return true
	case OpSignExt8to16:
		v.Op = OpMIPS64MOVBreg
		return true
	case OpSignExt8to32:
		v.Op = OpMIPS64MOVBreg
		return true
	case OpSignExt8to64:
		v.Op = OpMIPS64MOVBreg
		return true
	case OpSlicemask:
		return rewriteValueMIPS64_OpSlicemask(v)
	case OpSqrt:
		v.Op = OpMIPS64SQRTD
		return true
	case OpSqrt32:
		v.Op = OpMIPS64SQRTF
		return true
	case OpStaticCall:
		v.Op = OpMIPS64CALLstatic
		return true
	case OpStore:
		return rewriteValueMIPS64_OpStore(v)
	case OpSub16:
		v.Op = OpMIPS64SUBV
		return true
	case OpSub32:
		v.Op = OpMIPS64SUBV
		return true
	case OpSub32F:
		v.Op = OpMIPS64SUBF
		return true
	case OpSub64:
		v.Op = OpMIPS64SUBV
		return true
	case OpSub64F:
		v.Op = OpMIPS64SUBD
		return true
	case OpSub8:
		v.Op = OpMIPS64SUBV
		return true
	case OpSubPtr:
		v.Op = OpMIPS64SUBV
		return true
	case OpTailCall:
		v.Op = OpMIPS64CALLtail
		return true
	case OpTailCallInter:
		v.Op = OpMIPS64CALLtailinter
		return true
	case OpTrunc16to8:
		// Truncations are no-ops in registers; narrowing happens at use sites.
		v.Op = OpCopy
		return true
	case OpTrunc32to16:
		v.Op = OpCopy
		return true
	case OpTrunc32to8:
		v.Op = OpCopy
		return true
	case OpTrunc64to16:
		v.Op = OpCopy
		return true
	case OpTrunc64to32:
		v.Op = OpCopy
		return true
	case OpTrunc64to8:
		v.Op = OpCopy
		return true
	case OpWB:
		v.Op = OpMIPS64LoweredWB
		return true
	case OpXor16:
		v.Op = OpMIPS64XOR
		return true
	case OpXor32:
		v.Op = OpMIPS64XOR
		return true
	case OpXor64:
		v.Op = OpMIPS64XOR
		return true
	case OpXor8:
		v.Op = OpMIPS64XOR
		return true
	case OpZero:
		return rewriteValueMIPS64_OpZero(v)
	case OpZeroExt16to32:
		v.Op = OpMIPS64MOVHUreg
		return true
	case OpZeroExt16to64:
		v.Op = OpMIPS64MOVHUreg
		return true
	case OpZeroExt32to64:
		v.Op = OpMIPS64MOVWUreg
		return true
	case OpZeroExt8to16:
		v.Op = OpMIPS64MOVBUreg
		return true
	case OpZeroExt8to32:
		v.Op = OpMIPS64MOVBUreg
		return true
	case OpZeroExt8to64:
		v.Op = OpMIPS64MOVBUreg
		return true
	}
	return false
}
// rewriteValueMIPS64_OpAddr lowers a symbolic address:
// (Addr {sym} base) => (MOVVaddr {sym} base).
// The rule is unconditional, so this always rewrites and returns true.
func rewriteValueMIPS64_OpAddr(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Addr {sym} base)
	// result: (MOVVaddr {sym} base)
	for {
		sym := auxToSym(v.Aux)
		base := v_0
		v.reset(OpMIPS64MOVVaddr)
		v.Aux = symToAux(sym)
		v.AddArg(base)
		return true
	}
}
// rewriteValueMIPS64_OpAtomicAnd8 lowers AtomicAnd8 to LoweredAtomicAnd32.
// There is no byte-wide atomic RMW on MIPS64, so the AND is applied to the
// 4-byte-aligned word containing the byte: the word address is ptr &^ 3, the
// byte value is shifted into its position within that word, and all other
// byte lanes of the operand are filled with 1s (NOR of the shifted 0xff
// mask) so the AND leaves them unchanged. The lane shift is (ptr&3)*8 bits
// on little-endian, and ((ptr^3)&3)*8 on big-endian, where the lowest
// address holds the most significant byte.
func rewriteValueMIPS64_OpAtomicAnd8(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (AtomicAnd8 ptr val mem)
	// cond: !config.BigEndian
	// result: (LoweredAtomicAnd32 (AND <typ.UInt32Ptr> (MOVVconst [^3]) ptr) (OR <typ.UInt64> (SLLV <typ.UInt32> (ZeroExt8to32 val) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] ptr))) (NOR (MOVVconst [0]) <typ.UInt64> (SLLV <typ.UInt64> (MOVVconst [0xff]) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] ptr))))) mem)
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		if !(!config.BigEndian) {
			break
		}
		v.reset(OpMIPS64LoweredAtomicAnd32)
		// v0 = ptr &^ 3: address of the aligned word containing the byte.
		v0 := b.NewValue0(v.Pos, OpMIPS64AND, typ.UInt32Ptr)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(^3)
		v0.AddArg2(v1, ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64OR, typ.UInt64)
		// v3 = zero-extended val shifted to its byte lane.
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt32)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v4.AddArg(val)
		// v5 = (ptr & 3) << 3: lane shift amount in bits; reused below.
		v5 := b.NewValue0(v.Pos, OpMIPS64SLLVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(3)
		v6 := b.NewValue0(v.Pos, OpMIPS64ANDconst, typ.UInt64)
		v6.AuxInt = int64ToAuxInt(3)
		v6.AddArg(ptr)
		v5.AddArg(v6)
		v3.AddArg2(v4, v5)
		// v7 = ^(0xff << shift): all-ones except in the target byte lane.
		v7 := b.NewValue0(v.Pos, OpMIPS64NOR, typ.UInt64)
		v8 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v8.AuxInt = int64ToAuxInt(0)
		v9 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt64)
		v10 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v10.AuxInt = int64ToAuxInt(0xff)
		v9.AddArg2(v10, v5)
		v7.AddArg2(v8, v9)
		v2.AddArg2(v3, v7)
		v.AddArg3(v0, v2, mem)
		return true
	}
	// match: (AtomicAnd8 ptr val mem)
	// cond: config.BigEndian
	// result: (LoweredAtomicAnd32 (AND <typ.UInt32Ptr> (MOVVconst [^3]) ptr) (OR <typ.UInt64> (SLLV <typ.UInt32> (ZeroExt8to32 val) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] (XORconst <typ.UInt64> [3] ptr)))) (NOR (MOVVconst [0]) <typ.UInt64> (SLLV <typ.UInt64> (MOVVconst [0xff]) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] (XORconst <typ.UInt64> [3] ptr)))))) mem)
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		if !(config.BigEndian) {
			break
		}
		v.reset(OpMIPS64LoweredAtomicAnd32)
		v0 := b.NewValue0(v.Pos, OpMIPS64AND, typ.UInt32Ptr)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(^3)
		v0.AddArg2(v1, ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64OR, typ.UInt64)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt32)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v4.AddArg(val)
		// v5 = ((ptr ^ 3) & 3) << 3: big-endian lane shift (byte order within
		// the word is reversed relative to little-endian).
		v5 := b.NewValue0(v.Pos, OpMIPS64SLLVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(3)
		v6 := b.NewValue0(v.Pos, OpMIPS64ANDconst, typ.UInt64)
		v6.AuxInt = int64ToAuxInt(3)
		v7 := b.NewValue0(v.Pos, OpMIPS64XORconst, typ.UInt64)
		v7.AuxInt = int64ToAuxInt(3)
		v7.AddArg(ptr)
		v6.AddArg(v7)
		v5.AddArg(v6)
		v3.AddArg2(v4, v5)
		// v8 = ^(0xff << shift): preserve every byte except the target lane.
		v8 := b.NewValue0(v.Pos, OpMIPS64NOR, typ.UInt64)
		v9 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v9.AuxInt = int64ToAuxInt(0)
		v10 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt64)
		v11 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v11.AuxInt = int64ToAuxInt(0xff)
		v10.AddArg2(v11, v5)
		v8.AddArg2(v9, v10)
		v2.AddArg2(v3, v8)
		v.AddArg3(v0, v2, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpAtomicCompareAndSwap32 lowers
// (AtomicCompareAndSwap32 ptr old new mem) to
// (LoweredAtomicCas32 ptr (SignExt32to64 old) new mem).
// The 'old' operand is sign-extended to 64 bits before the compare —
// presumably because the 32-bit load inside the CAS loop produces a
// sign-extended value on MIPS64, so the comparison operand must be in the
// same representation (TODO confirm against LoweredAtomicCas32 codegen).
// The rule is unconditional, so this always rewrites and returns true.
func rewriteValueMIPS64_OpAtomicCompareAndSwap32(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicCompareAndSwap32 ptr old new mem)
	// result: (LoweredAtomicCas32 ptr (SignExt32to64 old) new mem)
	for {
		ptr := v_0
		old := v_1
		new := v_2
		mem := v_3
		v.reset(OpMIPS64LoweredAtomicCas32)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(old)
		v.AddArg4(ptr, v0, new, mem)
		return true
	}
}
// rewriteValueMIPS64_OpAtomicOr8 lowers AtomicOr8 to LoweredAtomicOr32.
// As with AtomicAnd8, the byte operation is emulated on the 4-byte-aligned
// word containing the byte (address ptr &^ 3) by shifting the byte value
// into its lane. Unlike AND, no mask for the other lanes is needed: they are
// ORed with zero, which leaves them unchanged. The lane shift is (ptr&3)*8
// bits on little-endian and ((ptr^3)&3)*8 on big-endian.
func rewriteValueMIPS64_OpAtomicOr8(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (AtomicOr8 ptr val mem)
	// cond: !config.BigEndian
	// result: (LoweredAtomicOr32 (AND <typ.UInt32Ptr> (MOVVconst [^3]) ptr) (SLLV <typ.UInt32> (ZeroExt8to32 val) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] ptr))) mem)
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		if !(!config.BigEndian) {
			break
		}
		v.reset(OpMIPS64LoweredAtomicOr32)
		// v0 = ptr &^ 3: address of the aligned containing word.
		v0 := b.NewValue0(v.Pos, OpMIPS64AND, typ.UInt32Ptr)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(^3)
		v0.AddArg2(v1, ptr)
		// v2 = zero-extended val shifted by (ptr & 3) << 3 bits.
		v2 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt32)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v3.AddArg(val)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(3)
		v5 := b.NewValue0(v.Pos, OpMIPS64ANDconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(3)
		v5.AddArg(ptr)
		v4.AddArg(v5)
		v2.AddArg2(v3, v4)
		v.AddArg3(v0, v2, mem)
		return true
	}
	// match: (AtomicOr8 ptr val mem)
	// cond: config.BigEndian
	// result: (LoweredAtomicOr32 (AND <typ.UInt32Ptr> (MOVVconst [^3]) ptr) (SLLV <typ.UInt32> (ZeroExt8to32 val) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] (XORconst <typ.UInt64> [3] ptr)))) mem)
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		if !(config.BigEndian) {
			break
		}
		v.reset(OpMIPS64LoweredAtomicOr32)
		v0 := b.NewValue0(v.Pos, OpMIPS64AND, typ.UInt32Ptr)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(^3)
		v0.AddArg2(v1, ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt32)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v3.AddArg(val)
		// Shift amount = ((ptr ^ 3) & 3) << 3: byte order within the word is
		// reversed on big-endian.
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(3)
		v5 := b.NewValue0(v.Pos, OpMIPS64ANDconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(3)
		v6 := b.NewValue0(v.Pos, OpMIPS64XORconst, typ.UInt64)
		v6.AuxInt = int64ToAuxInt(3)
		v6.AddArg(ptr)
		v5.AddArg(v6)
		v4.AddArg(v5)
		v2.AddArg2(v3, v4)
		v.AddArg3(v0, v2, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpAvg64u lowers the unsigned average:
// (Avg64u <t> x y) => (ADDV (SRLVconst <t> (SUBV <t> x y) [1]) y),
// i.e. (x-y)/2 + y, which avoids the overflow that computing (x+y)/2
// directly could incur. NOTE(review): this identity only equals (x+y)/2
// when x >= y (otherwise the unsigned subtraction wraps) — presumably the
// generic Avg64u op is only generated under that invariant (it is used by
// unsigned-division strength reduction); confirm against genericOps.
// The rule is unconditional, so this always rewrites and returns true.
func rewriteValueMIPS64_OpAvg64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Avg64u <t> x y)
	// result: (ADDV (SRLVconst <t> (SUBV <t> x y) [1]) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64ADDV)
		v0 := b.NewValue0(v.Pos, OpMIPS64SRLVconst, t)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
		v1.AddArg2(x, y)
		v0.AddArg(v1)
		v.AddArg2(v0, y)
		return true
	}
}
// rewriteValueMIPS64_OpCom16 lowers Com16 (bitwise complement) to
// (NOR (MOVVconst [0]) x); NOR with zero yields ^x.
func rewriteValueMIPS64_OpCom16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Com16 x)
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v_0
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueMIPS64_OpCom32 lowers Com32 (bitwise complement) to
// (NOR (MOVVconst [0]) x); NOR with zero yields ^x.
func rewriteValueMIPS64_OpCom32(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Com32 x)
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v_0
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueMIPS64_OpCom64 lowers Com64 (bitwise complement) to
// (NOR (MOVVconst [0]) x); NOR with zero yields ^x.
func rewriteValueMIPS64_OpCom64(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Com64 x)
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v_0
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueMIPS64_OpCom8 lowers Com8 (bitwise complement) to
// (NOR (MOVVconst [0]) x); NOR with zero yields ^x.
func rewriteValueMIPS64_OpCom8(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Com8 x)
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v_0
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueMIPS64_OpConst16 materializes a 16-bit constant as a
// MOVVconst whose aux is the value widened to int64.
func rewriteValueMIPS64_OpConst16(v *Value) bool {
	// match: (Const16 [val])
	// result: (MOVVconst [int64(val)])
	for {
		val := auxIntToInt16(v.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueMIPS64_OpConst32 materializes a 32-bit constant as a
// MOVVconst whose aux is the value widened to int64.
func rewriteValueMIPS64_OpConst32(v *Value) bool {
	// match: (Const32 [val])
	// result: (MOVVconst [int64(val)])
	for {
		val := auxIntToInt32(v.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueMIPS64_OpConst32F materializes a float32 constant as a
// MOVFconst, widening the aux value to float64 (the auxInt encoding).
func rewriteValueMIPS64_OpConst32F(v *Value) bool {
	// match: (Const32F [val])
	// result: (MOVFconst [float64(val)])
	for {
		val := auxIntToFloat32(v.AuxInt)
		v.reset(OpMIPS64MOVFconst)
		v.AuxInt = float64ToAuxInt(float64(val))
		return true
	}
}
// rewriteValueMIPS64_OpConst64 materializes a 64-bit constant as a
// MOVVconst with the same int64 aux value.
func rewriteValueMIPS64_OpConst64(v *Value) bool {
	// match: (Const64 [val])
	// result: (MOVVconst [int64(val)])
	for {
		val := auxIntToInt64(v.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueMIPS64_OpConst64F materializes a float64 constant as a
// MOVDconst with a float64 aux value.
func rewriteValueMIPS64_OpConst64F(v *Value) bool {
	// match: (Const64F [val])
	// result: (MOVDconst [float64(val)])
	for {
		val := auxIntToFloat64(v.AuxInt)
		v.reset(OpMIPS64MOVDconst)
		v.AuxInt = float64ToAuxInt(float64(val))
		return true
	}
}
// rewriteValueMIPS64_OpConst8 materializes an 8-bit constant as a
// MOVVconst whose aux is the value widened to int64.
func rewriteValueMIPS64_OpConst8(v *Value) bool {
	// match: (Const8 [val])
	// result: (MOVVconst [int64(val)])
	for {
		val := auxIntToInt8(v.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueMIPS64_OpConstBool materializes a boolean constant as a
// MOVVconst of 0 or 1 (via b2i).
func rewriteValueMIPS64_OpConstBool(v *Value) bool {
	// match: (ConstBool [t])
	// result: (MOVVconst [int64(b2i(t))])
	for {
		t := auxIntToBool(v.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(b2i(t)))
		return true
	}
}
// rewriteValueMIPS64_OpConstNil lowers the nil pointer constant to
// (MOVVconst [0]).
func rewriteValueMIPS64_OpConstNil(v *Value) bool {
	// match: (ConstNil)
	// result: (MOVVconst [0])
	for {
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv16 lowers signed 16-bit division: sign-extend
// both operands to 64 bits, DIVV, and take Select1 of the result tuple.
func rewriteValueMIPS64_OpDiv16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16 x y)
	// result: (Select1 (DIVV (SignExt16to64 x) (SignExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv16u lowers unsigned 16-bit division: zero-extend
// both operands to 64 bits, DIVVU, and take Select1 of the result tuple.
func rewriteValueMIPS64_OpDiv16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16u x y)
	// result: (Select1 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv32 lowers signed 32-bit division: sign-extend
// both operands to 64 bits, DIVV, and take Select1 of the result tuple.
func rewriteValueMIPS64_OpDiv32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div32 x y)
	// result: (Select1 (DIVV (SignExt32to64 x) (SignExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv32u lowers unsigned 32-bit division: zero-extend
// both operands to 64 bits, DIVVU, and take Select1 of the result tuple.
func rewriteValueMIPS64_OpDiv32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div32u x y)
	// result: (Select1 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv64 lowers signed 64-bit division directly to
// DIVV (no extension needed), taking Select1 of the result tuple.
func rewriteValueMIPS64_OpDiv64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div64 x y)
	// result: (Select1 (DIVV x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv64u lowers unsigned 64-bit division directly to
// DIVVU (no extension needed), taking Select1 of the result tuple.
func rewriteValueMIPS64_OpDiv64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div64u x y)
	// result: (Select1 (DIVVU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv8 lowers signed 8-bit division: sign-extend
// both operands to 64 bits, DIVV, and take Select1 of the result tuple.
func rewriteValueMIPS64_OpDiv8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8 x y)
	// result: (Select1 (DIVV (SignExt8to64 x) (SignExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv8u lowers unsigned 8-bit division: zero-extend
// both operands to 64 bits, DIVVU, and take Select1 of the result tuple.
func rewriteValueMIPS64_OpDiv8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8u x y)
	// result: (Select1 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpEq16 computes 16-bit equality as
// (x^y) <u 1 on the zero-extended operands, i.e. SGTU(1, x^y).
func rewriteValueMIPS64_OpEq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq16 x y)
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpEq32 computes 32-bit equality as
// (x^y) <u 1 on the zero-extended operands, i.e. SGTU(1, x^y).
func rewriteValueMIPS64_OpEq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq32 x y)
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpEq32F lowers float32 equality to reading the FP
// condition flag after a CMPEQF compare.
func rewriteValueMIPS64_OpEq32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Eq32F x y)
	// result: (FPFlagTrue (CMPEQF x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpEq64 computes 64-bit equality as (x^y) <u 1,
// i.e. SGTU(1, x^y); no operand extension is needed.
func rewriteValueMIPS64_OpEq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq64 x y)
	// result: (SGTU (MOVVconst [1]) (XOR x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpEq64F lowers float64 equality to reading the FP
// condition flag after a CMPEQD compare.
func rewriteValueMIPS64_OpEq64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Eq64F x y)
	// result: (FPFlagTrue (CMPEQD x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpEq8 computes 8-bit equality as
// (x^y) <u 1 on the zero-extended operands, i.e. SGTU(1, x^y).
func rewriteValueMIPS64_OpEq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq8 x y)
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpEqB computes boolean equality as 1 ^ (x ^ y);
// for 0/1 booleans this is 1 exactly when x == y.
func rewriteValueMIPS64_OpEqB(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (EqB x y)
	// result: (XOR (MOVVconst [1]) (XOR <typ.Bool> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.Bool)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpEqPtr computes pointer equality the same way as
// Eq64: (x^y) <u 1, i.e. SGTU(1, x^y).
func rewriteValueMIPS64_OpEqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (EqPtr x y)
	// result: (SGTU (MOVVconst [1]) (XOR x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpHmul32 computes the high 32 bits of a signed 32x32
// multiply: form the full product of the sign-extended operands (Select1
// of MULV) and arithmetic-shift it right by 32.
func rewriteValueMIPS64_OpHmul32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul32 x y)
	// result: (SRAVconst (Select1 <typ.Int64> (MULV (SignExt32to64 x) (SignExt32to64 y))) [32])
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = int64ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpSelect1, typ.Int64)
		v1 := b.NewValue0(v.Pos, OpMIPS64MULV, types.NewTuple(typ.Int64, typ.Int64))
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpHmul32u computes the high 32 bits of an unsigned
// 32x32 multiply: full product of the zero-extended operands (Select1 of
// MULVU), logically shifted right by 32.
func rewriteValueMIPS64_OpHmul32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul32u x y)
	// result: (SRLVconst (Select1 <typ.UInt64> (MULVU (ZeroExt32to64 x) (ZeroExt32to64 y))) [32])
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRLVconst)
		v.AuxInt = int64ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpHmul64 lowers the signed high-multiply to Select0
// of the MULV tuple (the upper half of the 128-bit product).
func rewriteValueMIPS64_OpHmul64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul64 x y)
	// result: (Select0 (MULV x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULV, types.NewTuple(typ.Int64, typ.Int64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpHmul64u lowers the unsigned high-multiply to
// Select0 of the MULVU tuple (the upper half of the 128-bit product).
func rewriteValueMIPS64_OpHmul64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul64u x y)
	// result: (Select0 (MULVU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpIsInBounds lowers the bounds check idx < len to an
// unsigned compare with operands swapped: (SGTU len idx).
func rewriteValueMIPS64_OpIsInBounds(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (IsInBounds idx len)
	// result: (SGTU len idx)
	for {
		idx := v_0
		len := v_1
		v.reset(OpMIPS64SGTU)
		v.AddArg2(len, idx)
		return true
	}
}
// rewriteValueMIPS64_OpIsNonNil lowers the nil check to ptr >u 0,
// i.e. (SGTU ptr (MOVVconst [0])).
func rewriteValueMIPS64_OpIsNonNil(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (IsNonNil ptr)
	// result: (SGTU ptr (MOVVconst [0]))
	for {
		ptr := v_0
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(ptr, v0)
		return true
	}
}
// rewriteValueMIPS64_OpIsSliceInBounds lowers idx <= len (unsigned) as
// the negation 1 ^ (idx >u len).
func rewriteValueMIPS64_OpIsSliceInBounds(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (IsSliceInBounds idx len)
	// result: (XOR (MOVVconst [1]) (SGTU idx len))
	for {
		idx := v_0
		len := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v1.AddArg2(idx, len)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq16 computes signed x <= y as the negation
// 1 ^ (x > y) on the sign-extended operands.
func rewriteValueMIPS64_OpLeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16 x y)
	// result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 x) (SignExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq16U computes unsigned x <= y as the negation
// 1 ^ (x >u y) on the zero-extended operands.
func rewriteValueMIPS64_OpLeq16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16U x y)
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq32 computes signed x <= y as the negation
// 1 ^ (x > y) on the sign-extended operands.
func rewriteValueMIPS64_OpLeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32 x y)
	// result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 x) (SignExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq32F computes float32 x <= y by swapping the
// operands into (CMPGEF y x) (y >= x) and reading the FP flag.
func rewriteValueMIPS64_OpLeq32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Leq32F x y)
	// result: (FPFlagTrue (CMPGEF y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGEF, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLeq32U computes unsigned x <= y as the negation
// 1 ^ (x >u y) on the zero-extended operands.
func rewriteValueMIPS64_OpLeq32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32U x y)
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq64 computes signed 64-bit x <= y as the
// negation 1 ^ (x > y); no operand extension is needed.
func rewriteValueMIPS64_OpLeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64 x y)
	// result: (XOR (MOVVconst [1]) (SGT x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq64F computes float64 x <= y by swapping the
// operands into (CMPGED y x) (y >= x) and reading the FP flag.
func rewriteValueMIPS64_OpLeq64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Leq64F x y)
	// result: (FPFlagTrue (CMPGED y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGED, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLeq64U computes unsigned 64-bit x <= y as the
// negation 1 ^ (x >u y); no operand extension is needed.
func rewriteValueMIPS64_OpLeq64U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64U x y)
	// result: (XOR (MOVVconst [1]) (SGTU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq8 computes signed x <= y as the negation
// 1 ^ (x > y) on the sign-extended operands.
func rewriteValueMIPS64_OpLeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8 x y)
	// result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 x) (SignExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq8U computes unsigned x <= y as the negation
// 1 ^ (x >u y) on the zero-extended operands.
func rewriteValueMIPS64_OpLeq8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8U x y)
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess16 computes signed x < y as (y > x) with
// both operands sign-extended to 64 bits.
func rewriteValueMIPS64_OpLess16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16 x y)
	// result: (SGT (SignExt16to64 y) (SignExt16to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess16U computes unsigned x < y as (y >u x) with
// both operands zero-extended to 64 bits.
func rewriteValueMIPS64_OpLess16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16U x y)
	// result: (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess32 computes signed x < y as (y > x) with
// both operands sign-extended to 64 bits.
func rewriteValueMIPS64_OpLess32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32 x y)
	// result: (SGT (SignExt32to64 y) (SignExt32to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess32F computes float32 x < y by swapping the
// operands into (CMPGTF y x) (y > x) and reading the FP flag.
func rewriteValueMIPS64_OpLess32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Less32F x y)
	// result: (FPFlagTrue (CMPGTF y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTF, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLess32U computes unsigned x < y as (y >u x) with
// both operands zero-extended to 64 bits.
func rewriteValueMIPS64_OpLess32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32U x y)
	// result: (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess64 computes signed 64-bit x < y by swapping
// the operands into (SGT y x).
func rewriteValueMIPS64_OpLess64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Less64 x y)
	// result: (SGT y x)
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGT)
		v.AddArg2(y, x)
		return true
	}
}
// rewriteValueMIPS64_OpLess64F computes float64 x < y by swapping the
// operands into (CMPGTD y x) (y > x) and reading the FP flag.
func rewriteValueMIPS64_OpLess64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Less64F x y)
	// result: (FPFlagTrue (CMPGTD y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTD, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLess64U computes unsigned 64-bit x < y by swapping
// the operands into (SGTU y x).
func rewriteValueMIPS64_OpLess64U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Less64U x y)
	// result: (SGTU y x)
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v.AddArg2(y, x)
		return true
	}
}
// rewriteValueMIPS64_OpLess8 computes signed x < y as (y > x) with
// both operands sign-extended to 64 bits.
func rewriteValueMIPS64_OpLess8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8 x y)
	// result: (SGT (SignExt8to64 y) (SignExt8to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess8U computes unsigned x < y as (y >u x) with
// both operands zero-extended to 64 bits.
func rewriteValueMIPS64_OpLess8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8U x y)
	// result: (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLoad lowers the generic Load op to the
// machine-specific load whose width and sign/zero-extension behavior
// match the loaded type: byte/half/word/doubleword for integers and
// pointers, MOVF/MOVD for floats. Exactly one case applies per type;
// the cases are tried in order and each falls through on a failed
// condition.
func rewriteValueMIPS64_OpLoad(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Load <t> ptr mem)
	// cond: t.IsBoolean()
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(t.IsBoolean()) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && t.IsSigned())
	// result: (MOVBload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpMIPS64MOVBload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && !t.IsSigned())
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && t.IsSigned())
	// result: (MOVHload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpMIPS64MOVHload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && !t.IsSigned())
	// result: (MOVHUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpMIPS64MOVHUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && t.IsSigned())
	// result: (MOVWload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpMIPS64MOVWload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && !t.IsSigned())
	// result: (MOVWUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpMIPS64MOVWUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is64BitInt(t) || isPtr(t))
	// result: (MOVVload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitInt(t) || isPtr(t)) {
			break
		}
		v.reset(OpMIPS64MOVVload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is32BitFloat(t)
	// result: (MOVFload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitFloat(t)) {
			break
		}
		v.reset(OpMIPS64MOVFload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is64BitFloat(t)
	// result: (MOVDload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitFloat(t)) {
			break
		}
		v.reset(OpMIPS64MOVDload)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpLocalAddr lowers LocalAddr to MOVVaddr. When the
// addressed object contains pointers, the address is anchored to the
// memory state via SPanchored so the object stays live for the GC;
// pointer-free objects take a plain stack address and drop the mem arg.
func rewriteValueMIPS64_OpLocalAddr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (LocalAddr <t> {sym} base mem)
	// cond: t.Elem().HasPointers()
	// result: (MOVVaddr {sym} (SPanchored base mem))
	for {
		t := v.Type
		sym := auxToSym(v.Aux)
		base := v_0
		mem := v_1
		if !(t.Elem().HasPointers()) {
			break
		}
		v.reset(OpMIPS64MOVVaddr)
		v.Aux = symToAux(sym)
		v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
		v0.AddArg2(base, mem)
		v.AddArg(v0)
		return true
	}
	// match: (LocalAddr <t> {sym} base _)
	// cond: !t.Elem().HasPointers()
	// result: (MOVVaddr {sym} base)
	for {
		t := v.Type
		sym := auxToSym(v.Aux)
		base := v_0
		if !(!t.Elem().HasPointers()) {
			break
		}
		v.reset(OpMIPS64MOVVaddr)
		v.Aux = symToAux(sym)
		v.AddArg(base)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpLsh16x16 lowers Lsh16x16. The 16-bit shift
// amount is zero-extended once (v3) and shared by both the bounds test
// and the shift. NEGV(SGTU 64 amt) is all ones when amt < 64 and zero
// otherwise, so ANDing it with the SLLV result forces the value to 0
// for shifts of 64 or more, as Go's shift semantics require.
func rewriteValueMIPS64_OpLsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh16x32 lowers Lsh16x32. The 32-bit shift
// amount is zero-extended once (v3) and shared by the bounds test and
// the shift; the NEGV(SGTU 64 amt) mask zeroes the result when the
// amount is 64 or more (Go shift semantics).
func rewriteValueMIPS64_OpLsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh16x64 lowers Lsh16x64. The 64-bit shift
// amount y needs no extension; the NEGV(SGTU 64 y) mask zeroes the
// SLLV result when y >= 64, matching Go shift semantics.
func rewriteValueMIPS64_OpLsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg2(x, y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh16x8 lowers Lsh16x8. The 8-bit shift amount
// is zero-extended once (v3) and shared by the bounds test and the
// shift; the NEGV(SGTU 64 amt) mask zeroes the result when the amount
// is 64 or more (Go shift semantics).
func rewriteValueMIPS64_OpLsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x16 lowers Lsh32x16. The 16-bit shift
// amount is zero-extended once (v3) and shared by the bounds test and
// the shift; the NEGV(SGTU 64 amt) mask zeroes the result when the
// amount is 64 or more (Go shift semantics).
func rewriteValueMIPS64_OpLsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x32 lowers Lsh32x32. The 32-bit shift
// amount is zero-extended once (v3) and shared by the bounds test and
// the shift; the NEGV(SGTU 64 amt) mask zeroes the result when the
// amount is 64 or more (Go shift semantics).
func rewriteValueMIPS64_OpLsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x64 lowers Lsh32x64. The 64-bit shift
// amount y needs no extension; the NEGV(SGTU 64 y) mask zeroes the
// SLLV result when y >= 64, matching Go shift semantics.
func rewriteValueMIPS64_OpLsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg2(x, y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x8 lowers Lsh32x8. The 8-bit shift amount
// is zero-extended once (v3) and shared by the bounds test and the
// shift; the NEGV(SGTU 64 amt) mask zeroes the result when the amount
// is 64 or more (Go shift semantics).
func rewriteValueMIPS64_OpLsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x16 lowers Lsh64x16. The 16-bit shift
// amount is zero-extended once (v3) and shared by the bounds test and
// the shift; the NEGV(SGTU 64 amt) mask zeroes the result when the
// amount is 64 or more (Go shift semantics).
func rewriteValueMIPS64_OpLsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x32 lowers Lsh64x32. The 32-bit shift
// amount is zero-extended once (v3) and shared by the bounds test and
// the shift; the NEGV(SGTU 64 amt) mask zeroes the result when the
// amount is 64 or more (Go shift semantics).
func rewriteValueMIPS64_OpLsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x64 lowers Lsh64x64. The 64-bit shift
// amount y needs no extension; the NEGV(SGTU 64 y) mask zeroes the
// SLLV result when y >= 64, matching Go shift semantics.
func rewriteValueMIPS64_OpLsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg2(x, y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x8 lowers Lsh64x8. The 8-bit shift amount
// is zero-extended once (v3) and shared by the bounds test and the
// shift; the NEGV(SGTU 64 amt) mask zeroes the result when the amount
// is 64 or more (Go shift semantics).
func rewriteValueMIPS64_OpLsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x16 lowers Lsh8x16. The 16-bit shift amount
// is zero-extended once (v3) and shared by the bounds test and the
// shift; the NEGV(SGTU 64 amt) mask zeroes the result when the amount
// is 64 or more (Go shift semantics).
func rewriteValueMIPS64_OpLsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x32 lowers Lsh8x32. The 32-bit shift amount
// is zero-extended once (v3) and shared by the bounds test and the
// shift; the NEGV(SGTU 64 amt) mask zeroes the result when the amount
// is 64 or more (Go shift semantics).
func rewriteValueMIPS64_OpLsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x64 lowers Lsh8x64. The 64-bit shift
// amount y needs no extension; the NEGV(SGTU 64 y) mask zeroes the
// SLLV result when y >= 64, matching Go shift semantics.
func rewriteValueMIPS64_OpLsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg2(x, y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x8 lowers Lsh8x8. The 8-bit shift amount
// is zero-extended once (v3) and shared by the bounds test and the
// shift; the NEGV(SGTU 64 amt) mask zeroes the result when the amount
// is 64 or more (Go shift semantics).
func rewriteValueMIPS64_OpLsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpMIPS64ADDV simplifies machine ADDV:
// folds a 32-bit-representable constant addend into ADDVconst, and
// turns x+(-y) into SUBV. ADDV is commutative, so each rule tries both
// argument orders via the inner _i0 loop that swaps v_0 and v_1.
func rewriteValueMIPS64_OpMIPS64ADDV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ADDV x (MOVVconst <t> [c]))
	// cond: is32Bit(c) && !t.IsPtr()
	// result: (ADDVconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpMIPS64MOVVconst {
				continue
			}
			t := v_1.Type
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c) && !t.IsPtr()) {
				continue
			}
			v.reset(OpMIPS64ADDVconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (ADDV x (NEGV y))
	// result: (SUBV x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpMIPS64NEGV {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpMIPS64SUBV)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64ADDVconst performs constant folding on
// ADDVconst: folds the offset into a MOVVaddr, eliminates +0, folds
// into a constant operand, and merges chained ADDVconst/SUBVconst when
// the combined offset still fits in 32 bits (the immediate range the
// backend accepts).
func rewriteValueMIPS64_OpMIPS64ADDVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ADDVconst [off1] (MOVVaddr [off2] {sym} ptr))
	// cond: is32Bit(off1+int64(off2))
	// result: (MOVVaddr [int32(off1)+int32(off2)] {sym} ptr)
	for {
		off1 := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		if !(is32Bit(off1 + int64(off2))) {
			break
		}
		v.reset(OpMIPS64MOVVaddr)
		v.AuxInt = int32ToAuxInt(int32(off1) + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg(ptr)
		return true
	}
	// match: (ADDVconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ADDVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c+d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(c + d)
		return true
	}
	// match: (ADDVconst [c] (ADDVconst [d] x))
	// cond: is32Bit(c+d)
	// result: (ADDVconst [c+d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(c + d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = int64ToAuxInt(c + d)
		v.AddArg(x)
		return true
	}
	// match: (ADDVconst [c] (SUBVconst [d] x))
	// cond: is32Bit(c-d)
	// result: (ADDVconst [c-d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64SUBVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(c - d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = int64ToAuxInt(c - d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64AND simplifies machine AND: folds a
// 32-bit-representable constant operand into ANDconst (trying both
// argument orders, since AND is commutative) and reduces x&x to x.
func rewriteValueMIPS64_OpMIPS64AND(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (AND x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ANDconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpMIPS64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpMIPS64ANDconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (AND x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64ANDconst performs constant folding on
// ANDconst: x&0 = 0, x&-1 = x, const&const folds to a constant, and
// nested ANDconst collapses by intersecting the masks.
func rewriteValueMIPS64_OpMIPS64ANDconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ANDconst [0] _)
	// result: (MOVVconst [0])
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (ANDconst [-1] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ANDconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c&d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(c & d)
		return true
	}
	// match: (ANDconst [c] (ANDconst [d] x))
	// result: (ANDconst [c&d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64ANDconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		v.reset(OpMIPS64ANDconst)
		v.AuxInt = int64ToAuxInt(c & d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32 folds a constant addend
// that fits in 32 bits into the immediate form LoweredAtomicAddconst32.
func rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredAtomicAdd32 ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (LoweredAtomicAddconst32 [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64LoweredAtomicAddconst32)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64 folds a constant addend
// that fits in 32 bits into the immediate form LoweredAtomicAddconst64
// (the aux int stays 64-bit; the is32Bit check only bounds its range).
func rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredAtomicAdd64 ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (LoweredAtomicAddconst64 [c] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64LoweredAtomicAddconst64)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32 specializes an atomic
// store of constant zero to the dedicated store-zero form.
func rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredAtomicStore32 ptr (MOVVconst [0]) mem)
	// result: (LoweredAtomicStorezero32 ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpMIPS64LoweredAtomicStorezero32)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64 specializes an atomic
// store of constant zero to the dedicated store-zero form.
func rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredAtomicStore64 ptr (MOVVconst [0]) mem)
	// result: (LoweredAtomicStorezero64 ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpMIPS64LoweredAtomicStorezero64)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64LoweredPanicBoundsCR upgrades a
// constant/register bounds panic to the fully-constant CC form when the
// register operand turns out to be a constant; the existing constant p.C
// keeps the Cx slot and the newly-discovered constant c takes Cy.
func rewriteValueMIPS64_OpMIPS64LoweredPanicBoundsCR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredPanicBoundsCR [kind] {p} (MOVVconst [c]) mem)
	// result: (LoweredPanicBoundsCC [kind] {PanicBoundsCC{Cx:p.C, Cy:c}} mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		p := auxToPanicBoundsC(v.Aux)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		mem := v_1
		v.reset(OpMIPS64LoweredPanicBoundsCC)
		v.AuxInt = int64ToAuxInt(kind)
		v.Aux = panicBoundsCCToAux(PanicBoundsCC{Cx: p.C, Cy: c})
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64LoweredPanicBoundsRC upgrades a
// register/constant bounds panic to the fully-constant CC form when the
// register operand turns out to be a constant; note the slot order is
// the mirror of the CR case — c takes Cx and p.C takes Cy.
func rewriteValueMIPS64_OpMIPS64LoweredPanicBoundsRC(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredPanicBoundsRC [kind] {p} (MOVVconst [c]) mem)
	// result: (LoweredPanicBoundsCC [kind] {PanicBoundsCC{Cx:c, Cy:p.C}} mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		p := auxToPanicBoundsC(v.Aux)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		mem := v_1
		v.reset(OpMIPS64LoweredPanicBoundsCC)
		v.AuxInt = int64ToAuxInt(kind)
		v.Aux = panicBoundsCCToAux(PanicBoundsCC{Cx: c, Cy: p.C})
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64LoweredPanicBoundsRR narrows a
// register/register bounds panic when either operand is a constant:
// a constant second operand yields the RC form, a constant first
// operand yields the CR form. Later rewrites may narrow RC/CR further
// to CC if the remaining operand also becomes constant.
func rewriteValueMIPS64_OpMIPS64LoweredPanicBoundsRR(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredPanicBoundsRR [kind] x (MOVVconst [c]) mem)
	// result: (LoweredPanicBoundsRC [kind] x {PanicBoundsC{C:c}} mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		v.reset(OpMIPS64LoweredPanicBoundsRC)
		v.AuxInt = int64ToAuxInt(kind)
		v.Aux = panicBoundsCToAux(PanicBoundsC{C: c})
		v.AddArg2(x, mem)
		return true
	}
	// match: (LoweredPanicBoundsRR [kind] (MOVVconst [c]) y mem)
	// result: (LoweredPanicBoundsCR [kind] {PanicBoundsC{C:c}} y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		y := v_1
		mem := v_2
		v.reset(OpMIPS64LoweredPanicBoundsCR)
		v.AuxInt = int64ToAuxInt(kind)
		v.Aux = panicBoundsCToAux(PanicBoundsC{C: c})
		v.AddArg2(y, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBUload improves unsigned byte loads:
// folds an ADDVconst offset or a MOVVaddr sym+offset into the load's
// aux fields (avoided for SB-relative addresses under Flag_shared,
// where such folding is not permitted), and constant-folds loads from
// read-only symbols by reading the byte at link time.
func rewriteValueMIPS64_OpMIPS64MOVBUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVBUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBUload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBUload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(read8(sym, int64(off)))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(read8(sym, int64(off))))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBUreg applies the generated MIPS64.rules
// rewrites for MOVBUreg (zero-extend byte): drop the extension when the
// argument is already zero-extended (a MOVBUload or MOVBUreg), and
// constant-fold when the argument is a constant. Rules are tried in order;
// the first match mutates v in place and returns true.
func rewriteValueMIPS64_OpMIPS64MOVBUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVBUreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (MOVVconst [c]))
	// result: (MOVVconst [int64(uint8(c))])
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint8(c)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBload applies the generated MIPS64.rules
// rewrites for MOVBload (sign-extending byte load): fold an ADDVconst or
// MOVVaddr address computation into the load's offset/symbol, and
// constant-fold a load from a read-only symbol (sign-extending the byte).
// Rules are tried in order; the first match mutates v in place and returns true.
func rewriteValueMIPS64_OpMIPS64MOVBload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// config is needed for the Flag_shared (PIC) restriction in the conds below.
	config := b.Func.Config
	// match: (MOVBload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVBload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVBload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(int8(read8(sym, int64(off))))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(int8(read8(sym, int64(off)))))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBreg applies the generated MIPS64.rules
// rewrites for MOVBreg (sign-extend byte): drop the extension when the
// argument is already sign-extended (a MOVBload or MOVBreg), and
// constant-fold when the argument is a constant. Rules are tried in order;
// the first match mutates v in place and returns true.
func rewriteValueMIPS64_OpMIPS64MOVBreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVBreg x:(MOVBload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg (MOVVconst [c]))
	// result: (MOVVconst [int64(int8(c))])
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(int8(c)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBstore applies the generated MIPS64.rules
// rewrites for MOVBstore: fold an ADDVconst or MOVVaddr address computation
// into the store's offset/symbol, and drop redundant sign/zero extensions of
// the stored value (a byte store only writes the low 8 bits, so any
// MOV{B,BU,H,HU,W,WU}reg wrapper on the value is unnecessary). Rules are
// tried in order; the first match mutates v in place and returns true.
func rewriteValueMIPS64_OpMIPS64MOVBstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// config is needed for the Flag_shared (PIC) restriction in the conds below.
	config := b.Func.Config
	// match: (MOVBstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVBstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVBreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVBUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVDF applies the generated MIPS64.rules
// rewrites for MOVDF (double-to-float conversion): when the double operand
// was itself produced by widening a float (MOVFD), perform the intervening
// ABSD/SQRTD directly in single precision instead. Rules are tried in order;
// the first match mutates v in place and returns true.
func rewriteValueMIPS64_OpMIPS64MOVDF(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVDF (ABSD (MOVFD x)))
	// result: (ABSF x)
	for {
		if v_0.Op != OpMIPS64ABSD {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVFD {
			break
		}
		x := v_0_0.Args[0]
		v.reset(OpMIPS64ABSF)
		v.AddArg(x)
		return true
	}
	// match: (MOVDF (SQRTD (MOVFD x)))
	// result: (SQRTF x)
	for {
		if v_0.Op != OpMIPS64SQRTD {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVFD {
			break
		}
		x := v_0_0.Args[0]
		v.reset(OpMIPS64SQRTF)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVDload applies the generated MIPS64.rules
// rewrites for MOVDload (FP double load): forward a just-stored integer value
// through a register move (MOVVgpfp) instead of reloading it from memory, and
// fold an ADDVconst or MOVVaddr address computation into the load's
// offset/symbol. Rules are tried in order; the first match mutates v in place
// and returns true.
func rewriteValueMIPS64_OpMIPS64MOVDload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// config is needed for the Flag_shared (PIC) restriction in the conds below.
	config := b.Func.Config
	// match: (MOVDload [off] {sym} ptr (MOVVstore [off] {sym} ptr val _))
	// result: (MOVVgpfp val)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVVstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		val := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpMIPS64MOVVgpfp)
		v.AddArg(val)
		return true
	}
	// match: (MOVDload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVDload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVDload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVDload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVDstore applies the generated MIPS64.rules
// rewrites for MOVDstore (FP double store): turn a store of a GP-to-FP moved
// value (MOVVgpfp) into a plain integer store, and fold an ADDVconst or
// MOVVaddr address computation into the store's offset/symbol. Rules are
// tried in order; the first match mutates v in place and returns true.
func rewriteValueMIPS64_OpMIPS64MOVDstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// config is needed for the Flag_shared (PIC) restriction in the conds below.
	config := b.Func.Config
	// match: (MOVDstore [off] {sym} ptr (MOVVgpfp val) mem)
	// result: (MOVVstore [off] {sym} ptr val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVVgpfp {
			break
		}
		val := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVDstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVDstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVFload applies the generated MIPS64.rules
// rewrites for MOVFload (FP single load): forward a just-stored 32-bit
// integer value through a register move (MOVWgpfp) instead of reloading it
// from memory, and fold an ADDVconst or MOVVaddr address computation into the
// load's offset/symbol. Rules are tried in order; the first match mutates v
// in place and returns true.
func rewriteValueMIPS64_OpMIPS64MOVFload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// config is needed for the Flag_shared (PIC) restriction in the conds below.
	config := b.Func.Config
	// match: (MOVFload [off] {sym} ptr (MOVWstore [off] {sym} ptr val _))
	// result: (MOVWgpfp val)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		val := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpMIPS64MOVWgpfp)
		v.AddArg(val)
		return true
	}
	// match: (MOVFload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVFload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVFload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVFload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVFload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVFload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVFstore applies the generated MIPS64.rules
// rewrites for MOVFstore (FP single store): turn a store of a GP-to-FP moved
// value (MOVWgpfp) into a plain 32-bit integer store, and fold an ADDVconst
// or MOVVaddr address computation into the store's offset/symbol. Rules are
// tried in order; the first match mutates v in place and returns true.
func rewriteValueMIPS64_OpMIPS64MOVFstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// config is needed for the Flag_shared (PIC) restriction in the conds below.
	config := b.Func.Config
	// match: (MOVFstore [off] {sym} ptr (MOVWgpfp val) mem)
	// result: (MOVWstore [off] {sym} ptr val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVWgpfp {
			break
		}
		val := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVFstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVFstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVFstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVFstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVFstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVFstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHUload applies the generated MIPS64.rules
// rewrites for MOVHUload (zero-extending halfword load): fold an ADDVconst or
// MOVVaddr address computation into the load's offset/symbol, and
// constant-fold a load from a read-only symbol (using the target byte order).
// Rules are tried in order; the first match mutates v in place and returns true.
func rewriteValueMIPS64_OpMIPS64MOVHUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// config supplies both the Flag_shared (PIC) restriction and the target
	// byte order for the read-only constant fold below.
	config := b.Func.Config
	// match: (MOVHUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHUload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVHUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVHUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHUload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHUreg applies the generated MIPS64.rules
// rewrites for MOVHUreg (zero-extend halfword): drop the extension when the
// argument is already zero-extended to at most 16 bits (an unsigned byte or
// halfword load, or a prior MOVBUreg/MOVHUreg), and constant-fold when the
// argument is a constant. Rules are tried in order; the first match mutates v
// in place and returns true.
func rewriteValueMIPS64_OpMIPS64MOVHUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVHUreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVHUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVHUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (MOVVconst [c]))
	// result: (MOVVconst [int64(uint16(c))])
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHload applies the generated MIPS64.rules
// rewrites for MOVHload (sign-extending halfword load): fold an ADDVconst or
// MOVVaddr address computation into the load's offset/symbol, and
// constant-fold a load from a read-only symbol (sign-extending the halfword
// read in the target byte order). Rules are tried in order; the first match
// mutates v in place and returns true.
func rewriteValueMIPS64_OpMIPS64MOVHload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// config supplies both the Flag_shared (PIC) restriction and the target
	// byte order for the read-only constant fold below.
	config := b.Func.Config
	// match: (MOVHload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVHload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVHload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(int16(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(int16(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHreg applies the generated MIPS64.rules
// rewrites for MOVHreg (sign-extend halfword): drop the extension when the
// argument already fits in a sign-extended halfword (byte/halfword loads or
// prior narrower extensions), and constant-fold when the argument is a
// constant. Rules are tried in order; the first match mutates v in place and
// returns true.
func rewriteValueMIPS64_OpMIPS64MOVHreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVHreg x:(MOVBload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVHload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVHreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg (MOVVconst [c]))
	// result: (MOVVconst [int64(int16(c))])
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(int16(c)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHstore applies the generated MIPS64.rules
// rewrites for MOVHstore: fold an ADDVconst or MOVVaddr address computation
// into the store's offset/symbol, and drop redundant sign/zero extensions of
// the stored value (a halfword store only writes the low 16 bits, so any
// MOV{H,HU,W,WU}reg wrapper on the value is unnecessary). Rules are tried in
// order; the first match mutates v in place and returns true.
func rewriteValueMIPS64_OpMIPS64MOVHstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// config is needed for the Flag_shared (PIC) restriction in the conds below.
	config := b.Func.Config
	// match: (MOVHstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVHstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVload rewrites MOVVload: store-to-load forward
// through a same-address MOVDstore as MOVVfpgp, fold ADDVconst/MOVVaddr address
// arithmetic into the AuxInt/Aux offset+symbol, and constant-fold loads from
// read-only symbols. Rules run in order; the first match wins.
// Generated from _gen/MIPS64.rules — edit the rules file, not this function.
func rewriteValueMIPS64_OpMIPS64MOVVload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVVload [off] {sym} ptr (MOVDstore [off] {sym} ptr val _))
	// result: (MOVVfpgp val)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVDstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		val := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpMIPS64MOVVfpgp)
		v.AddArg(val)
		return true
	}
	// match: (MOVVload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVVload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVVload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVVload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVVload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVVload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVVload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(read64(sym, int64(off), config.ctxt.Arch.ByteOrder))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(read64(sym, int64(off), config.ctxt.Arch.ByteOrder)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVnop folds a MOVVnop of a constant into the
// constant itself. Generated from _gen/MIPS64.rules — do not hand-edit.
func rewriteValueMIPS64_OpMIPS64MOVVnop(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVVnop (MOVVconst [c]))
	// result: (MOVVconst [c])
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(c)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVreg rewrites MOVVreg: a single-use operand
// becomes a MOVVnop, and a constant operand is folded directly.
// Generated from _gen/MIPS64.rules — do not hand-edit.
func rewriteValueMIPS64_OpMIPS64MOVVreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVVreg x)
	// cond: x.Uses == 1
	// result: (MOVVnop x)
	for {
		x := v_0
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpMIPS64MOVVnop)
		v.AddArg(x)
		return true
	}
	// match: (MOVVreg (MOVVconst [c]))
	// result: (MOVVconst [c])
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(c)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVstore rewrites MOVVstore: turn a store of a
// MOVVfpgp value into a MOVDstore (FP register store), and fold
// ADDVconst/MOVVaddr address arithmetic into the AuxInt/Aux offset+symbol.
// Generated from _gen/MIPS64.rules — do not hand-edit.
func rewriteValueMIPS64_OpMIPS64MOVVstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVVstore [off] {sym} ptr (MOVVfpgp val) mem)
	// result: (MOVDstore [off] {sym} ptr val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVVfpgp {
			break
		}
		val := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVDstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVVstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVVstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVVstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVVstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWUload rewrites MOVWUload (unsigned 32-bit
// load): store-to-load forward through a same-address MOVFstore via
// MOVWfpgp+ZeroExt32to64, fold ADDVconst/MOVVaddr address arithmetic into the
// AuxInt/Aux offset+symbol, and constant-fold loads from read-only symbols.
// Generated from _gen/MIPS64.rules — do not hand-edit.
func rewriteValueMIPS64_OpMIPS64MOVWUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVWUload [off] {sym} ptr (MOVFstore [off] {sym} ptr val _))
	// result: (ZeroExt32to64 (MOVWfpgp <typ.Float32> val))
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVFstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		val := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpZeroExt32to64)
		v0 := b.NewValue0(v_1.Pos, OpMIPS64MOVWfpgp, typ.Float32)
		v0.AddArg(val)
		v.AddArg(v0)
		return true
	}
	// match: (MOVWUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWUload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVWUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVWUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWUload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWUreg rewrites MOVWUreg (zero-extend word):
// the extension is redundant after a narrower-or-equal unsigned load or
// zero-extension (becomes MOVVreg), and a constant operand folds to its low
// 32 bits. Generated from _gen/MIPS64.rules — do not hand-edit.
func rewriteValueMIPS64_OpMIPS64MOVWUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVWUreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVHUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVWUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVHUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVWUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg (MOVVconst [c]))
	// result: (MOVVconst [int64(uint32(c))])
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint32(c)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWload rewrites MOVWload (signed 32-bit load):
// fold ADDVconst/MOVVaddr address arithmetic into the AuxInt/Aux
// offset+symbol, and constant-fold sign-extended loads from read-only symbols.
// Generated from _gen/MIPS64.rules — do not hand-edit.
func rewriteValueMIPS64_OpMIPS64MOVWload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVWload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVWload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVWload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(int32(read32(sym, int64(off), config.ctxt.Arch.ByteOrder)))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(int32(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWreg rewrites MOVWreg (sign-extend word): the
// extension is redundant after a narrower-or-equal extending load or extension
// (becomes MOVVreg), and a constant operand folds to its sign-extended low
// 32 bits. Generated from _gen/MIPS64.rules — do not hand-edit.
func rewriteValueMIPS64_OpMIPS64MOVWreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVWreg x:(MOVBload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVHload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVHUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVWload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVHreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVWreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg (MOVVconst [c]))
	// result: (MOVVconst [int64(int32(c))])
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(int32(c)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWstore rewrites MOVWstore: turn a store of a
// MOVWfpgp value into a MOVFstore, fold ADDVconst/MOVVaddr address arithmetic
// into the AuxInt/Aux offset+symbol, and drop redundant sign/zero extensions
// of the stored value (only the low 32 bits are written).
// Generated from _gen/MIPS64.rules — do not hand-edit.
func rewriteValueMIPS64_OpMIPS64MOVWstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVWstore [off] {sym} ptr (MOVWfpgp val) mem)
	// result: (MOVFstore [off] {sym} ptr val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVWfpgp {
			break
		}
		val := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVFstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64NEGV rewrites NEGV: -(x-y) becomes y-x, double
// negation cancels, and a constant operand is negated at compile time.
// Generated from _gen/MIPS64.rules — do not hand-edit.
func rewriteValueMIPS64_OpMIPS64NEGV(v *Value) bool {
	v_0 := v.Args[0]
	// match: (NEGV (SUBV x y))
	// result: (SUBV y x)
	for {
		if v_0.Op != OpMIPS64SUBV {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpMIPS64SUBV)
		v.AddArg2(y, x)
		return true
	}
	// match: (NEGV (NEGV x))
	// result: x
	for {
		if v_0.Op != OpMIPS64NEGV {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (NEGV (MOVVconst [c]))
	// result: (MOVVconst [-c])
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(-c)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64OR rewrites OR: an OR with a 32-bit-representable
// constant becomes ORconst (the inner loop tries both operand orders since OR
// is commutative), and (OR x x) simplifies to x.
// Generated from _gen/MIPS64.rules — do not hand-edit.
func rewriteValueMIPS64_OpMIPS64OR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (OR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ORconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpMIPS64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpMIPS64ORconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (OR x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64ORconst rewrites ORconst: OR with 0 is the
// identity, OR with -1 is -1, OR of two constants folds, and nested ORconsts
// merge when the combined constant still fits in 32 bits.
// Generated from _gen/MIPS64.rules — do not hand-edit.
func rewriteValueMIPS64_OpMIPS64ORconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ORconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ORconst [-1] _)
	// result: (MOVVconst [-1])
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	// match: (ORconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c|d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(c | d)
		return true
	}
	// match: (ORconst [c] (ORconst [d] x))
	// cond: is32Bit(c|d)
	// result: (ORconst [c|d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64ORconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(c | d)) {
			break
		}
		v.reset(OpMIPS64ORconst)
		v.AuxInt = int64ToAuxInt(c | d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGT rewrites SGT (signed set-on-greater-than):
// a 32-bit-representable constant left operand becomes SGTconst, and
// (SGT x x) is always 0. Generated from _gen/MIPS64.rules — do not hand-edit.
func rewriteValueMIPS64_OpMIPS64SGT(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SGT (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (SGTconst [c] x)
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64SGTconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SGT x x)
	// result: (MOVVconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGTU rewrites SGTU (unsigned set-on-greater-than):
// a 32-bit-representable constant left operand becomes SGTUconst, and
// (SGTU x x) is always 0. Generated from _gen/MIPS64.rules — do not hand-edit.
func rewriteValueMIPS64_OpMIPS64SGTU(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SGTU (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (SGTUconst [c] x)
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64SGTUconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SGTU x x)
	// result: (MOVVconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGTUconst rewrites SGTUconst (unsigned c > x):
// fold constant-vs-constant comparisons, and resolve to 1 whenever the
// operand's value range — bounded by a zero-extension, an AND mask, or a
// right shift — is provably below c.
// Generated from _gen/MIPS64.rules — do not hand-edit.
func rewriteValueMIPS64_OpMIPS64SGTUconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SGTUconst [c] (MOVVconst [d]))
	// cond: uint64(c)>uint64(d)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(uint64(c) > uint64(d)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTUconst [c] (MOVVconst [d]))
	// cond: uint64(c)<=uint64(d)
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(uint64(c) <= uint64(d)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTUconst [c] (MOVBUreg _))
	// cond: 0xff < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVBUreg || !(0xff < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTUconst [c] (MOVHUreg _))
	// cond: 0xffff < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVHUreg || !(0xffff < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTUconst [c] (ANDconst [m] _))
	// cond: uint64(m) < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64ANDconst {
			break
		}
		m := auxIntToInt64(v_0.AuxInt)
		if !(uint64(m) < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTUconst [c] (SRLVconst _ [d]))
	// cond: 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64SRLVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGTconst constant-folds SGTconst (signed
// "set if AuxInt c > arg"). Each loop below is one generated rewrite
// rule; the match/cond/result comments are copied from _gen/MIPS64.rules.
// The MOVBreg/MOVHreg/MOVWUreg/ANDconst/SRLVconst cases use the known
// value range of the argument to decide the comparison statically.
// NOTE: generated code — change _gen/MIPS64.rules, not this file.
func rewriteValueMIPS64_OpMIPS64SGTconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SGTconst [c] (MOVVconst [d]))
	// cond: c>d
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(c > d) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVVconst [d]))
	// cond: c<=d
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(c <= d) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (MOVBreg _))
	// cond: 0x7f < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVBreg || !(0x7f < c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVBreg _))
	// cond: c <= -0x80
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVBreg || !(c <= -0x80) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (MOVBUreg _))
	// cond: 0xff < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVBUreg || !(0xff < c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVBUreg _))
	// cond: c < 0
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVBUreg || !(c < 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (MOVHreg _))
	// cond: 0x7fff < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVHreg || !(0x7fff < c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVHreg _))
	// cond: c <= -0x8000
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVHreg || !(c <= -0x8000) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (MOVHUreg _))
	// cond: 0xffff < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVHUreg || !(0xffff < c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVHUreg _))
	// cond: c < 0
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVHUreg || !(c < 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (MOVWUreg _))
	// cond: c < 0
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVWUreg || !(c < 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (ANDconst [m] _))
	// cond: 0 <= m && m < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64ANDconst {
			break
		}
		m := auxIntToInt64(v_0.AuxInt)
		if !(0 <= m && m < c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (SRLVconst _ [d]))
	// cond: 0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64SRLVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SLLV strength-reduces a variable left shift
// with a constant shift amount: amounts >= 64 fold to 0, otherwise the
// shift becomes SLLVconst. Generated from _gen/MIPS64.rules — do not edit.
func rewriteValueMIPS64_OpMIPS64SLLV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SLLV _ (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (MOVVconst [0])
	for {
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SLLV x (MOVVconst [c]))
	// result: (SLLVconst x [c])
	for {
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpMIPS64SLLVconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SLLVconst constant-folds a left shift of a
// constant operand. Generated from _gen/MIPS64.rules — do not edit.
func rewriteValueMIPS64_OpMIPS64SLLVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SLLVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [d<<uint64(c)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(d << uint64(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRAV strength-reduces a variable arithmetic
// right shift with a constant amount: amounts >= 64 clamp to 63 (which
// replicates the sign bit), otherwise the shift becomes SRAVconst.
// Generated from _gen/MIPS64.rules — do not edit.
func rewriteValueMIPS64_OpMIPS64SRAV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRAV x (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (SRAVconst x [63])
	for {
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = int64ToAuxInt(63)
		v.AddArg(x)
		return true
	}
	// match: (SRAV x (MOVVconst [c]))
	// result: (SRAVconst x [c])
	for {
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRAVconst constant-folds an arithmetic right
// shift of a constant operand (Go's >> on int64 is arithmetic).
// Generated from _gen/MIPS64.rules — do not edit.
func rewriteValueMIPS64_OpMIPS64SRAVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SRAVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [d>>uint64(c)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(d >> uint64(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRLV strength-reduces a variable logical
// right shift with a constant amount: amounts >= 64 fold to 0, otherwise
// the shift becomes SRLVconst. Generated from _gen/MIPS64.rules — do not edit.
func rewriteValueMIPS64_OpMIPS64SRLV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRLV _ (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (MOVVconst [0])
	for {
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLV x (MOVVconst [c]))
	// result: (SRLVconst x [c])
	for {
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpMIPS64SRLVconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRLVconst constant-folds a logical right
// shift of a constant operand; the uint64 conversion makes Go's >>
// shift in zero bits. Generated from _gen/MIPS64.rules — do not edit.
func rewriteValueMIPS64_OpMIPS64SRLVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SRLVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [int64(uint64(d)>>uint64(c))])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(d) >> uint64(c)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SUBV simplifies 64-bit subtraction:
// a 32-bit-immediate subtrahend becomes SUBVconst, x-(-y) becomes x+y,
// x-x folds to 0, and 0-x becomes NEGV.
// Generated from _gen/MIPS64.rules — do not edit.
func rewriteValueMIPS64_OpMIPS64SUBV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SUBV x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (SUBVconst [c] x)
	for {
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64SUBVconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SUBV x (NEGV y))
	// result: (ADDV x y)
	for {
		x := v_0
		if v_1.Op != OpMIPS64NEGV {
			break
		}
		y := v_1.Args[0]
		v.reset(OpMIPS64ADDV)
		v.AddArg2(x, y)
		return true
	}
	// match: (SUBV x x)
	// result: (MOVVconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SUBV (MOVVconst [0]) x)
	// result: (NEGV x)
	for {
		if v_0.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpMIPS64NEGV)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SUBVconst simplifies subtract-immediate:
// subtracting 0 is the identity, a constant operand folds completely,
// and chained SUBVconst/ADDVconst collapse into a single ADDVconst when
// the combined immediate still fits in 32 bits.
// Generated from _gen/MIPS64.rules — do not edit.
func rewriteValueMIPS64_OpMIPS64SUBVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SUBVconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (SUBVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [d-c])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(d - c)
		return true
	}
	// match: (SUBVconst [c] (SUBVconst [d] x))
	// cond: is32Bit(-c-d)
	// result: (ADDVconst [-c-d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64SUBVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(-c - d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = int64ToAuxInt(-c - d)
		v.AddArg(x)
		return true
	}
	// match: (SUBVconst [c] (ADDVconst [d] x))
	// cond: is32Bit(-c+d)
	// result: (ADDVconst [-c+d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(-c + d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = int64ToAuxInt(-c + d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64XOR simplifies 64-bit XOR: a 32-bit
// immediate operand becomes XORconst (the inner loop tries both operand
// orders since XOR is commutative), and x^x folds to 0.
// Generated from _gen/MIPS64.rules — do not edit.
func rewriteValueMIPS64_OpMIPS64XOR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (XOR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (XORconst [c] x)
	for {
		// Commutative match: swap v_0/v_1 on the second iteration.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpMIPS64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpMIPS64XORconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (XOR x x)
	// result: (MOVVconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64XORconst simplifies XOR-immediate:
// XOR with 0 is the identity, a constant operand folds completely, and
// nested XORconst collapses into one when the combined immediate still
// fits in 32 bits. Generated from _gen/MIPS64.rules — do not edit.
func rewriteValueMIPS64_OpMIPS64XORconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (XORconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (XORconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c^d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(c ^ d)
		return true
	}
	// match: (XORconst [c] (XORconst [d] x))
	// cond: is32Bit(c^d)
	// result: (XORconst [c^d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64XORconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(c ^ d)) {
			break
		}
		v.reset(OpMIPS64XORconst)
		v.AuxInt = int64ToAuxInt(c ^ d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMod16 lowers the generic Mod16 op to a signed
// 64-bit divide of the sign-extended operands; DIVV yields a tuple and
// Select0 picks the remainder half.
// Generated from _gen/MIPS64.rules — do not edit.
func rewriteValueMIPS64_OpMod16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16 x y)
	// result: (Select0 (DIVV (SignExt16to64 x) (SignExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod16u lowers Mod16u to an unsigned 64-bit divide
// of the zero-extended operands; Select0 picks the remainder half of the
// DIVVU tuple. Generated from _gen/MIPS64.rules — do not edit.
func rewriteValueMIPS64_OpMod16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16u x y)
	// result: (Select0 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod32 lowers Mod32 to a signed 64-bit divide of
// the sign-extended operands; Select0 picks the remainder half of the
// DIVV tuple. Generated from _gen/MIPS64.rules — do not edit.
func rewriteValueMIPS64_OpMod32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod32 x y)
	// result: (Select0 (DIVV (SignExt32to64 x) (SignExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod32u lowers Mod32u to an unsigned 64-bit divide
// of the zero-extended operands; Select0 picks the remainder half of the
// DIVVU tuple. Generated from _gen/MIPS64.rules — do not edit.
func rewriteValueMIPS64_OpMod32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod32u x y)
	// result: (Select0 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod64 lowers Mod64 directly to DIVV (no extension
// needed at full width); Select0 picks the remainder half of the tuple.
// Generated from _gen/MIPS64.rules — do not edit.
func rewriteValueMIPS64_OpMod64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod64 x y)
	// result: (Select0 (DIVV x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod64u lowers Mod64u directly to DIVVU; Select0
// picks the remainder half of the tuple.
// Generated from _gen/MIPS64.rules — do not edit.
func rewriteValueMIPS64_OpMod64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod64u x y)
	// result: (Select0 (DIVVU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod8 lowers Mod8 to a signed 64-bit divide of the
// sign-extended operands; Select0 picks the remainder half of the DIVV
// tuple. Generated from _gen/MIPS64.rules — do not edit.
func rewriteValueMIPS64_OpMod8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8 x y)
	// result: (Select0 (DIVV (SignExt8to64 x) (SignExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod8u lowers Mod8u to an unsigned 64-bit divide of
// the zero-extended operands; Select0 picks the remainder half of the
// DIVVU tuple. Generated from _gen/MIPS64.rules — do not edit.
func rewriteValueMIPS64_OpMod8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8u x y)
	// result: (Select0 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMove lowers the generic Move (memmove of AuxInt
// bytes, Aux carrying the element type for alignment) into MIPS64 stores.
// Small fixed sizes are unrolled into MOVB/MOVH/MOVW/MOVV store+load
// chains, choosing the widest access the type's alignment permits;
// misaligned sizes fall back to narrower accesses. Larger aligned copies
// use DUFFCOPY; everything else falls through to LoweredMove.
// Rules are tried in order, so more-aligned variants win over the
// byte-by-byte fallbacks for the same size.
// Generated from _gen/MIPS64.rules — do not edit.
func rewriteValueMIPS64_OpMove(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (Move [0] _ _ mem)
	// result: mem
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.copyOf(mem)
		return true
	}
	// match: (Move [1] dst src mem)
	// result: (MOVBstore dst (MOVBload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 1 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [2] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore dst (MOVHload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [2] dst src mem)
	// result: (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(1)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v0.AuxInt = int32ToAuxInt(1)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [4] {t} dst src mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore dst (MOVWload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [4] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v0.AuxInt = int32ToAuxInt(2)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [4] dst src mem)
	// result: (MOVBstore [3] dst (MOVBload [3] src mem) (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(3)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v0.AuxInt = int32ToAuxInt(3)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v2.AuxInt = int32ToAuxInt(2)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(1)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v4.AuxInt = int32ToAuxInt(1)
		v4.AddArg2(src, mem)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v6 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v6.AddArg2(src, mem)
		v5.AddArg3(dst, v6, mem)
		v3.AddArg3(dst, v4, v5)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [8] {t} dst src mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVVstore dst (MOVVload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [8] {t} dst src mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v0.AuxInt = int32ToAuxInt(4)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [8] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(6)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v0.AuxInt = int32ToAuxInt(6)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(4)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v2.AuxInt = int32ToAuxInt(4)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(2)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v4.AuxInt = int32ToAuxInt(2)
		v4.AddArg2(src, mem)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v6 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v6.AddArg2(src, mem)
		v5.AddArg3(dst, v6, mem)
		v3.AddArg3(dst, v4, v5)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [3] dst src mem)
	// result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 3 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v0.AuxInt = int32ToAuxInt(2)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(1)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v2.AuxInt = int32ToAuxInt(1)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v4.AddArg2(src, mem)
		v3.AddArg3(dst, v4, mem)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [6] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 6 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v0.AuxInt = int32ToAuxInt(4)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v2.AuxInt = int32ToAuxInt(2)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v4.AddArg2(src, mem)
		v3.AddArg3(dst, v4, mem)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [12] {t} dst src mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 12 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v0.AuxInt = int32ToAuxInt(8)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(4)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v2.AuxInt = int32ToAuxInt(4)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v4.AddArg2(src, mem)
		v3.AddArg3(dst, v4, mem)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [16] {t} dst src mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 16 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(8)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [24] {t} dst src mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVVstore [16] dst (MOVVload [16] src mem) (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 24 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = int32ToAuxInt(16)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(16)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(8)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v2.AuxInt = int32ToAuxInt(8)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v4.AddArg2(src, mem)
		v3.AddArg3(dst, v4, mem)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [s] {t} dst src mem)
	// cond: s%8 == 0 && s >= 24 && s <= 8*128 && t.Alignment()%8 == 0 && logLargeCopy(v, s)
	// result: (DUFFCOPY [16 * (128 - s/8)] dst src mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(s%8 == 0 && s >= 24 && s <= 8*128 && t.Alignment()%8 == 0 && logLargeCopy(v, s)) {
			break
		}
		v.reset(OpMIPS64DUFFCOPY)
		// AuxInt is the entry offset into the Duff's-device copy routine;
		// the 16-bytes-per-8-byte-word factor must match the code emitted
		// for duffcopy on mips64 — see the rule in _gen/MIPS64.rules.
		v.AuxInt = int64ToAuxInt(16 * (128 - s/8))
		v.AddArg3(dst, src, mem)
		return true
	}
	// match: (Move [s] {t} dst src mem)
	// cond: s > 24 && logLargeCopy(v, s) || t.Alignment()%8 != 0
	// result: (LoweredMove [t.Alignment()] dst src (ADDVconst <src.Type> src [s-moveSize(t.Alignment(), config)]) mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(s > 24 && logLargeCopy(v, s) || t.Alignment()%8 != 0) {
			break
		}
		v.reset(OpMIPS64LoweredMove)
		v.AuxInt = int64ToAuxInt(t.Alignment())
		v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, src.Type)
		v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
		v0.AddArg(src)
		v.AddArg4(dst, src, v0, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMul16 lowers the generic Mul16 op for MIPS64.
// MULVU produces a (hi, lo) tuple; Select1 takes the low 64 bits, whose
// low 16 bits are the 16-bit product.
func rewriteValueMIPS64_OpMul16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mul16 x y)
	// result: (Select1 (MULVU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMul32 lowers the generic Mul32 op for MIPS64.
// MULVU produces a (hi, lo) tuple; Select1 takes the low 64 bits, whose
// low 32 bits are the 32-bit product.
func rewriteValueMIPS64_OpMul32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mul32 x y)
	// result: (Select1 (MULVU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMul64 lowers the generic Mul64 op for MIPS64.
// MULVU produces a (hi, lo) tuple; Select1 takes the low 64 bits, which
// are the 64-bit product (signedness does not matter for the low word).
func rewriteValueMIPS64_OpMul64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mul64 x y)
	// result: (Select1 (MULVU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMul8 lowers the generic Mul8 op for MIPS64.
// MULVU produces a (hi, lo) tuple; Select1 takes the low 64 bits, whose
// low 8 bits are the 8-bit product.
func rewriteValueMIPS64_OpMul8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mul8 x y)
	// result: (Select1 (MULVU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpNeq16 lowers Neq16: XOR the zero-extended operands
// and test the result against zero with SGTU (unsigned >), which yields 1
// iff the operands differ.
func rewriteValueMIPS64_OpNeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq16 x y)
	// result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to64 y)) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		// NOTE(review): x is zero-extended only to 32 bits while y is
		// extended to 64. This mirrors the generated rule, and both
		// extensions clear the high bits above bit 15, but confirm the
		// asymmetry against _gen/MIPS64.rules before relying on it.
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpNeq32 lowers Neq32: XOR the zero-extended operands
// and test the result against zero with SGTU (unsigned >), which yields 1
// iff the operands differ.
func rewriteValueMIPS64_OpNeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq32 x y)
	// result: (SGTU (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpNeq32F lowers Neq32F: compare with CMPEQF and
// invert the FP condition flag (FPFlagFalse), so unordered operands
// (NaN) compare as not-equal.
func rewriteValueMIPS64_OpNeq32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Neq32F x y)
	// result: (FPFlagFalse (CMPEQF x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64FPFlagFalse)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpNeq64 lowers Neq64: XOR the operands (nonzero iff
// they differ) and test against zero with SGTU (unsigned >).
func rewriteValueMIPS64_OpNeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq64 x y)
	// result: (SGTU (XOR x y) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpNeq64F lowers Neq64F: compare with CMPEQD and
// invert the FP condition flag (FPFlagFalse), so unordered operands
// (NaN) compare as not-equal.
func rewriteValueMIPS64_OpNeq64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Neq64F x y)
	// result: (FPFlagFalse (CMPEQD x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64FPFlagFalse)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpNeq8 lowers Neq8: XOR the zero-extended operands
// and test the result against zero with SGTU (unsigned >), which yields 1
// iff the operands differ.
func rewriteValueMIPS64_OpNeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq8 x y)
	// result: (SGTU (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpNeqPtr lowers NeqPtr the same way as Neq64:
// pointers are full 64-bit words, so XOR + SGTU-against-zero suffices.
func rewriteValueMIPS64_OpNeqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (NeqPtr x y)
	// result: (SGTU (XOR x y) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpNot lowers boolean Not: booleans are 0 or 1, so
// XOR with the constant 1 flips the value.
func rewriteValueMIPS64_OpNot(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Not x)
	// result: (XORconst [1] x)
	for {
		x := v_0
		v.reset(OpMIPS64XORconst)
		v.AuxInt = int64ToAuxInt(1)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpOffPtr lowers OffPtr (pointer + constant offset).
// Stack-pointer-relative offsets that fit in 32 bits become a MOVVaddr
// (address materialization); everything else becomes an ADDVconst.
func rewriteValueMIPS64_OpOffPtr(v *Value) bool {
	v_0 := v.Args[0]
	// match: (OffPtr [off] ptr:(SP))
	// cond: is32Bit(off)
	// result: (MOVVaddr [int32(off)] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		if ptr.Op != OpSP || !(is32Bit(off)) {
			break
		}
		v.reset(OpMIPS64MOVVaddr)
		// MOVVaddr carries a 32-bit aux; the is32Bit guard above makes
		// the int32 truncation safe.
		v.AuxInt = int32ToAuxInt(int32(off))
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// result: (ADDVconst [off] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = int64ToAuxInt(off)
		v.AddArg(ptr)
		return true
	}
}
// rewriteValueMIPS64_OpRotateLeft16 lowers a rotate-left by a constant:
// (x << (c&15)) | (x >> (-c&15)). Only the constant-shift form is
// handled; variable rotates fall through (return false).
func rewriteValueMIPS64_OpRotateLeft16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft16 <t> x (MOVVconst [c]))
	// result: (Or16 (Lsh16x64 <t> x (MOVVconst [c&15])) (Rsh16Ux64 <t> x (MOVVconst [-c&15])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr16)
		v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 15)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		// -c&15 is the complementary shift count (16-c mod 16).
		v3.AuxInt = int64ToAuxInt(-c & 15)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpRotateLeft32 lowers a rotate-left by a constant:
// (x << (c&31)) | (x >> (-c&31)). Only the constant-shift form is
// handled; variable rotates fall through (return false).
func rewriteValueMIPS64_OpRotateLeft32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft32 <t> x (MOVVconst [c]))
	// result: (Or32 (Lsh32x64 <t> x (MOVVconst [c&31])) (Rsh32Ux64 <t> x (MOVVconst [-c&31])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr32)
		v0 := b.NewValue0(v.Pos, OpLsh32x64, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 31)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh32Ux64, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		// -c&31 is the complementary shift count (32-c mod 32).
		v3.AuxInt = int64ToAuxInt(-c & 31)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpRotateLeft64 lowers a rotate-left by a constant:
// (x << (c&63)) | (x >> (-c&63)). Only the constant-shift form is
// handled; variable rotates fall through (return false).
func rewriteValueMIPS64_OpRotateLeft64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft64 <t> x (MOVVconst [c]))
	// result: (Or64 (Lsh64x64 <t> x (MOVVconst [c&63])) (Rsh64Ux64 <t> x (MOVVconst [-c&63])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr64)
		v0 := b.NewValue0(v.Pos, OpLsh64x64, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 63)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh64Ux64, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		// -c&63 is the complementary shift count (64-c mod 64).
		v3.AuxInt = int64ToAuxInt(-c & 63)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpRotateLeft8 lowers a rotate-left by a constant:
// (x << (c&7)) | (x >> (-c&7)). Only the constant-shift form is
// handled; variable rotates fall through (return false).
func rewriteValueMIPS64_OpRotateLeft8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft8 <t> x (MOVVconst [c]))
	// result: (Or8 (Lsh8x64 <t> x (MOVVconst [c&7])) (Rsh8Ux64 <t> x (MOVVconst [-c&7])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr8)
		v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 7)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		// -c&7 is the complementary shift count (8-c mod 8).
		v3.AuxInt = int64ToAuxInt(-c & 7)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpRsh16Ux16 lowers an unsigned 16-bit right shift
// with a 16-bit shift count. NEGV(SGTU(64, y)) is all ones when y < 64
// and zero otherwise, so ANDing it with the SRLV result makes oversized
// shifts produce 0 (Go semantics) instead of wrapping.
func rewriteValueMIPS64_OpRsh16Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(x)
		// v3 (the extended shift count) is shared with the mask above.
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16Ux32 lowers an unsigned 16-bit right shift
// with a 32-bit shift count. NEGV(SGTU(64, y)) is all ones when y < 64
// and zero otherwise, so ANDing it with the SRLV result makes oversized
// shifts produce 0 (Go semantics).
func rewriteValueMIPS64_OpRsh16Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(x)
		// v3 (the extended shift count) is shared with the mask above.
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16Ux64 lowers an unsigned 16-bit right shift
// with a 64-bit shift count (no extension of y needed). NEGV(SGTU(64, y))
// masks the SRLV result to 0 when y >= 64 (Go semantics).
func rewriteValueMIPS64_OpRsh16Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt16to64 x) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(x)
		v3.AddArg2(v4, y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16Ux8 lowers an unsigned 16-bit right shift
// with an 8-bit shift count. NEGV(SGTU(64, y)) is all ones when y < 64
// and zero otherwise, so ANDing it with the SRLV result makes oversized
// shifts produce 0 (Go semantics).
func rewriteValueMIPS64_OpRsh16Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(x)
		// v3 (the extended shift count) is shared with the mask above.
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x16 lowers a signed 16-bit right shift with a
// 16-bit shift count. NEGV(SGTU(y, 63)) is all ones when y > 63; ORing
// that into the shift count clamps oversized shifts so SRAV fills the
// result with the sign bit (Go semantics for signed shifts >= width).
func rewriteValueMIPS64_OpRsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x16 <t> x y)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		// v4 (the extended shift count) is shared with the clamp above.
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x32 lowers a signed 16-bit right shift with a
// 32-bit shift count. NEGV(SGTU(y, 63)) is all ones when y > 63; ORing
// that into the shift count clamps oversized shifts so SRAV fills the
// result with the sign bit (Go semantics for signed shifts >= width).
func rewriteValueMIPS64_OpRsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x32 <t> x y)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		// v4 (the extended shift count) is shared with the clamp above.
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x64 lowers a signed 16-bit right shift with a
// 64-bit shift count (no extension of y needed). NEGV(SGTU(y, 63)) is all
// ones when y > 63; ORing that into the shift count clamps oversized
// shifts so SRAV fills the result with the sign bit.
func rewriteValueMIPS64_OpRsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x64 <t> x y)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(y, v4)
		v2.AddArg(v3)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x8 lowers a signed 16-bit right shift with an
// 8-bit shift count. NEGV(SGTU(y, 63)) is all ones when y > 63; ORing
// that into the shift count clamps oversized shifts so SRAV fills the
// result with the sign bit (Go semantics for signed shifts >= width).
func rewriteValueMIPS64_OpRsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x8 <t> x y)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		// v4 (the extended shift count) is shared with the clamp above.
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux16 lowers an unsigned 32-bit right shift
// with a 16-bit shift count. NEGV(SGTU(64, y)) is all ones when y < 64
// and zero otherwise, so ANDing it with the SRLV result makes oversized
// shifts produce 0 (Go semantics).
func rewriteValueMIPS64_OpRsh32Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(x)
		// v3 (the extended shift count) is shared with the mask above.
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux32 lowers an unsigned 32-bit right shift
// with a 32-bit shift count. NEGV(SGTU(64, y)) is all ones when y < 64
// and zero otherwise, so ANDing it with the SRLV result makes oversized
// shifts produce 0 (Go semantics).
func rewriteValueMIPS64_OpRsh32Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(x)
		// v3 (the extended shift count) is shared with the mask above.
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux64 lowers an unsigned 32-bit right shift
// with a 64-bit shift count (no extension of y needed). NEGV(SGTU(64, y))
// masks the SRLV result to 0 when y >= 64 (Go semantics).
func rewriteValueMIPS64_OpRsh32Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt32to64 x) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(x)
		v3.AddArg2(v4, y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux8 lowers an unsigned 32-bit right shift
// with an 8-bit shift count. NEGV(SGTU(64, y)) is all ones when y < 64
// and zero otherwise, so ANDing it with the SRLV result makes oversized
// shifts produce 0 (Go semantics).
func rewriteValueMIPS64_OpRsh32Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(x)
		// v3 (the extended shift count) is shared with the mask above.
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x16 lowers a signed 32-bit right shift with a
// 16-bit shift count. NEGV(SGTU(y, 63)) is all ones when y > 63; ORing
// that into the shift count clamps oversized shifts so SRAV fills the
// result with the sign bit (Go semantics for signed shifts >= width).
func rewriteValueMIPS64_OpRsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x16 <t> x y)
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		// v4 (the extended shift count) is shared with the clamp above.
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x32 lowers a signed 32-bit right shift with a
// 32-bit shift count. NEGV(SGTU(y, 63)) is all ones when y > 63; ORing
// that into the shift count clamps oversized shifts so SRAV fills the
// result with the sign bit (Go semantics for signed shifts >= width).
func rewriteValueMIPS64_OpRsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x32 <t> x y)
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		// v4 (the extended shift count) is shared with the clamp above.
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x64 lowers a signed 32-bit right shift with a
// 64-bit shift count (no extension of y needed). NEGV(SGTU(y, 63)) is all
// ones when y > 63; ORing that into the shift count clamps oversized
// shifts so SRAV fills the result with the sign bit.
func rewriteValueMIPS64_OpRsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x64 <t> x y)
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(y, v4)
		v2.AddArg(v3)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x8 lowers a signed 32-bit right shift with an
// 8-bit shift count. NEGV(SGTU(y, 63)) is all ones when y > 63; ORing
// that into the shift count clamps oversized shifts so SRAV fills the
// result with the sign bit (Go semantics for signed shifts >= width).
func rewriteValueMIPS64_OpRsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x8 <t> x y)
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		// v4 (the extended shift count) is shared with the clamp above.
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux16 lowers an unsigned 64-bit right shift
// with a 16-bit shift count (x needs no extension). NEGV(SGTU(64, y)) is
// all ones when y < 64 and zero otherwise, so ANDing it with the SRLV
// result makes oversized shifts produce 0 (Go semantics).
func rewriteValueMIPS64_OpRsh64Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		// v3 (the extended shift count) is shared with the mask above.
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux32 lowers an unsigned 64-bit right shift
// with a 32-bit shift count (x needs no extension). NEGV(SGTU(64, y)) is
// all ones when y < 64 and zero otherwise, so ANDing it with the SRLV
// result makes oversized shifts produce 0 (Go semantics).
func rewriteValueMIPS64_OpRsh64Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		// v3 (the extended shift count) is shared with the mask above.
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux64 lowers an unsigned 64-bit right shift
// with a 64-bit shift count (neither operand needs extension).
// NEGV(SGTU(64, y)) masks the SRLV result to 0 when y >= 64.
func rewriteValueMIPS64_OpRsh64Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> x y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v3.AddArg2(x, y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux8 lowers an unsigned 64-bit right shift by an
// 8-bit count. Same zero-saturating pattern as the other Rsh64Ux* rules: the
// SRLV result is ANDed with a mask that is all-ones only when the
// zero-extended count is < 64.
// NOTE: generated from _gen/MIPS64.rules — edit the rules file, not this code.
func rewriteValueMIPS64_OpRsh64Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> x (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		// v3 (the zero-extended count) is shared by the range test and the shift.
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x16 lowers a signed 64-bit right shift by a
// 16-bit count. Signed shifts saturate rather than zero: if the zero-extended
// count exceeds 63, ORing in NEGV(SGTU(count, 63)) (all-ones) makes the
// effective SRAV count behave like 63, filling with the sign bit.
// NOTE: generated from _gen/MIPS64.rules — edit the rules file, not this code.
func rewriteValueMIPS64_OpRsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x16 <t> x y)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		// v3 (the zero-extended count) is shared by the range test and the OR.
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x32 lowers a signed 64-bit right shift by a
// 32-bit count. Same clamping idiom as Rsh64x16: counts > 63 are ORed with
// all-ones so SRAV saturates to a sign-fill shift instead of wrapping.
// NOTE: generated from _gen/MIPS64.rules — edit the rules file, not this code.
func rewriteValueMIPS64_OpRsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x32 <t> x y)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		// v3 (the zero-extended count) is shared by the range test and the OR.
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x64 lowers a signed 64-bit right shift by a
// 64-bit count. No extension of the count is needed; counts > 63 are ORed
// with all-ones (via NEGV of the SGTU comparison) so SRAV saturates to a
// sign-fill shift.
// NOTE: generated from _gen/MIPS64.rules — edit the rules file, not this code.
func rewriteValueMIPS64_OpRsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x64 <t> x y)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(63)
		v2.AddArg2(y, v3)
		v1.AddArg(v2)
		v0.AddArg2(v1, y)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x8 lowers a signed 64-bit right shift by an
// 8-bit count. Same clamping idiom as the other Rsh64x* rules: counts > 63
// are ORed with all-ones so SRAV saturates to a sign-fill shift.
// NOTE: generated from _gen/MIPS64.rules — edit the rules file, not this code.
func rewriteValueMIPS64_OpRsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x8 <t> x y)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		// v3 (the zero-extended count) is shared by the range test and the OR.
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux16 lowers an unsigned 8-bit right shift by a
// 16-bit count. The operand is zero-extended to 64 bits before the SRLV, and
// the result is masked to zero when the zero-extended count is >= 64.
// NOTE: generated from _gen/MIPS64.rules — edit the rules file, not this code.
func rewriteValueMIPS64_OpRsh8Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		// v3 (the zero-extended count) is shared by the range test and the shift.
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux32 lowers an unsigned 8-bit right shift by a
// 32-bit count. Same pattern as Rsh8Ux16 with a 32-bit count extension.
// NOTE: generated from _gen/MIPS64.rules — edit the rules file, not this code.
func rewriteValueMIPS64_OpRsh8Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		// v3 (the zero-extended count) is shared by the range test and the shift.
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux64 lowers an unsigned 8-bit right shift by a
// 64-bit count. The operand is zero-extended for the SRLV; counts >= 64
// zero the result via the NEGV(SGTU(64, y)) mask.
// NOTE: generated from _gen/MIPS64.rules — edit the rules file, not this code.
func rewriteValueMIPS64_OpRsh8Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt8to64 x) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(x)
		v3.AddArg2(v4, y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux8 lowers an unsigned 8-bit right shift by an
// 8-bit count. Both operand and count are zero-extended; counts >= 64 zero
// the result via the NEGV/SGTU mask.
// NOTE: generated from _gen/MIPS64.rules — edit the rules file, not this code.
func rewriteValueMIPS64_OpRsh8Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		// v3 (the zero-extended count) is shared by the range test and the shift.
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x16 lowers a signed 8-bit right shift by a
// 16-bit count. The operand is sign-extended to 64 bits; the count is
// clamped by ORing in all-ones when it exceeds 63 so SRAV saturates to a
// sign-fill shift.
// NOTE: generated from _gen/MIPS64.rules — edit the rules file, not this code.
func rewriteValueMIPS64_OpRsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x16 <t> x y)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		// v4 (the zero-extended count) is shared by the range test and the OR.
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x32 lowers a signed 8-bit right shift by a
// 32-bit count. Same pattern as Rsh8x16 with a 32-bit count extension.
// NOTE: generated from _gen/MIPS64.rules — edit the rules file, not this code.
func rewriteValueMIPS64_OpRsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x32 <t> x y)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		// v4 (the zero-extended count) is shared by the range test and the OR.
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x64 lowers a signed 8-bit right shift by a
// 64-bit count. The operand is sign-extended; the count needs no extension
// and is clamped via the OR/NEGV/SGTU idiom so SRAV saturates for counts
// greater than 63.
// NOTE: generated from _gen/MIPS64.rules — edit the rules file, not this code.
func rewriteValueMIPS64_OpRsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x64 <t> x y)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(y, v4)
		v2.AddArg(v3)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x8 lowers a signed 8-bit right shift by an
// 8-bit count. The operand is sign-extended, the count zero-extended and
// clamped via the OR/NEGV/SGTU idiom so SRAV saturates for counts > 63.
// NOTE: generated from _gen/MIPS64.rules — edit the rules file, not this code.
func rewriteValueMIPS64_OpRsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x8 <t> x y)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		// v4 (the zero-extended count) is shared by the range test and the OR.
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpSelect0 rewrites Select0 (the first result of a
// multi-result op). It lowers Mul64uover/Add64carry/Sub64borrow to MIPS64
// instruction sequences and constant-folds DIVV/DIVVU when the operands are
// known. Each rule is tried in order; the function returns true on the first
// rule that fires and false when none match.
// NOTE: generated from _gen/MIPS64.rules — edit the rules file, not this code.
func rewriteValueMIPS64_OpSelect0(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Select0 (Mul64uover x y))
	// result: (Select1 <typ.UInt64> (MULVU x y))
	for {
		if v_0.Op != OpMul64uover {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpSelect1)
		v.Type = typ.UInt64
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (Select0 <t> (Add64carry x y c))
	// result: (ADDV (ADDV <t> x y) c)
	for {
		t := v.Type
		if v_0.Op != OpAdd64carry {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpMIPS64ADDV)
		v0 := b.NewValue0(v.Pos, OpMIPS64ADDV, t)
		v0.AddArg2(x, y)
		v.AddArg2(v0, c)
		return true
	}
	// match: (Select0 <t> (Sub64borrow x y c))
	// result: (SUBV (SUBV <t> x y) c)
	for {
		t := v.Type
		if v_0.Op != OpSub64borrow {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpMIPS64SUBV)
		v0 := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
		v0.AddArg2(x, y)
		v.AddArg2(v0, c)
		return true
	}
	// Select0 of DIVVU is the remainder: x % 1 == 0.
	// match: (Select0 (DIVVU _ (MOVVconst [1])))
	// result: (MOVVconst [0])
	for {
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// Remainder by a power of two becomes a mask.
	// match: (Select0 (DIVVU x (MOVVconst [c])))
	// cond: isPowerOfTwo(c)
	// result: (ANDconst [c-1] x)
	for {
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0_1.AuxInt)
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpMIPS64ANDconst)
		v.AuxInt = int64ToAuxInt(c - 1)
		v.AddArg(x)
		return true
	}
	// Constant-fold signed remainder (d != 0 guards division by zero).
	// match: (Select0 (DIVV (MOVVconst [c]) (MOVVconst [d])))
	// cond: d != 0
	// result: (MOVVconst [c%d])
	for {
		if v_0.Op != OpMIPS64DIVV {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0_0.AuxInt)
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(c % d)
		return true
	}
	// Constant-fold unsigned remainder.
	// match: (Select0 (DIVVU (MOVVconst [c]) (MOVVconst [d])))
	// cond: d != 0
	// result: (MOVVconst [int64(uint64(c)%uint64(d))])
	for {
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0_0.AuxInt)
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) % uint64(d)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpSelect1 rewrites Select1 (the second result of a
// multi-result op). It lowers Mul64uover/Add64carry/Sub64borrow, strength-
// reduces MULVU/DIVVU by constants (negate, zero, identity, shift by
// log2(c)), and constant-folds MULVU/DIVV/DIVVU when both operands are
// known. Rules are tried in order; the function returns true on the first
// rule that fires and false when none match.
// NOTE: generated from _gen/MIPS64.rules — edit the rules file, not this code.
func rewriteValueMIPS64_OpSelect1(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Select1 (Mul64uover x y))
	// result: (SGTU <typ.Bool> (Select0 <typ.UInt64> (MULVU x y)) (MOVVconst <typ.UInt64> [0]))
	for {
		if v_0.Op != OpMul64uover {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpMIPS64SGTU)
		v.Type = typ.Bool
		v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1.AddArg2(x, y)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v2)
		return true
	}
	// Carry-out: an unsigned sum wrapped iff a partial sum is less than an addend.
	// match: (Select1 <t> (Add64carry x y c))
	// result: (OR (SGTU <t> x s:(ADDV <t> x y)) (SGTU <t> s (ADDV <t> s c)))
	for {
		t := v.Type
		if v_0.Op != OpAdd64carry {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpMIPS64OR)
		v0 := b.NewValue0(v.Pos, OpMIPS64SGTU, t)
		// s is named in the rule so both SGTU comparisons share the ADDV.
		s := b.NewValue0(v.Pos, OpMIPS64ADDV, t)
		s.AddArg2(x, y)
		v0.AddArg2(x, s)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64ADDV, t)
		v3.AddArg2(s, c)
		v2.AddArg2(s, v3)
		v.AddArg2(v0, v2)
		return true
	}
	// Borrow-out: an unsigned difference wrapped iff it exceeds the minuend.
	// match: (Select1 <t> (Sub64borrow x y c))
	// result: (OR (SGTU <t> s:(SUBV <t> x y) x) (SGTU <t> (SUBV <t> s c) s))
	for {
		t := v.Type
		if v_0.Op != OpSub64borrow {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpMIPS64OR)
		v0 := b.NewValue0(v.Pos, OpMIPS64SGTU, t)
		// s is named in the rule so both SGTU comparisons share the SUBV.
		s := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
		s.AddArg2(x, y)
		v0.AddArg2(s, x)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
		v3.AddArg2(s, c)
		v2.AddArg2(v3, s)
		v.AddArg2(v0, v2)
		return true
	}
	// MULVU is commutative: the _i0 loop tries both argument orders.
	// match: (Select1 (MULVU x (MOVVconst [-1])))
	// result: (NEGV x)
	for {
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			x := v_0_0
			if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != -1 {
				continue
			}
			v.reset(OpMIPS64NEGV)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (Select1 (MULVU _ (MOVVconst [0])))
	// result: (MOVVconst [0])
	for {
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
				continue
			}
			v.reset(OpMIPS64MOVVconst)
			v.AuxInt = int64ToAuxInt(0)
			return true
		}
		break
	}
	// match: (Select1 (MULVU x (MOVVconst [1])))
	// result: x
	for {
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			x := v_0_0
			if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 {
				continue
			}
			v.copyOf(x)
			return true
		}
		break
	}
	// Multiply by a power of two becomes a left shift.
	// match: (Select1 (MULVU x (MOVVconst [c])))
	// cond: isPowerOfTwo(c)
	// result: (SLLVconst [log64(c)] x)
	for {
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			x := v_0_0
			if v_0_1.Op != OpMIPS64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_0_1.AuxInt)
			if !(isPowerOfTwo(c)) {
				continue
			}
			v.reset(OpMIPS64SLLVconst)
			v.AuxInt = int64ToAuxInt(log64(c))
			v.AddArg(x)
			return true
		}
		break
	}
	// Select1 of DIVVU is the quotient: x / 1 == x.
	// match: (Select1 (DIVVU x (MOVVconst [1])))
	// result: x
	for {
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// Unsigned divide by a power of two becomes a right shift.
	// match: (Select1 (DIVVU x (MOVVconst [c])))
	// cond: isPowerOfTwo(c)
	// result: (SRLVconst [log64(c)] x)
	for {
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0_1.AuxInt)
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpMIPS64SRLVconst)
		v.AuxInt = int64ToAuxInt(log64(c))
		v.AddArg(x)
		return true
	}
	// Constant-fold multiplication (low 64 bits).
	// match: (Select1 (MULVU (MOVVconst [c]) (MOVVconst [d])))
	// result: (MOVVconst [c*d])
	for {
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_0.Op != OpMIPS64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_0_0.AuxInt)
			if v_0_1.Op != OpMIPS64MOVVconst {
				continue
			}
			d := auxIntToInt64(v_0_1.AuxInt)
			v.reset(OpMIPS64MOVVconst)
			v.AuxInt = int64ToAuxInt(c * d)
			return true
		}
		break
	}
	// Constant-fold signed quotient (d != 0 guards division by zero).
	// match: (Select1 (DIVV (MOVVconst [c]) (MOVVconst [d])))
	// cond: d != 0
	// result: (MOVVconst [c/d])
	for {
		if v_0.Op != OpMIPS64DIVV {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0_0.AuxInt)
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(c / d)
		return true
	}
	// Constant-fold unsigned quotient.
	// match: (Select1 (DIVVU (MOVVconst [c]) (MOVVconst [d])))
	// cond: d != 0
	// result: (MOVVconst [int64(uint64(c)/uint64(d))])
	for {
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0_0.AuxInt)
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) / uint64(d)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpSlicemask lowers Slicemask: all-ones when x > 0,
// zero when x == 0. NEGV(x) has its high bit set for any nonzero x, and the
// arithmetic shift by 63 smears that sign bit across the whole word.
// NOTE: generated from _gen/MIPS64.rules — edit the rules file, not this code.
func rewriteValueMIPS64_OpSlicemask(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Slicemask <t> x)
	// result: (SRAVconst (NEGV <t> x) [63])
	for {
		t := v.Type
		x := v_0
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = int64ToAuxInt(63)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpStore lowers a generic Store to the sized MIPS64
// store instruction, dispatching on the stored type's size and on whether
// it is a float (4/8-byte floats use MOVFstore/MOVDstore instead of
// MOVWstore/MOVVstore). Returns false if no size matches.
// NOTE: generated from _gen/MIPS64.rules — edit the rules file, not this code.
func rewriteValueMIPS64_OpStore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 1
	// result: (MOVBstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 1) {
			break
		}
		v.reset(OpMIPS64MOVBstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 2
	// result: (MOVHstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 2) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 4 && !t.IsFloat()
	// result: (MOVWstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 4 && !t.IsFloat()) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 8 && !t.IsFloat()
	// result: (MOVVstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 8 && !t.IsFloat()) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 4 && t.IsFloat()
	// result: (MOVFstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 4 && t.IsFloat()) {
			break
		}
		v.reset(OpMIPS64MOVFstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 8 && t.IsFloat()
	// result: (MOVDstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 8 && t.IsFloat()) {
			break
		}
		v.reset(OpMIPS64MOVDstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
  7658  func rewriteValueMIPS64_OpZero(v *Value) bool {
  7659  	v_1 := v.Args[1]
  7660  	v_0 := v.Args[0]
  7661  	b := v.Block
  7662  	config := b.Func.Config
  7663  	typ := &b.Func.Config.Types
  7664  	// match: (Zero [0] _ mem)
  7665  	// result: mem
  7666  	for {
  7667  		if auxIntToInt64(v.AuxInt) != 0 {
  7668  			break
  7669  		}
  7670  		mem := v_1
  7671  		v.copyOf(mem)
  7672  		return true
  7673  	}
  7674  	// match: (Zero [1] ptr mem)
  7675  	// result: (MOVBstore ptr (MOVVconst [0]) mem)
  7676  	for {
  7677  		if auxIntToInt64(v.AuxInt) != 1 {
  7678  			break
  7679  		}
  7680  		ptr := v_0
  7681  		mem := v_1
  7682  		v.reset(OpMIPS64MOVBstore)
  7683  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  7684  		v0.AuxInt = int64ToAuxInt(0)
  7685  		v.AddArg3(ptr, v0, mem)
  7686  		return true
  7687  	}
  7688  	// match: (Zero [2] {t} ptr mem)
  7689  	// cond: t.Alignment()%2 == 0
  7690  	// result: (MOVHstore ptr (MOVVconst [0]) mem)
  7691  	for {
  7692  		if auxIntToInt64(v.AuxInt) != 2 {
  7693  			break
  7694  		}
  7695  		t := auxToType(v.Aux)
  7696  		ptr := v_0
  7697  		mem := v_1
  7698  		if !(t.Alignment()%2 == 0) {
  7699  			break
  7700  		}
  7701  		v.reset(OpMIPS64MOVHstore)
  7702  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  7703  		v0.AuxInt = int64ToAuxInt(0)
  7704  		v.AddArg3(ptr, v0, mem)
  7705  		return true
  7706  	}
  7707  	// match: (Zero [2] ptr mem)
  7708  	// result: (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem))
  7709  	for {
  7710  		if auxIntToInt64(v.AuxInt) != 2 {
  7711  			break
  7712  		}
  7713  		ptr := v_0
  7714  		mem := v_1
  7715  		v.reset(OpMIPS64MOVBstore)
  7716  		v.AuxInt = int32ToAuxInt(1)
  7717  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  7718  		v0.AuxInt = int64ToAuxInt(0)
  7719  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
  7720  		v1.AuxInt = int32ToAuxInt(0)
  7721  		v1.AddArg3(ptr, v0, mem)
  7722  		v.AddArg3(ptr, v0, v1)
  7723  		return true
  7724  	}
  7725  	// match: (Zero [4] {t} ptr mem)
  7726  	// cond: t.Alignment()%4 == 0
  7727  	// result: (MOVWstore ptr (MOVVconst [0]) mem)
  7728  	for {
  7729  		if auxIntToInt64(v.AuxInt) != 4 {
  7730  			break
  7731  		}
  7732  		t := auxToType(v.Aux)
  7733  		ptr := v_0
  7734  		mem := v_1
  7735  		if !(t.Alignment()%4 == 0) {
  7736  			break
  7737  		}
  7738  		v.reset(OpMIPS64MOVWstore)
  7739  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  7740  		v0.AuxInt = int64ToAuxInt(0)
  7741  		v.AddArg3(ptr, v0, mem)
  7742  		return true
  7743  	}
  7744  	// match: (Zero [4] {t} ptr mem)
  7745  	// cond: t.Alignment()%2 == 0
  7746  	// result: (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem))
  7747  	for {
  7748  		if auxIntToInt64(v.AuxInt) != 4 {
  7749  			break
  7750  		}
  7751  		t := auxToType(v.Aux)
  7752  		ptr := v_0
  7753  		mem := v_1
  7754  		if !(t.Alignment()%2 == 0) {
  7755  			break
  7756  		}
  7757  		v.reset(OpMIPS64MOVHstore)
  7758  		v.AuxInt = int32ToAuxInt(2)
  7759  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  7760  		v0.AuxInt = int64ToAuxInt(0)
  7761  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
  7762  		v1.AuxInt = int32ToAuxInt(0)
  7763  		v1.AddArg3(ptr, v0, mem)
  7764  		v.AddArg3(ptr, v0, v1)
  7765  		return true
  7766  	}
  7767  	// match: (Zero [4] ptr mem)
  7768  	// result: (MOVBstore [3] ptr (MOVVconst [0]) (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem))))
  7769  	for {
  7770  		if auxIntToInt64(v.AuxInt) != 4 {
  7771  			break
  7772  		}
  7773  		ptr := v_0
  7774  		mem := v_1
  7775  		v.reset(OpMIPS64MOVBstore)
  7776  		v.AuxInt = int32ToAuxInt(3)
  7777  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  7778  		v0.AuxInt = int64ToAuxInt(0)
  7779  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
  7780  		v1.AuxInt = int32ToAuxInt(2)
  7781  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
  7782  		v2.AuxInt = int32ToAuxInt(1)
  7783  		v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
  7784  		v3.AuxInt = int32ToAuxInt(0)
  7785  		v3.AddArg3(ptr, v0, mem)
  7786  		v2.AddArg3(ptr, v0, v3)
  7787  		v1.AddArg3(ptr, v0, v2)
  7788  		v.AddArg3(ptr, v0, v1)
  7789  		return true
  7790  	}
  7791  	// match: (Zero [8] {t} ptr mem)
  7792  	// cond: t.Alignment()%8 == 0
  7793  	// result: (MOVVstore ptr (MOVVconst [0]) mem)
  7794  	for {
  7795  		if auxIntToInt64(v.AuxInt) != 8 {
  7796  			break
  7797  		}
  7798  		t := auxToType(v.Aux)
  7799  		ptr := v_0
  7800  		mem := v_1
  7801  		if !(t.Alignment()%8 == 0) {
  7802  			break
  7803  		}
  7804  		v.reset(OpMIPS64MOVVstore)
  7805  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  7806  		v0.AuxInt = int64ToAuxInt(0)
  7807  		v.AddArg3(ptr, v0, mem)
  7808  		return true
  7809  	}
  7810  	// match: (Zero [8] {t} ptr mem)
  7811  	// cond: t.Alignment()%4 == 0
  7812  	// result: (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem))
  7813  	for {
  7814  		if auxIntToInt64(v.AuxInt) != 8 {
  7815  			break
  7816  		}
  7817  		t := auxToType(v.Aux)
  7818  		ptr := v_0
  7819  		mem := v_1
  7820  		if !(t.Alignment()%4 == 0) {
  7821  			break
  7822  		}
  7823  		v.reset(OpMIPS64MOVWstore)
  7824  		v.AuxInt = int32ToAuxInt(4)
  7825  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  7826  		v0.AuxInt = int64ToAuxInt(0)
  7827  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
  7828  		v1.AuxInt = int32ToAuxInt(0)
  7829  		v1.AddArg3(ptr, v0, mem)
  7830  		v.AddArg3(ptr, v0, v1)
  7831  		return true
  7832  	}
  7833  	// match: (Zero [8] {t} ptr mem)
  7834  	// cond: t.Alignment()%2 == 0
  7835  	// result: (MOVHstore [6] ptr (MOVVconst [0]) (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem))))
  7836  	for {
  7837  		if auxIntToInt64(v.AuxInt) != 8 {
  7838  			break
  7839  		}
  7840  		t := auxToType(v.Aux)
  7841  		ptr := v_0
  7842  		mem := v_1
  7843  		if !(t.Alignment()%2 == 0) {
  7844  			break
  7845  		}
  7846  		v.reset(OpMIPS64MOVHstore)
  7847  		v.AuxInt = int32ToAuxInt(6)
  7848  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  7849  		v0.AuxInt = int64ToAuxInt(0)
  7850  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
  7851  		v1.AuxInt = int32ToAuxInt(4)
  7852  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
  7853  		v2.AuxInt = int32ToAuxInt(2)
  7854  		v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
  7855  		v3.AuxInt = int32ToAuxInt(0)
  7856  		v3.AddArg3(ptr, v0, mem)
  7857  		v2.AddArg3(ptr, v0, v3)
  7858  		v1.AddArg3(ptr, v0, v2)
  7859  		v.AddArg3(ptr, v0, v1)
  7860  		return true
  7861  	}
  7862  	// match: (Zero [3] ptr mem)
  7863  	// result: (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem)))
  7864  	for {
  7865  		if auxIntToInt64(v.AuxInt) != 3 {
  7866  			break
  7867  		}
  7868  		ptr := v_0
  7869  		mem := v_1
  7870  		v.reset(OpMIPS64MOVBstore)
  7871  		v.AuxInt = int32ToAuxInt(2)
  7872  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  7873  		v0.AuxInt = int64ToAuxInt(0)
  7874  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
  7875  		v1.AuxInt = int32ToAuxInt(1)
  7876  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
  7877  		v2.AuxInt = int32ToAuxInt(0)
  7878  		v2.AddArg3(ptr, v0, mem)
  7879  		v1.AddArg3(ptr, v0, v2)
  7880  		v.AddArg3(ptr, v0, v1)
  7881  		return true
  7882  	}
  7883  	// match: (Zero [6] {t} ptr mem)
  7884  	// cond: t.Alignment()%2 == 0
  7885  	// result: (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem)))
  7886  	for {
  7887  		if auxIntToInt64(v.AuxInt) != 6 {
  7888  			break
  7889  		}
  7890  		t := auxToType(v.Aux)
  7891  		ptr := v_0
  7892  		mem := v_1
  7893  		if !(t.Alignment()%2 == 0) {
  7894  			break
  7895  		}
  7896  		v.reset(OpMIPS64MOVHstore)
  7897  		v.AuxInt = int32ToAuxInt(4)
  7898  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  7899  		v0.AuxInt = int64ToAuxInt(0)
  7900  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
  7901  		v1.AuxInt = int32ToAuxInt(2)
  7902  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
  7903  		v2.AuxInt = int32ToAuxInt(0)
  7904  		v2.AddArg3(ptr, v0, mem)
  7905  		v1.AddArg3(ptr, v0, v2)
  7906  		v.AddArg3(ptr, v0, v1)
  7907  		return true
  7908  	}
  7909  	// match: (Zero [12] {t} ptr mem)
  7910  	// cond: t.Alignment()%4 == 0
  7911  	// result: (MOVWstore [8] ptr (MOVVconst [0]) (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem)))
  7912  	for {
  7913  		if auxIntToInt64(v.AuxInt) != 12 {
  7914  			break
  7915  		}
  7916  		t := auxToType(v.Aux)
  7917  		ptr := v_0
  7918  		mem := v_1
  7919  		if !(t.Alignment()%4 == 0) {
  7920  			break
  7921  		}
  7922  		v.reset(OpMIPS64MOVWstore)
  7923  		v.AuxInt = int32ToAuxInt(8)
  7924  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  7925  		v0.AuxInt = int64ToAuxInt(0)
  7926  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
  7927  		v1.AuxInt = int32ToAuxInt(4)
  7928  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
  7929  		v2.AuxInt = int32ToAuxInt(0)
  7930  		v2.AddArg3(ptr, v0, mem)
  7931  		v1.AddArg3(ptr, v0, v2)
  7932  		v.AddArg3(ptr, v0, v1)
  7933  		return true
  7934  	}
  7935  	// match: (Zero [16] {t} ptr mem)
  7936  	// cond: t.Alignment()%8 == 0
  7937  	// result: (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem))
  7938  	for {
  7939  		if auxIntToInt64(v.AuxInt) != 16 {
  7940  			break
  7941  		}
  7942  		t := auxToType(v.Aux)
  7943  		ptr := v_0
  7944  		mem := v_1
  7945  		if !(t.Alignment()%8 == 0) {
  7946  			break
  7947  		}
  7948  		v.reset(OpMIPS64MOVVstore)
  7949  		v.AuxInt = int32ToAuxInt(8)
  7950  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  7951  		v0.AuxInt = int64ToAuxInt(0)
  7952  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
  7953  		v1.AuxInt = int32ToAuxInt(0)
  7954  		v1.AddArg3(ptr, v0, mem)
  7955  		v.AddArg3(ptr, v0, v1)
  7956  		return true
  7957  	}
  7958  	// match: (Zero [24] {t} ptr mem)
  7959  	// cond: t.Alignment()%8 == 0
  7960  	// result: (MOVVstore [16] ptr (MOVVconst [0]) (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem)))
  7961  	for {
  7962  		if auxIntToInt64(v.AuxInt) != 24 {
  7963  			break
  7964  		}
  7965  		t := auxToType(v.Aux)
  7966  		ptr := v_0
  7967  		mem := v_1
  7968  		if !(t.Alignment()%8 == 0) {
  7969  			break
  7970  		}
  7971  		v.reset(OpMIPS64MOVVstore)
  7972  		v.AuxInt = int32ToAuxInt(16)
  7973  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  7974  		v0.AuxInt = int64ToAuxInt(0)
  7975  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
  7976  		v1.AuxInt = int32ToAuxInt(8)
  7977  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
  7978  		v2.AuxInt = int32ToAuxInt(0)
  7979  		v2.AddArg3(ptr, v0, mem)
  7980  		v1.AddArg3(ptr, v0, v2)
  7981  		v.AddArg3(ptr, v0, v1)
  7982  		return true
  7983  	}
  7984  	// match: (Zero [s] {t} ptr mem)
  7985  	// cond: s%8 == 0 && s > 24 && s <= 8*128 && t.Alignment()%8 == 0
  7986  	// result: (DUFFZERO [8 * (128 - s/8)] ptr mem)
  7987  	for {
  7988  		s := auxIntToInt64(v.AuxInt)
  7989  		t := auxToType(v.Aux)
  7990  		ptr := v_0
  7991  		mem := v_1
  7992  		if !(s%8 == 0 && s > 24 && s <= 8*128 && t.Alignment()%8 == 0) {
  7993  			break
  7994  		}
  7995  		v.reset(OpMIPS64DUFFZERO)
  7996  		v.AuxInt = int64ToAuxInt(8 * (128 - s/8))
  7997  		v.AddArg2(ptr, mem)
  7998  		return true
  7999  	}
  8000  	// match: (Zero [s] {t} ptr mem)
  8001  	// cond: s > 8*128 || t.Alignment()%8 != 0
  8002  	// result: (LoweredZero [t.Alignment()] ptr (ADDVconst <ptr.Type> ptr [s-moveSize(t.Alignment(), config)]) mem)
  8003  	for {
  8004  		s := auxIntToInt64(v.AuxInt)
  8005  		t := auxToType(v.Aux)
  8006  		ptr := v_0
  8007  		mem := v_1
  8008  		if !(s > 8*128 || t.Alignment()%8 != 0) {
  8009  			break
  8010  		}
  8011  		v.reset(OpMIPS64LoweredZero)
  8012  		v.AuxInt = int64ToAuxInt(t.Alignment())
  8013  		v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, ptr.Type)
  8014  		v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
  8015  		v0.AddArg(ptr)
  8016  		v.AddArg3(ptr, v0, mem)
  8017  		return true
  8018  	}
  8019  	return false
  8020  }
// rewriteBlockMIPS64 applies the MIPS64 block rewrite rules (generated from
// _gen/MIPS64.rules) to block b. It lowers the generic If block to the
// machine NE block, folds boolean-negation and comparison patterns in branch
// controls into the matching machine branch kind (FPF/FPT, EQ/NE,
// GEZ/GTZ/LEZ/LTZ), and resolves branches on constants to BlockFirst.
// It reports whether any rewrite was applied. Each rule's loop runs at most
// once (every body returns true); `break` means "this rule did not match,
// try the next one".
func rewriteBlockMIPS64(b *Block) bool {
	switch b.Kind {
	case BlockMIPS64EQ:
		// match: (EQ (FPFlagTrue cmp) yes no)
		// result: (FPF cmp yes no)
		for b.Controls[0].Op == OpMIPS64FPFlagTrue {
			v_0 := b.Controls[0]
			cmp := v_0.Args[0]
			b.resetWithControl(BlockMIPS64FPF, cmp)
			return true
		}
		// match: (EQ (FPFlagFalse cmp) yes no)
		// result: (FPT cmp yes no)
		for b.Controls[0].Op == OpMIPS64FPFlagFalse {
			v_0 := b.Controls[0]
			cmp := v_0.Args[0]
			b.resetWithControl(BlockMIPS64FPT, cmp)
			return true
		}
		// match: (EQ (XORconst [1] cmp:(SGT _ _)) yes no)
		// result: (NE cmp yes no)
		// XORconst [1] on a 0/1 comparison result is boolean negation,
		// so EQ of the negation is NE of the comparison itself.
		for b.Controls[0].Op == OpMIPS64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpMIPS64SGT {
				break
			}
			b.resetWithControl(BlockMIPS64NE, cmp)
			return true
		}
		// match: (EQ (XORconst [1] cmp:(SGTU _ _)) yes no)
		// result: (NE cmp yes no)
		for b.Controls[0].Op == OpMIPS64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpMIPS64SGTU {
				break
			}
			b.resetWithControl(BlockMIPS64NE, cmp)
			return true
		}
		// match: (EQ (XORconst [1] cmp:(SGTconst _)) yes no)
		// result: (NE cmp yes no)
		for b.Controls[0].Op == OpMIPS64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpMIPS64SGTconst {
				break
			}
			b.resetWithControl(BlockMIPS64NE, cmp)
			return true
		}
		// match: (EQ (XORconst [1] cmp:(SGTUconst _)) yes no)
		// result: (NE cmp yes no)
		for b.Controls[0].Op == OpMIPS64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpMIPS64SGTUconst {
				break
			}
			b.resetWithControl(BlockMIPS64NE, cmp)
			return true
		}
		// match: (EQ (SGTUconst [1] x) yes no)
		// result: (NE x yes no)
		// (SGTUconst [1] x) is "x == 0" for a 0/1 x, so EQ of it is NE x.
		for b.Controls[0].Op == OpMIPS64SGTUconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			x := v_0.Args[0]
			b.resetWithControl(BlockMIPS64NE, x)
			return true
		}
		// match: (EQ (SGTU x (MOVVconst [0])) yes no)
		// result: (EQ x yes no)
		for b.Controls[0].Op == OpMIPS64SGTU {
			v_0 := b.Controls[0]
			_ = v_0.Args[1]
			x := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockMIPS64EQ, x)
			return true
		}
		// match: (EQ (SGTconst [0] x) yes no)
		// result: (GEZ x yes no)
		for b.Controls[0].Op == OpMIPS64SGTconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			x := v_0.Args[0]
			b.resetWithControl(BlockMIPS64GEZ, x)
			return true
		}
		// match: (EQ (SGT x (MOVVconst [0])) yes no)
		// result: (LEZ x yes no)
		for b.Controls[0].Op == OpMIPS64SGT {
			v_0 := b.Controls[0]
			_ = v_0.Args[1]
			x := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockMIPS64LEZ, x)
			return true
		}
		// match: (EQ (MOVVconst [0]) yes no)
		// result: (First yes no)
		// Constant control: the branch is decided at compile time.
		for b.Controls[0].Op == OpMIPS64MOVVconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (EQ (MOVVconst [c]) yes no)
		// cond: c != 0
		// result: (First no yes)
		// swapSuccessors makes BlockFirst take what was the "no" edge.
		for b.Controls[0].Op == OpMIPS64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c != 0) {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
	case BlockMIPS64GEZ:
		// match: (GEZ (MOVVconst [c]) yes no)
		// cond: c >= 0
		// result: (First yes no)
		for b.Controls[0].Op == OpMIPS64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c >= 0) {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (GEZ (MOVVconst [c]) yes no)
		// cond: c < 0
		// result: (First no yes)
		for b.Controls[0].Op == OpMIPS64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c < 0) {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
	case BlockMIPS64GTZ:
		// match: (GTZ (MOVVconst [c]) yes no)
		// cond: c > 0
		// result: (First yes no)
		for b.Controls[0].Op == OpMIPS64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c > 0) {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (GTZ (MOVVconst [c]) yes no)
		// cond: c <= 0
		// result: (First no yes)
		for b.Controls[0].Op == OpMIPS64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c <= 0) {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
	case BlockIf:
		// match: (If cond yes no)
		// result: (NE cond yes no)
		// Generic If lowers to "branch if cond != 0" on MIPS64.
		for {
			cond := b.Controls[0]
			b.resetWithControl(BlockMIPS64NE, cond)
			return true
		}
	case BlockMIPS64LEZ:
		// match: (LEZ (MOVVconst [c]) yes no)
		// cond: c <= 0
		// result: (First yes no)
		for b.Controls[0].Op == OpMIPS64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c <= 0) {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (LEZ (MOVVconst [c]) yes no)
		// cond: c > 0
		// result: (First no yes)
		for b.Controls[0].Op == OpMIPS64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c > 0) {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
	case BlockMIPS64LTZ:
		// match: (LTZ (MOVVconst [c]) yes no)
		// cond: c < 0
		// result: (First yes no)
		for b.Controls[0].Op == OpMIPS64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c < 0) {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (LTZ (MOVVconst [c]) yes no)
		// cond: c >= 0
		// result: (First no yes)
		for b.Controls[0].Op == OpMIPS64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c >= 0) {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
	case BlockMIPS64NE:
		// NE rules mirror the EQ rules above with the branch sense inverted.
		// match: (NE (FPFlagTrue cmp) yes no)
		// result: (FPT cmp yes no)
		for b.Controls[0].Op == OpMIPS64FPFlagTrue {
			v_0 := b.Controls[0]
			cmp := v_0.Args[0]
			b.resetWithControl(BlockMIPS64FPT, cmp)
			return true
		}
		// match: (NE (FPFlagFalse cmp) yes no)
		// result: (FPF cmp yes no)
		for b.Controls[0].Op == OpMIPS64FPFlagFalse {
			v_0 := b.Controls[0]
			cmp := v_0.Args[0]
			b.resetWithControl(BlockMIPS64FPF, cmp)
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGT _ _)) yes no)
		// result: (EQ cmp yes no)
		for b.Controls[0].Op == OpMIPS64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpMIPS64SGT {
				break
			}
			b.resetWithControl(BlockMIPS64EQ, cmp)
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGTU _ _)) yes no)
		// result: (EQ cmp yes no)
		for b.Controls[0].Op == OpMIPS64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpMIPS64SGTU {
				break
			}
			b.resetWithControl(BlockMIPS64EQ, cmp)
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGTconst _)) yes no)
		// result: (EQ cmp yes no)
		for b.Controls[0].Op == OpMIPS64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpMIPS64SGTconst {
				break
			}
			b.resetWithControl(BlockMIPS64EQ, cmp)
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGTUconst _)) yes no)
		// result: (EQ cmp yes no)
		for b.Controls[0].Op == OpMIPS64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpMIPS64SGTUconst {
				break
			}
			b.resetWithControl(BlockMIPS64EQ, cmp)
			return true
		}
		// match: (NE (SGTUconst [1] x) yes no)
		// result: (EQ x yes no)
		for b.Controls[0].Op == OpMIPS64SGTUconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			x := v_0.Args[0]
			b.resetWithControl(BlockMIPS64EQ, x)
			return true
		}
		// match: (NE (SGTU x (MOVVconst [0])) yes no)
		// result: (NE x yes no)
		for b.Controls[0].Op == OpMIPS64SGTU {
			v_0 := b.Controls[0]
			_ = v_0.Args[1]
			x := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockMIPS64NE, x)
			return true
		}
		// match: (NE (SGTconst [0] x) yes no)
		// result: (LTZ x yes no)
		for b.Controls[0].Op == OpMIPS64SGTconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			x := v_0.Args[0]
			b.resetWithControl(BlockMIPS64LTZ, x)
			return true
		}
		// match: (NE (SGT x (MOVVconst [0])) yes no)
		// result: (GTZ x yes no)
		for b.Controls[0].Op == OpMIPS64SGT {
			v_0 := b.Controls[0]
			_ = v_0.Args[1]
			x := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockMIPS64GTZ, x)
			return true
		}
		// match: (NE (MOVVconst [0]) yes no)
		// result: (First no yes)
		for b.Controls[0].Op == OpMIPS64MOVVconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
		// match: (NE (MOVVconst [c]) yes no)
		// cond: c != 0
		// result: (First yes no)
		for b.Controls[0].Op == OpMIPS64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c != 0) {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
	}
	return false
}
  8426  
