Source file src/cmd/compile/internal/ssa/rewriteRISCV64.go

     1  // Code generated from _gen/RISCV64.rules using 'go generate'; DO NOT EDIT.
     2  
     3  package ssa
     4  
     5  import "internal/buildcfg"
     6  import "math"
     7  import "math/bits"
     8  import "cmd/compile/internal/types"
     9  
// rewriteValueRISCV64 applies the RISCV64 lowering/optimization rules to v,
// dispatching on v.Op. Opcodes with a trivial 1:1 lowering are handled inline
// by overwriting v.Op (arguments are kept as-is); opcodes with matching
// conditions or multi-value results are delegated to per-opcode rewrite
// functions. It reports whether v was rewritten.
func rewriteValueRISCV64(v *Value) bool {
	switch v.Op {
	case OpAbs:
		v.Op = OpRISCV64FABSD
		return true
	case OpAdd16:
		v.Op = OpRISCV64ADD
		return true
	case OpAdd32:
		v.Op = OpRISCV64ADD
		return true
	case OpAdd32F:
		v.Op = OpRISCV64FADDS
		return true
	case OpAdd64:
		v.Op = OpRISCV64ADD
		return true
	case OpAdd64F:
		v.Op = OpRISCV64FADDD
		return true
	case OpAdd8:
		v.Op = OpRISCV64ADD
		return true
	case OpAddPtr:
		v.Op = OpRISCV64ADD
		return true
	case OpAddr:
		return rewriteValueRISCV64_OpAddr(v)
	case OpAnd16:
		v.Op = OpRISCV64AND
		return true
	case OpAnd32:
		v.Op = OpRISCV64AND
		return true
	case OpAnd64:
		v.Op = OpRISCV64AND
		return true
	case OpAnd8:
		v.Op = OpRISCV64AND
		return true
	case OpAndB:
		v.Op = OpRISCV64AND
		return true
	case OpAtomicAdd32:
		v.Op = OpRISCV64LoweredAtomicAdd32
		return true
	case OpAtomicAdd64:
		v.Op = OpRISCV64LoweredAtomicAdd64
		return true
	case OpAtomicAnd32:
		v.Op = OpRISCV64LoweredAtomicAnd32
		return true
	case OpAtomicAnd8:
		return rewriteValueRISCV64_OpAtomicAnd8(v)
	case OpAtomicCompareAndSwap32:
		return rewriteValueRISCV64_OpAtomicCompareAndSwap32(v)
	case OpAtomicCompareAndSwap64:
		v.Op = OpRISCV64LoweredAtomicCas64
		return true
	case OpAtomicExchange32:
		v.Op = OpRISCV64LoweredAtomicExchange32
		return true
	case OpAtomicExchange64:
		v.Op = OpRISCV64LoweredAtomicExchange64
		return true
	case OpAtomicLoad32:
		v.Op = OpRISCV64LoweredAtomicLoad32
		return true
	case OpAtomicLoad64:
		v.Op = OpRISCV64LoweredAtomicLoad64
		return true
	case OpAtomicLoad8:
		v.Op = OpRISCV64LoweredAtomicLoad8
		return true
	case OpAtomicLoadPtr:
		v.Op = OpRISCV64LoweredAtomicLoad64
		return true
	case OpAtomicOr32:
		v.Op = OpRISCV64LoweredAtomicOr32
		return true
	case OpAtomicOr8:
		return rewriteValueRISCV64_OpAtomicOr8(v)
	case OpAtomicStore32:
		v.Op = OpRISCV64LoweredAtomicStore32
		return true
	case OpAtomicStore64:
		v.Op = OpRISCV64LoweredAtomicStore64
		return true
	case OpAtomicStore8:
		v.Op = OpRISCV64LoweredAtomicStore8
		return true
	case OpAtomicStorePtrNoWB:
		v.Op = OpRISCV64LoweredAtomicStore64
		return true
	case OpAvg64u:
		return rewriteValueRISCV64_OpAvg64u(v)
	case OpBitLen16:
		return rewriteValueRISCV64_OpBitLen16(v)
	case OpBitLen32:
		return rewriteValueRISCV64_OpBitLen32(v)
	case OpBitLen64:
		return rewriteValueRISCV64_OpBitLen64(v)
	case OpBitLen8:
		return rewriteValueRISCV64_OpBitLen8(v)
	case OpBswap16:
		return rewriteValueRISCV64_OpBswap16(v)
	case OpBswap32:
		return rewriteValueRISCV64_OpBswap32(v)
	case OpBswap64:
		v.Op = OpRISCV64REV8
		return true
	case OpClosureCall:
		v.Op = OpRISCV64CALLclosure
		return true
	case OpCom16:
		v.Op = OpRISCV64NOT
		return true
	case OpCom32:
		v.Op = OpRISCV64NOT
		return true
	case OpCom64:
		v.Op = OpRISCV64NOT
		return true
	case OpCom8:
		v.Op = OpRISCV64NOT
		return true
	case OpCondSelect:
		return rewriteValueRISCV64_OpCondSelect(v)
	case OpConst16:
		return rewriteValueRISCV64_OpConst16(v)
	case OpConst32:
		return rewriteValueRISCV64_OpConst32(v)
	case OpConst32F:
		v.Op = OpRISCV64FMOVFconst
		return true
	case OpConst64:
		return rewriteValueRISCV64_OpConst64(v)
	case OpConst64F:
		v.Op = OpRISCV64FMOVDconst
		return true
	case OpConst8:
		return rewriteValueRISCV64_OpConst8(v)
	case OpConstBool:
		return rewriteValueRISCV64_OpConstBool(v)
	case OpConstNil:
		return rewriteValueRISCV64_OpConstNil(v)
	case OpCopysign:
		v.Op = OpRISCV64FSGNJD
		return true
	case OpCtz16:
		return rewriteValueRISCV64_OpCtz16(v)
	case OpCtz16NonZero:
		v.Op = OpCtz64
		return true
	case OpCtz32:
		v.Op = OpRISCV64CTZW
		return true
	case OpCtz32NonZero:
		v.Op = OpCtz64
		return true
	case OpCtz64:
		v.Op = OpRISCV64CTZ
		return true
	case OpCtz64NonZero:
		v.Op = OpCtz64
		return true
	case OpCtz8:
		return rewriteValueRISCV64_OpCtz8(v)
	case OpCtz8NonZero:
		v.Op = OpCtz64
		return true
	case OpCvt32Fto32:
		v.Op = OpRISCV64FCVTWS
		return true
	case OpCvt32Fto64:
		v.Op = OpRISCV64FCVTLS
		return true
	case OpCvt32Fto64F:
		v.Op = OpRISCV64FCVTDS
		return true
	case OpCvt32to32F:
		v.Op = OpRISCV64FCVTSW
		return true
	case OpCvt32to64F:
		v.Op = OpRISCV64FCVTDW
		return true
	case OpCvt64Fto32:
		v.Op = OpRISCV64FCVTWD
		return true
	case OpCvt64Fto32F:
		v.Op = OpRISCV64FCVTSD
		return true
	case OpCvt64Fto64:
		v.Op = OpRISCV64FCVTLD
		return true
	case OpCvt64to32F:
		v.Op = OpRISCV64FCVTSL
		return true
	case OpCvt64to64F:
		v.Op = OpRISCV64FCVTDL
		return true
	case OpCvtBoolToUint8:
		v.Op = OpCopy
		return true
	case OpDiv16:
		return rewriteValueRISCV64_OpDiv16(v)
	case OpDiv16u:
		return rewriteValueRISCV64_OpDiv16u(v)
	case OpDiv32:
		return rewriteValueRISCV64_OpDiv32(v)
	case OpDiv32F:
		v.Op = OpRISCV64FDIVS
		return true
	case OpDiv32u:
		v.Op = OpRISCV64DIVUW
		return true
	case OpDiv64:
		return rewriteValueRISCV64_OpDiv64(v)
	case OpDiv64F:
		v.Op = OpRISCV64FDIVD
		return true
	case OpDiv64u:
		v.Op = OpRISCV64DIVU
		return true
	case OpDiv8:
		return rewriteValueRISCV64_OpDiv8(v)
	case OpDiv8u:
		return rewriteValueRISCV64_OpDiv8u(v)
	case OpEq16:
		return rewriteValueRISCV64_OpEq16(v)
	case OpEq32:
		return rewriteValueRISCV64_OpEq32(v)
	case OpEq32F:
		v.Op = OpRISCV64FEQS
		return true
	case OpEq64:
		return rewriteValueRISCV64_OpEq64(v)
	case OpEq64F:
		v.Op = OpRISCV64FEQD
		return true
	case OpEq8:
		return rewriteValueRISCV64_OpEq8(v)
	case OpEqB:
		return rewriteValueRISCV64_OpEqB(v)
	case OpEqPtr:
		return rewriteValueRISCV64_OpEqPtr(v)
	case OpFMA:
		v.Op = OpRISCV64FMADDD
		return true
	case OpGetCallerPC:
		v.Op = OpRISCV64LoweredGetCallerPC
		return true
	case OpGetCallerSP:
		v.Op = OpRISCV64LoweredGetCallerSP
		return true
	case OpGetClosurePtr:
		v.Op = OpRISCV64LoweredGetClosurePtr
		return true
	case OpHmul32:
		return rewriteValueRISCV64_OpHmul32(v)
	case OpHmul32u:
		return rewriteValueRISCV64_OpHmul32u(v)
	case OpHmul64:
		v.Op = OpRISCV64MULH
		return true
	case OpHmul64u:
		v.Op = OpRISCV64MULHU
		return true
	case OpInterCall:
		v.Op = OpRISCV64CALLinter
		return true
	case OpIsInBounds:
		v.Op = OpLess64U
		return true
	case OpIsNonNil:
		v.Op = OpRISCV64SNEZ
		return true
	case OpIsSliceInBounds:
		v.Op = OpLeq64U
		return true
	case OpLeq16:
		return rewriteValueRISCV64_OpLeq16(v)
	case OpLeq16U:
		return rewriteValueRISCV64_OpLeq16U(v)
	case OpLeq32:
		return rewriteValueRISCV64_OpLeq32(v)
	case OpLeq32F:
		v.Op = OpRISCV64FLES
		return true
	case OpLeq32U:
		return rewriteValueRISCV64_OpLeq32U(v)
	case OpLeq64:
		return rewriteValueRISCV64_OpLeq64(v)
	case OpLeq64F:
		v.Op = OpRISCV64FLED
		return true
	case OpLeq64U:
		return rewriteValueRISCV64_OpLeq64U(v)
	case OpLeq8:
		return rewriteValueRISCV64_OpLeq8(v)
	case OpLeq8U:
		return rewriteValueRISCV64_OpLeq8U(v)
	case OpLess16:
		return rewriteValueRISCV64_OpLess16(v)
	case OpLess16U:
		return rewriteValueRISCV64_OpLess16U(v)
	case OpLess32:
		return rewriteValueRISCV64_OpLess32(v)
	case OpLess32F:
		v.Op = OpRISCV64FLTS
		return true
	case OpLess32U:
		return rewriteValueRISCV64_OpLess32U(v)
	case OpLess64:
		v.Op = OpRISCV64SLT
		return true
	case OpLess64F:
		v.Op = OpRISCV64FLTD
		return true
	case OpLess64U:
		v.Op = OpRISCV64SLTU
		return true
	case OpLess8:
		return rewriteValueRISCV64_OpLess8(v)
	case OpLess8U:
		return rewriteValueRISCV64_OpLess8U(v)
	case OpLoad:
		return rewriteValueRISCV64_OpLoad(v)
	case OpLocalAddr:
		return rewriteValueRISCV64_OpLocalAddr(v)
	case OpLsh16x16:
		return rewriteValueRISCV64_OpLsh16x16(v)
	case OpLsh16x32:
		return rewriteValueRISCV64_OpLsh16x32(v)
	case OpLsh16x64:
		return rewriteValueRISCV64_OpLsh16x64(v)
	case OpLsh16x8:
		return rewriteValueRISCV64_OpLsh16x8(v)
	case OpLsh32x16:
		return rewriteValueRISCV64_OpLsh32x16(v)
	case OpLsh32x32:
		return rewriteValueRISCV64_OpLsh32x32(v)
	case OpLsh32x64:
		return rewriteValueRISCV64_OpLsh32x64(v)
	case OpLsh32x8:
		return rewriteValueRISCV64_OpLsh32x8(v)
	case OpLsh64x16:
		return rewriteValueRISCV64_OpLsh64x16(v)
	case OpLsh64x32:
		return rewriteValueRISCV64_OpLsh64x32(v)
	case OpLsh64x64:
		return rewriteValueRISCV64_OpLsh64x64(v)
	case OpLsh64x8:
		return rewriteValueRISCV64_OpLsh64x8(v)
	case OpLsh8x16:
		return rewriteValueRISCV64_OpLsh8x16(v)
	case OpLsh8x32:
		return rewriteValueRISCV64_OpLsh8x32(v)
	case OpLsh8x64:
		return rewriteValueRISCV64_OpLsh8x64(v)
	case OpLsh8x8:
		return rewriteValueRISCV64_OpLsh8x8(v)
	case OpMax32F:
		v.Op = OpRISCV64LoweredFMAXS
		return true
	case OpMax64:
		return rewriteValueRISCV64_OpMax64(v)
	case OpMax64F:
		v.Op = OpRISCV64LoweredFMAXD
		return true
	case OpMax64u:
		return rewriteValueRISCV64_OpMax64u(v)
	case OpMin32F:
		v.Op = OpRISCV64LoweredFMINS
		return true
	case OpMin64:
		return rewriteValueRISCV64_OpMin64(v)
	case OpMin64F:
		v.Op = OpRISCV64LoweredFMIND
		return true
	case OpMin64u:
		return rewriteValueRISCV64_OpMin64u(v)
	case OpMod16:
		return rewriteValueRISCV64_OpMod16(v)
	case OpMod16u:
		return rewriteValueRISCV64_OpMod16u(v)
	case OpMod32:
		return rewriteValueRISCV64_OpMod32(v)
	case OpMod32u:
		v.Op = OpRISCV64REMUW
		return true
	case OpMod64:
		return rewriteValueRISCV64_OpMod64(v)
	case OpMod64u:
		v.Op = OpRISCV64REMU
		return true
	case OpMod8:
		return rewriteValueRISCV64_OpMod8(v)
	case OpMod8u:
		return rewriteValueRISCV64_OpMod8u(v)
	case OpMove:
		return rewriteValueRISCV64_OpMove(v)
	case OpMul16:
		v.Op = OpRISCV64MULW
		return true
	case OpMul32:
		v.Op = OpRISCV64MULW
		return true
	case OpMul32F:
		v.Op = OpRISCV64FMULS
		return true
	case OpMul64:
		v.Op = OpRISCV64MUL
		return true
	case OpMul64F:
		v.Op = OpRISCV64FMULD
		return true
	case OpMul64uhilo:
		v.Op = OpRISCV64LoweredMuluhilo
		return true
	case OpMul64uover:
		v.Op = OpRISCV64LoweredMuluover
		return true
	case OpMul8:
		v.Op = OpRISCV64MULW
		return true
	case OpNeg16:
		v.Op = OpRISCV64NEG
		return true
	case OpNeg32:
		v.Op = OpRISCV64NEG
		return true
	case OpNeg32F:
		v.Op = OpRISCV64FNEGS
		return true
	case OpNeg64:
		v.Op = OpRISCV64NEG
		return true
	case OpNeg64F:
		v.Op = OpRISCV64FNEGD
		return true
	case OpNeg8:
		v.Op = OpRISCV64NEG
		return true
	case OpNeq16:
		return rewriteValueRISCV64_OpNeq16(v)
	case OpNeq32:
		return rewriteValueRISCV64_OpNeq32(v)
	case OpNeq32F:
		v.Op = OpRISCV64FNES
		return true
	case OpNeq64:
		return rewriteValueRISCV64_OpNeq64(v)
	case OpNeq64F:
		v.Op = OpRISCV64FNED
		return true
	case OpNeq8:
		return rewriteValueRISCV64_OpNeq8(v)
	case OpNeqB:
		return rewriteValueRISCV64_OpNeqB(v)
	case OpNeqPtr:
		return rewriteValueRISCV64_OpNeqPtr(v)
	case OpNilCheck:
		v.Op = OpRISCV64LoweredNilCheck
		return true
	case OpNot:
		v.Op = OpRISCV64SEQZ
		return true
	case OpOffPtr:
		return rewriteValueRISCV64_OpOffPtr(v)
	case OpOr16:
		v.Op = OpRISCV64OR
		return true
	case OpOr32:
		v.Op = OpRISCV64OR
		return true
	case OpOr64:
		v.Op = OpRISCV64OR
		return true
	case OpOr8:
		v.Op = OpRISCV64OR
		return true
	case OpOrB:
		v.Op = OpRISCV64OR
		return true
	case OpPanicBounds:
		v.Op = OpRISCV64LoweredPanicBoundsRR
		return true
	case OpPopCount16:
		return rewriteValueRISCV64_OpPopCount16(v)
	case OpPopCount32:
		v.Op = OpRISCV64CPOPW
		return true
	case OpPopCount64:
		v.Op = OpRISCV64CPOP
		return true
	case OpPopCount8:
		return rewriteValueRISCV64_OpPopCount8(v)
	case OpPubBarrier:
		v.Op = OpRISCV64LoweredPubBarrier
		return true
	case OpRISCV64ADD:
		return rewriteValueRISCV64_OpRISCV64ADD(v)
	case OpRISCV64ADDI:
		return rewriteValueRISCV64_OpRISCV64ADDI(v)
	case OpRISCV64AND:
		return rewriteValueRISCV64_OpRISCV64AND(v)
	case OpRISCV64ANDI:
		return rewriteValueRISCV64_OpRISCV64ANDI(v)
	case OpRISCV64CZEROEQZ:
		return rewriteValueRISCV64_OpRISCV64CZEROEQZ(v)
	case OpRISCV64CZERONEZ:
		return rewriteValueRISCV64_OpRISCV64CZERONEZ(v)
	case OpRISCV64FADDD:
		return rewriteValueRISCV64_OpRISCV64FADDD(v)
	case OpRISCV64FADDS:
		return rewriteValueRISCV64_OpRISCV64FADDS(v)
	case OpRISCV64FCVTSD:
		return rewriteValueRISCV64_OpRISCV64FCVTSD(v)
	case OpRISCV64FEQD:
		return rewriteValueRISCV64_OpRISCV64FEQD(v)
	case OpRISCV64FLED:
		return rewriteValueRISCV64_OpRISCV64FLED(v)
	case OpRISCV64FLTD:
		return rewriteValueRISCV64_OpRISCV64FLTD(v)
	case OpRISCV64FMADDD:
		return rewriteValueRISCV64_OpRISCV64FMADDD(v)
	case OpRISCV64FMADDS:
		return rewriteValueRISCV64_OpRISCV64FMADDS(v)
	case OpRISCV64FMOVDload:
		return rewriteValueRISCV64_OpRISCV64FMOVDload(v)
	case OpRISCV64FMOVDstore:
		return rewriteValueRISCV64_OpRISCV64FMOVDstore(v)
	case OpRISCV64FMOVWload:
		return rewriteValueRISCV64_OpRISCV64FMOVWload(v)
	case OpRISCV64FMOVWstore:
		return rewriteValueRISCV64_OpRISCV64FMOVWstore(v)
	case OpRISCV64FMSUBD:
		return rewriteValueRISCV64_OpRISCV64FMSUBD(v)
	case OpRISCV64FMSUBS:
		return rewriteValueRISCV64_OpRISCV64FMSUBS(v)
	case OpRISCV64FNED:
		return rewriteValueRISCV64_OpRISCV64FNED(v)
	case OpRISCV64FNMADDD:
		return rewriteValueRISCV64_OpRISCV64FNMADDD(v)
	case OpRISCV64FNMADDS:
		return rewriteValueRISCV64_OpRISCV64FNMADDS(v)
	case OpRISCV64FNMSUBD:
		return rewriteValueRISCV64_OpRISCV64FNMSUBD(v)
	case OpRISCV64FNMSUBS:
		return rewriteValueRISCV64_OpRISCV64FNMSUBS(v)
	case OpRISCV64FSUBD:
		return rewriteValueRISCV64_OpRISCV64FSUBD(v)
	case OpRISCV64FSUBS:
		return rewriteValueRISCV64_OpRISCV64FSUBS(v)
	case OpRISCV64LoweredPanicBoundsCR:
		return rewriteValueRISCV64_OpRISCV64LoweredPanicBoundsCR(v)
	case OpRISCV64LoweredPanicBoundsRC:
		return rewriteValueRISCV64_OpRISCV64LoweredPanicBoundsRC(v)
	case OpRISCV64LoweredPanicBoundsRR:
		return rewriteValueRISCV64_OpRISCV64LoweredPanicBoundsRR(v)
	case OpRISCV64MOVBUload:
		return rewriteValueRISCV64_OpRISCV64MOVBUload(v)
	case OpRISCV64MOVBUreg:
		return rewriteValueRISCV64_OpRISCV64MOVBUreg(v)
	case OpRISCV64MOVBload:
		return rewriteValueRISCV64_OpRISCV64MOVBload(v)
	case OpRISCV64MOVBreg:
		return rewriteValueRISCV64_OpRISCV64MOVBreg(v)
	case OpRISCV64MOVBstore:
		return rewriteValueRISCV64_OpRISCV64MOVBstore(v)
	case OpRISCV64MOVBstorezero:
		return rewriteValueRISCV64_OpRISCV64MOVBstorezero(v)
	case OpRISCV64MOVDload:
		return rewriteValueRISCV64_OpRISCV64MOVDload(v)
	case OpRISCV64MOVDnop:
		return rewriteValueRISCV64_OpRISCV64MOVDnop(v)
	case OpRISCV64MOVDreg:
		return rewriteValueRISCV64_OpRISCV64MOVDreg(v)
	case OpRISCV64MOVDstore:
		return rewriteValueRISCV64_OpRISCV64MOVDstore(v)
	case OpRISCV64MOVDstorezero:
		return rewriteValueRISCV64_OpRISCV64MOVDstorezero(v)
	case OpRISCV64MOVHUload:
		return rewriteValueRISCV64_OpRISCV64MOVHUload(v)
	case OpRISCV64MOVHUreg:
		return rewriteValueRISCV64_OpRISCV64MOVHUreg(v)
	case OpRISCV64MOVHload:
		return rewriteValueRISCV64_OpRISCV64MOVHload(v)
	case OpRISCV64MOVHreg:
		return rewriteValueRISCV64_OpRISCV64MOVHreg(v)
	case OpRISCV64MOVHstore:
		return rewriteValueRISCV64_OpRISCV64MOVHstore(v)
	case OpRISCV64MOVHstorezero:
		return rewriteValueRISCV64_OpRISCV64MOVHstorezero(v)
	case OpRISCV64MOVWUload:
		return rewriteValueRISCV64_OpRISCV64MOVWUload(v)
	case OpRISCV64MOVWUreg:
		return rewriteValueRISCV64_OpRISCV64MOVWUreg(v)
	case OpRISCV64MOVWload:
		return rewriteValueRISCV64_OpRISCV64MOVWload(v)
	case OpRISCV64MOVWreg:
		return rewriteValueRISCV64_OpRISCV64MOVWreg(v)
	case OpRISCV64MOVWstore:
		return rewriteValueRISCV64_OpRISCV64MOVWstore(v)
	case OpRISCV64MOVWstorezero:
		return rewriteValueRISCV64_OpRISCV64MOVWstorezero(v)
	case OpRISCV64NEG:
		return rewriteValueRISCV64_OpRISCV64NEG(v)
	case OpRISCV64NEGW:
		return rewriteValueRISCV64_OpRISCV64NEGW(v)
	case OpRISCV64OR:
		return rewriteValueRISCV64_OpRISCV64OR(v)
	case OpRISCV64ORI:
		return rewriteValueRISCV64_OpRISCV64ORI(v)
	case OpRISCV64ORN:
		return rewriteValueRISCV64_OpRISCV64ORN(v)
	case OpRISCV64ROL:
		return rewriteValueRISCV64_OpRISCV64ROL(v)
	case OpRISCV64ROLW:
		return rewriteValueRISCV64_OpRISCV64ROLW(v)
	case OpRISCV64ROR:
		return rewriteValueRISCV64_OpRISCV64ROR(v)
	case OpRISCV64RORW:
		return rewriteValueRISCV64_OpRISCV64RORW(v)
	case OpRISCV64SEQZ:
		return rewriteValueRISCV64_OpRISCV64SEQZ(v)
	case OpRISCV64SLL:
		return rewriteValueRISCV64_OpRISCV64SLL(v)
	case OpRISCV64SLLI:
		return rewriteValueRISCV64_OpRISCV64SLLI(v)
	case OpRISCV64SLLW:
		return rewriteValueRISCV64_OpRISCV64SLLW(v)
	case OpRISCV64SLT:
		return rewriteValueRISCV64_OpRISCV64SLT(v)
	case OpRISCV64SLTI:
		return rewriteValueRISCV64_OpRISCV64SLTI(v)
	case OpRISCV64SLTIU:
		return rewriteValueRISCV64_OpRISCV64SLTIU(v)
	case OpRISCV64SLTU:
		return rewriteValueRISCV64_OpRISCV64SLTU(v)
	case OpRISCV64SNEZ:
		return rewriteValueRISCV64_OpRISCV64SNEZ(v)
	case OpRISCV64SRA:
		return rewriteValueRISCV64_OpRISCV64SRA(v)
	case OpRISCV64SRAI:
		return rewriteValueRISCV64_OpRISCV64SRAI(v)
	case OpRISCV64SRAW:
		return rewriteValueRISCV64_OpRISCV64SRAW(v)
	case OpRISCV64SRL:
		return rewriteValueRISCV64_OpRISCV64SRL(v)
	case OpRISCV64SRLI:
		return rewriteValueRISCV64_OpRISCV64SRLI(v)
	case OpRISCV64SRLW:
		return rewriteValueRISCV64_OpRISCV64SRLW(v)
	case OpRISCV64SUB:
		return rewriteValueRISCV64_OpRISCV64SUB(v)
	case OpRISCV64SUBW:
		return rewriteValueRISCV64_OpRISCV64SUBW(v)
	case OpRISCV64XOR:
		return rewriteValueRISCV64_OpRISCV64XOR(v)
	case OpRotateLeft16:
		return rewriteValueRISCV64_OpRotateLeft16(v)
	case OpRotateLeft32:
		v.Op = OpRISCV64ROLW
		return true
	case OpRotateLeft64:
		v.Op = OpRISCV64ROL
		return true
	case OpRotateLeft8:
		return rewriteValueRISCV64_OpRotateLeft8(v)
	case OpRound32F:
		v.Op = OpRISCV64LoweredRound32F
		return true
	case OpRound64F:
		v.Op = OpRISCV64LoweredRound64F
		return true
	case OpRsh16Ux16:
		return rewriteValueRISCV64_OpRsh16Ux16(v)
	case OpRsh16Ux32:
		return rewriteValueRISCV64_OpRsh16Ux32(v)
	case OpRsh16Ux64:
		return rewriteValueRISCV64_OpRsh16Ux64(v)
	case OpRsh16Ux8:
		return rewriteValueRISCV64_OpRsh16Ux8(v)
	case OpRsh16x16:
		return rewriteValueRISCV64_OpRsh16x16(v)
	case OpRsh16x32:
		return rewriteValueRISCV64_OpRsh16x32(v)
	case OpRsh16x64:
		return rewriteValueRISCV64_OpRsh16x64(v)
	case OpRsh16x8:
		return rewriteValueRISCV64_OpRsh16x8(v)
	case OpRsh32Ux16:
		return rewriteValueRISCV64_OpRsh32Ux16(v)
	case OpRsh32Ux32:
		return rewriteValueRISCV64_OpRsh32Ux32(v)
	case OpRsh32Ux64:
		return rewriteValueRISCV64_OpRsh32Ux64(v)
	case OpRsh32Ux8:
		return rewriteValueRISCV64_OpRsh32Ux8(v)
	case OpRsh32x16:
		return rewriteValueRISCV64_OpRsh32x16(v)
	case OpRsh32x32:
		return rewriteValueRISCV64_OpRsh32x32(v)
	case OpRsh32x64:
		return rewriteValueRISCV64_OpRsh32x64(v)
	case OpRsh32x8:
		return rewriteValueRISCV64_OpRsh32x8(v)
	case OpRsh64Ux16:
		return rewriteValueRISCV64_OpRsh64Ux16(v)
	case OpRsh64Ux32:
		return rewriteValueRISCV64_OpRsh64Ux32(v)
	case OpRsh64Ux64:
		return rewriteValueRISCV64_OpRsh64Ux64(v)
	case OpRsh64Ux8:
		return rewriteValueRISCV64_OpRsh64Ux8(v)
	case OpRsh64x16:
		return rewriteValueRISCV64_OpRsh64x16(v)
	case OpRsh64x32:
		return rewriteValueRISCV64_OpRsh64x32(v)
	case OpRsh64x64:
		return rewriteValueRISCV64_OpRsh64x64(v)
	case OpRsh64x8:
		return rewriteValueRISCV64_OpRsh64x8(v)
	case OpRsh8Ux16:
		return rewriteValueRISCV64_OpRsh8Ux16(v)
	case OpRsh8Ux32:
		return rewriteValueRISCV64_OpRsh8Ux32(v)
	case OpRsh8Ux64:
		return rewriteValueRISCV64_OpRsh8Ux64(v)
	case OpRsh8Ux8:
		return rewriteValueRISCV64_OpRsh8Ux8(v)
	case OpRsh8x16:
		return rewriteValueRISCV64_OpRsh8x16(v)
	case OpRsh8x32:
		return rewriteValueRISCV64_OpRsh8x32(v)
	case OpRsh8x64:
		return rewriteValueRISCV64_OpRsh8x64(v)
	case OpRsh8x8:
		return rewriteValueRISCV64_OpRsh8x8(v)
	case OpSelect0:
		return rewriteValueRISCV64_OpSelect0(v)
	case OpSelect1:
		return rewriteValueRISCV64_OpSelect1(v)
	case OpSignExt16to32:
		v.Op = OpRISCV64MOVHreg
		return true
	case OpSignExt16to64:
		v.Op = OpRISCV64MOVHreg
		return true
	case OpSignExt32to64:
		v.Op = OpRISCV64MOVWreg
		return true
	case OpSignExt8to16:
		v.Op = OpRISCV64MOVBreg
		return true
	case OpSignExt8to32:
		v.Op = OpRISCV64MOVBreg
		return true
	case OpSignExt8to64:
		v.Op = OpRISCV64MOVBreg
		return true
	case OpSlicemask:
		return rewriteValueRISCV64_OpSlicemask(v)
	case OpSqrt:
		v.Op = OpRISCV64FSQRTD
		return true
	case OpSqrt32:
		v.Op = OpRISCV64FSQRTS
		return true
	case OpStaticCall:
		v.Op = OpRISCV64CALLstatic
		return true
	case OpStore:
		return rewriteValueRISCV64_OpStore(v)
	case OpSub16:
		v.Op = OpRISCV64SUB
		return true
	case OpSub32:
		v.Op = OpRISCV64SUB
		return true
	case OpSub32F:
		v.Op = OpRISCV64FSUBS
		return true
	case OpSub64:
		v.Op = OpRISCV64SUB
		return true
	case OpSub64F:
		v.Op = OpRISCV64FSUBD
		return true
	case OpSub8:
		v.Op = OpRISCV64SUB
		return true
	case OpSubPtr:
		v.Op = OpRISCV64SUB
		return true
	case OpTailCall:
		v.Op = OpRISCV64CALLtail
		return true
	case OpTailCallInter:
		v.Op = OpRISCV64CALLtailinter
		return true
	case OpTrunc16to8:
		v.Op = OpCopy
		return true
	case OpTrunc32to16:
		v.Op = OpCopy
		return true
	case OpTrunc32to8:
		v.Op = OpCopy
		return true
	case OpTrunc64to16:
		v.Op = OpCopy
		return true
	case OpTrunc64to32:
		v.Op = OpCopy
		return true
	case OpTrunc64to8:
		v.Op = OpCopy
		return true
	case OpWB:
		v.Op = OpRISCV64LoweredWB
		return true
	case OpXor16:
		v.Op = OpRISCV64XOR
		return true
	case OpXor32:
		v.Op = OpRISCV64XOR
		return true
	case OpXor64:
		v.Op = OpRISCV64XOR
		return true
	case OpXor8:
		v.Op = OpRISCV64XOR
		return true
	case OpZero:
		return rewriteValueRISCV64_OpZero(v)
	case OpZeroExt16to32:
		v.Op = OpRISCV64MOVHUreg
		return true
	case OpZeroExt16to64:
		v.Op = OpRISCV64MOVHUreg
		return true
	case OpZeroExt32to64:
		v.Op = OpRISCV64MOVWUreg
		return true
	case OpZeroExt8to16:
		v.Op = OpRISCV64MOVBUreg
		return true
	case OpZeroExt8to32:
		v.Op = OpRISCV64MOVBUreg
		return true
	case OpZeroExt8to64:
		v.Op = OpRISCV64MOVBUreg
		return true
	}
	// No rule matched this opcode; leave v unchanged.
	return false
}
   869  func rewriteValueRISCV64_OpAddr(v *Value) bool {
   870  	v_0 := v.Args[0]
   871  	// match: (Addr {sym} base)
   872  	// result: (MOVaddr {sym} [0] base)
   873  	for {
   874  		sym := auxToSym(v.Aux)
   875  		base := v_0
   876  		v.reset(OpRISCV64MOVaddr)
   877  		v.AuxInt = int32ToAuxInt(0)
   878  		v.Aux = symToAux(sym)
   879  		v.AddArg(base)
   880  		return true
   881  	}
   882  }
   883  func rewriteValueRISCV64_OpAtomicAnd8(v *Value) bool {
   884  	v_2 := v.Args[2]
   885  	v_1 := v.Args[1]
   886  	v_0 := v.Args[0]
   887  	b := v.Block
   888  	typ := &b.Func.Config.Types
   889  	// match: (AtomicAnd8 ptr val mem)
   890  	// result: (LoweredAtomicAnd32 (ANDI <typ.Uintptr> [^3] ptr) (NOT <typ.UInt32> (SLL <typ.UInt32> (XORI <typ.UInt32> [0xff] (ZeroExt8to32 val)) (SLLI <typ.UInt64> [3] (ANDI <typ.UInt64> [3] ptr)))) mem)
   891  	for {
   892  		ptr := v_0
   893  		val := v_1
   894  		mem := v_2
   895  		v.reset(OpRISCV64LoweredAtomicAnd32)
   896  		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Uintptr)
   897  		v0.AuxInt = int64ToAuxInt(^3)
   898  		v0.AddArg(ptr)
   899  		v1 := b.NewValue0(v.Pos, OpRISCV64NOT, typ.UInt32)
   900  		v2 := b.NewValue0(v.Pos, OpRISCV64SLL, typ.UInt32)
   901  		v3 := b.NewValue0(v.Pos, OpRISCV64XORI, typ.UInt32)
   902  		v3.AuxInt = int64ToAuxInt(0xff)
   903  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   904  		v4.AddArg(val)
   905  		v3.AddArg(v4)
   906  		v5 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
   907  		v5.AuxInt = int64ToAuxInt(3)
   908  		v6 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.UInt64)
   909  		v6.AuxInt = int64ToAuxInt(3)
   910  		v6.AddArg(ptr)
   911  		v5.AddArg(v6)
   912  		v2.AddArg2(v3, v5)
   913  		v1.AddArg(v2)
   914  		v.AddArg3(v0, v1, mem)
   915  		return true
   916  	}
   917  }
   918  func rewriteValueRISCV64_OpAtomicCompareAndSwap32(v *Value) bool {
   919  	v_3 := v.Args[3]
   920  	v_2 := v.Args[2]
   921  	v_1 := v.Args[1]
   922  	v_0 := v.Args[0]
   923  	b := v.Block
   924  	typ := &b.Func.Config.Types
   925  	// match: (AtomicCompareAndSwap32 ptr old new mem)
   926  	// result: (LoweredAtomicCas32 ptr (SignExt32to64 old) new mem)
   927  	for {
   928  		ptr := v_0
   929  		old := v_1
   930  		new := v_2
   931  		mem := v_3
   932  		v.reset(OpRISCV64LoweredAtomicCas32)
   933  		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
   934  		v0.AddArg(old)
   935  		v.AddArg4(ptr, v0, new, mem)
   936  		return true
   937  	}
   938  }
// rewriteValueRISCV64_OpAtomicOr8 widens an 8-bit atomic OR into a 32-bit
// atomic OR on the enclosing aligned word: the address is rounded down to a
// 4-byte boundary (ANDI [^3]) and the zero-extended byte is shifted into
// position by 8*(ptr&3) bits (SLLI [3] of (ANDI [3] ptr)).
func rewriteValueRISCV64_OpAtomicOr8(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicOr8 ptr val mem)
	// result: (LoweredAtomicOr32 (ANDI <typ.Uintptr> [^3] ptr) (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLI <typ.UInt64> [3] (ANDI <typ.UInt64> [3] ptr))) mem)
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		v.reset(OpRISCV64LoweredAtomicOr32)
		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Uintptr)
		v0.AuxInt = int64ToAuxInt(^3)
		v0.AddArg(ptr)
		v1 := b.NewValue0(v.Pos, OpRISCV64SLL, typ.UInt32)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v2.AddArg(val)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(3)
		v4 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(3)
		v4.AddArg(ptr)
		v3.AddArg(v4)
		v1.AddArg2(v2, v3)
		v.AddArg3(v0, v1, mem)
		return true
	}
}
// rewriteValueRISCV64_OpAvg64u lowers the unsigned 64-bit average using the
// overflow-free identity (x+y)/2 == (x>>1) + (y>>1) + (x&y&1): the halves
// are summed and the carry bit lost by the two right shifts is added back.
func rewriteValueRISCV64_OpAvg64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Avg64u <t> x y)
	// result: (ADD (ADD <t> (SRLI <t> [1] x) (SRLI <t> [1] y)) (ANDI <t> [1] (AND <t> x y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64ADD)
		v0 := b.NewValue0(v.Pos, OpRISCV64ADD, t)
		v1 := b.NewValue0(v.Pos, OpRISCV64SRLI, t)
		v1.AuxInt = int64ToAuxInt(1)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpRISCV64SRLI, t)
		v2.AuxInt = int64ToAuxInt(1)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpRISCV64ANDI, t)
		v3.AuxInt = int64ToAuxInt(1)
		v4 := b.NewValue0(v.Pos, OpRISCV64AND, t)
		v4.AddArg2(x, y)
		v3.AddArg(v4)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueRISCV64_OpBitLen16 reduces the 16-bit bit-length to the 64-bit
// case by zero-extending the operand; the generic BitLen64 is then rewritten
// again by rewriteValueRISCV64_OpBitLen64.
func rewriteValueRISCV64_OpBitLen16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (BitLen16 x)
	// result: (BitLen64 (ZeroExt16to64 x))
	for {
		x := v_0
		v.reset(OpBitLen64)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpBitLen32 lowers the 32-bit bit-length as
// 32 - CLZW(x), using the word-sized count-leading-zeros instruction
// (Zbb extension).
func rewriteValueRISCV64_OpBitLen32(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (BitLen32 <t> x)
	// result: (SUB (MOVDconst [32]) (CLZW <t> x))
	for {
		t := v.Type
		x := v_0
		v.reset(OpRISCV64SUB)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(32)
		v1 := b.NewValue0(v.Pos, OpRISCV64CLZW, t)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpBitLen64 lowers the 64-bit bit-length as
// 64 - CLZ(x), using the count-leading-zeros instruction (Zbb extension).
func rewriteValueRISCV64_OpBitLen64(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (BitLen64 <t> x)
	// result: (SUB (MOVDconst [64]) (CLZ <t> x))
	for {
		t := v.Type
		x := v_0
		v.reset(OpRISCV64SUB)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(64)
		v1 := b.NewValue0(v.Pos, OpRISCV64CLZ, t)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpBitLen8 reduces the 8-bit bit-length to the 64-bit
// case by zero-extending the operand; the generic BitLen64 is then rewritten
// again by rewriteValueRISCV64_OpBitLen64.
func rewriteValueRISCV64_OpBitLen8(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (BitLen8 x)
	// result: (BitLen64 (ZeroExt8to64 x))
	for {
		x := v_0
		v.reset(OpBitLen64)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpBswap16 lowers the 16-bit byte swap with the 64-bit
// byte-reverse instruction (REV8, Zbb extension): after the full reverse the
// two interesting bytes sit in the top 16 bits, so a logical right shift by
// 48 brings them back down.
func rewriteValueRISCV64_OpBswap16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Bswap16 <t> x)
	// result: (SRLI [48] (REV8 <t> x))
	for {
		t := v.Type
		x := v_0
		v.reset(OpRISCV64SRLI)
		v.AuxInt = int64ToAuxInt(48)
		v0 := b.NewValue0(v.Pos, OpRISCV64REV8, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpBswap32 lowers the 32-bit byte swap with the 64-bit
// byte-reverse instruction (REV8, Zbb extension) followed by a logical right
// shift by 32 to move the reversed word into the low half.
func rewriteValueRISCV64_OpBswap32(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Bswap32 <t> x)
	// result: (SRLI [32] (REV8 <t> x))
	for {
		t := v.Type
		x := v_0
		v.reset(OpRISCV64SRLI)
		v.AuxInt = int64ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpRISCV64REV8, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpCondSelect lowers the branchless select with the
// conditional-zero ops (czero.eqz/czero.nez, Zicond extension): exactly one
// of the two CZERO results is non-zero depending on cond, and ORing them
// yields the selected value. cond is normalized with MOVBUreg so both CZERO
// ops test the same zero-extended byte (the MOVBUreg value v1 is shared).
func rewriteValueRISCV64_OpCondSelect(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CondSelect <t> x y cond)
	// result: (OR (CZEROEQZ <t> x (MOVBUreg <typ.UInt64> cond)) (CZERONEZ <t> y (MOVBUreg <typ.UInt64> cond)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		cond := v_2
		v.reset(OpRISCV64OR)
		v0 := b.NewValue0(v.Pos, OpRISCV64CZEROEQZ, t)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBUreg, typ.UInt64)
		v1.AddArg(cond)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRISCV64CZERONEZ, t)
		v2.AddArg2(y, v1)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueRISCV64_OpConst16 lowers a 16-bit constant to a MOVDconst,
// sign-extending the auxint to 64 bits.
func rewriteValueRISCV64_OpConst16(v *Value) bool {
	// match: (Const16 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt16(v.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueRISCV64_OpConst32 lowers a 32-bit constant to a MOVDconst,
// sign-extending the auxint to 64 bits.
func rewriteValueRISCV64_OpConst32(v *Value) bool {
	// match: (Const32 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt32(v.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueRISCV64_OpConst64 lowers a 64-bit constant directly to a
// MOVDconst with the same auxint.
func rewriteValueRISCV64_OpConst64(v *Value) bool {
	// match: (Const64 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt64(v.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueRISCV64_OpConst8 lowers an 8-bit constant to a MOVDconst,
// sign-extending the auxint to 64 bits.
func rewriteValueRISCV64_OpConst8(v *Value) bool {
	// match: (Const8 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt8(v.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueRISCV64_OpConstBool lowers a boolean constant to a MOVDconst
// of 0 or 1 (via b2i).
func rewriteValueRISCV64_OpConstBool(v *Value) bool {
	// match: (ConstBool [val])
	// result: (MOVDconst [int64(b2i(val))])
	for {
		val := auxIntToBool(v.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(b2i(val)))
		return true
	}
}
// rewriteValueRISCV64_OpConstNil lowers the nil-pointer constant to a zero
// MOVDconst.
func rewriteValueRISCV64_OpConstNil(v *Value) bool {
	// match: (ConstNil)
	// result: (MOVDconst [0])
	for {
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
}
// rewriteValueRISCV64_OpCtz16 lowers the 16-bit count-trailing-zeros with
// CTZW (Zbb extension); ORing in bit 16 first guarantees a set bit above the
// operand width, so a zero input yields 16 rather than the full word count.
func rewriteValueRISCV64_OpCtz16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Ctz16 x)
	// result: (CTZW (ORI <typ.UInt32> [1<<16] x))
	for {
		x := v_0
		v.reset(OpRISCV64CTZW)
		v0 := b.NewValue0(v.Pos, OpRISCV64ORI, typ.UInt32)
		v0.AuxInt = int64ToAuxInt(1 << 16)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpCtz8 lowers the 8-bit count-trailing-zeros with
// CTZW (Zbb extension); ORing in bit 8 first guarantees a set bit above the
// operand width, so a zero input yields 8 rather than the full word count.
func rewriteValueRISCV64_OpCtz8(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Ctz8 x)
	// result: (CTZW (ORI <typ.UInt32> [1<<8] x))
	for {
		x := v_0
		v.reset(OpRISCV64CTZW)
		v0 := b.NewValue0(v.Pos, OpRISCV64ORI, typ.UInt32)
		v0.AuxInt = int64ToAuxInt(1 << 8)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpDiv16 lowers signed 16-bit division to DIVW on
// sign-extended operands. It only fires when the op's boolean auxint is
// false; otherwise the generic op is left for other handling.
func rewriteValueRISCV64_OpDiv16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16 x y [false])
	// result: (DIVW (SignExt16to32 x) (SignExt16to32 y))
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVW)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpDiv16u lowers unsigned 16-bit division to DIVUW on
// zero-extended operands.
func rewriteValueRISCV64_OpDiv16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16u x y)
	// result: (DIVUW (ZeroExt16to32 x) (ZeroExt16to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVUW)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpDiv32 lowers signed 32-bit division to DIVW. It only
// fires when the op's boolean auxint is false.
func rewriteValueRISCV64_OpDiv32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Div32 x y [false])
	// result: (DIVW x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpDiv64 lowers signed 64-bit division to DIV. It only
// fires when the op's boolean auxint is false.
func rewriteValueRISCV64_OpDiv64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Div64 x y [false])
	// result: (DIV x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIV)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpDiv8 lowers signed 8-bit division to DIVW on
// sign-extended operands.
func rewriteValueRISCV64_OpDiv8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8 x y)
	// result: (DIVW (SignExt8to32 x) (SignExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVW)
		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpDiv8u lowers unsigned 8-bit division to DIVUW on
// zero-extended operands.
func rewriteValueRISCV64_OpDiv8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8u x y)
	// result: (DIVUW (ZeroExt8to32 x) (ZeroExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVUW)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpEq16 lowers 16-bit equality as "difference of the
// zero-extended operands is zero" (SEQZ of SUB).
func rewriteValueRISCV64_OpEq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq16 x y)
	// result: (SEQZ (SUB <x.Type> (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpEq32 lowers 32-bit equality as "difference of the
// widened operands is zero" (SEQZ of SUB), sign- or zero-extending to 64
// bits according to the operand's signedness. Each rule tries both argument
// orders because Eq32 is commutative (the inner _i0 loop swaps v_0/v_1).
func rewriteValueRISCV64_OpEq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq32 x y)
	// cond: x.Type.IsSigned()
	// result: (SEQZ (SUB <x.Type> (SignExt32to64 x) (SignExt32to64 y)))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			y := v_1
			if !(x.Type.IsSigned()) {
				continue
			}
			v.reset(OpRISCV64SEQZ)
			v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
			v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
			v1.AddArg(x)
			v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
			v2.AddArg(y)
			v0.AddArg2(v1, v2)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (Eq32 x y)
	// cond: !x.Type.IsSigned()
	// result: (SEQZ (SUB <x.Type> (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			y := v_1
			if !(!x.Type.IsSigned()) {
				continue
			}
			v.reset(OpRISCV64SEQZ)
			v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
			v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
			v1.AddArg(x)
			v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
			v2.AddArg(y)
			v0.AddArg2(v1, v2)
			v.AddArg(v0)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpEq64 lowers 64-bit equality as "x - y is zero"
// (SEQZ of SUB); no widening is needed at full register width.
func rewriteValueRISCV64_OpEq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Eq64 x y)
	// result: (SEQZ (SUB <x.Type> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpEq8 lowers 8-bit equality as "difference of the
// zero-extended operands is zero" (SEQZ of SUB).
func rewriteValueRISCV64_OpEq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq8 x y)
	// result: (SEQZ (SUB <x.Type> (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpEqB lowers boolean equality as "x - y is zero"
// (SEQZ of SUB in the Bool type); booleans are already 0/1 so no widening
// is required.
func rewriteValueRISCV64_OpEqB(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (EqB x y)
	// result: (SEQZ (SUB <typ.Bool> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpEqPtr lowers pointer equality as "x - y is zero"
// (SEQZ of SUB in the Uintptr type).
func rewriteValueRISCV64_OpEqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (EqPtr x y)
	// result: (SEQZ (SUB <typ.Uintptr> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Uintptr)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpHmul32 lowers the signed 32-bit high-multiply by
// doing a full 64-bit multiply of the sign-extended operands and taking the
// upper 32 bits with an arithmetic right shift.
func rewriteValueRISCV64_OpHmul32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul32 x y)
	// result: (SRAI [32] (MUL (SignExt32to64 x) (SignExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpRISCV64MUL, typ.Int64)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpHmul32u lowers the unsigned 32-bit high-multiply by
// doing a full 64-bit multiply of the zero-extended operands and taking the
// upper 32 bits with a logical right shift.
func rewriteValueRISCV64_OpHmul32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul32u x y)
	// result: (SRLI [32] (MUL (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SRLI)
		v.AuxInt = int64ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpRISCV64MUL, typ.Int64)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq16 rewrites x <= y as !(y < x); the generic Not
// and Less16 ops are then lowered by their own rewrite rules.
func rewriteValueRISCV64_OpLeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16 x y)
	// result: (Not (Less16 y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess16, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq16U rewrites unsigned x <= y as !(y < x); the
// generic Not and Less16U ops are then lowered by their own rewrite rules.
func rewriteValueRISCV64_OpLeq16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16U x y)
	// result: (Not (Less16U y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess16U, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq32 rewrites x <= y as !(y < x); the generic Not
// and Less32 ops are then lowered by their own rewrite rules.
func rewriteValueRISCV64_OpLeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32 x y)
	// result: (Not (Less32 y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess32, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq32U rewrites unsigned x <= y as !(y < x); the
// generic Not and Less32U ops are then lowered by their own rewrite rules.
func rewriteValueRISCV64_OpLeq32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32U x y)
	// result: (Not (Less32U y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess32U, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq64 rewrites x <= y as !(y < x); the generic Not
// and Less64 ops are then lowered by their own rewrite rules.
func rewriteValueRISCV64_OpLeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64 x y)
	// result: (Not (Less64 y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess64, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq64U rewrites unsigned x <= y as !(y < x); the
// generic Not and Less64U ops are then lowered by their own rewrite rules.
func rewriteValueRISCV64_OpLeq64U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64U x y)
	// result: (Not (Less64U y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess64U, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq8 rewrites x <= y as !(y < x); the generic Not
// and Less8 ops are then lowered by their own rewrite rules.
func rewriteValueRISCV64_OpLeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8 x y)
	// result: (Not (Less8 y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess8, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq8U rewrites unsigned x <= y as !(y < x); the
// generic Not and Less8U ops are then lowered by their own rewrite rules.
func rewriteValueRISCV64_OpLeq8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8U x y)
	// result: (Not (Less8U y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess8U, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLess16 lowers signed 16-bit less-than to SLT on
// sign-extended 64-bit operands.
func rewriteValueRISCV64_OpLess16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16 x y)
	// result: (SLT (SignExt16to64 x) (SignExt16to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLT)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLess16U lowers unsigned 16-bit less-than to SLTU on
// zero-extended 64-bit operands.
func rewriteValueRISCV64_OpLess16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16U x y)
	// result: (SLTU (ZeroExt16to64 x) (ZeroExt16to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLess32 lowers signed 32-bit less-than to SLT on
// sign-extended 64-bit operands.
func rewriteValueRISCV64_OpLess32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32 x y)
	// result: (SLT (SignExt32to64 x) (SignExt32to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLT)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLess32U lowers unsigned 32-bit less-than to SLTU on
// zero-extended 64-bit operands.
func rewriteValueRISCV64_OpLess32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32U x y)
	// result: (SLTU (ZeroExt32to64 x) (ZeroExt32to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLess8 lowers signed 8-bit less-than to SLT on
// sign-extended 64-bit operands.
func rewriteValueRISCV64_OpLess8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8 x y)
	// result: (SLT (SignExt8to64 x) (SignExt8to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLT)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLess8U lowers unsigned 8-bit less-than to SLTU on
// zero-extended 64-bit operands.
func rewriteValueRISCV64_OpLess8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8U x y)
	// result: (SLTU (ZeroExt8to64 x) (ZeroExt8to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLoad selects the RISCV64 load instruction for a
// generic Load based on the loaded type: width (1/2/4/8 bytes), signedness
// (sign- vs zero-extending variants), pointer-ness, and float vs integer.
// The rules are tried in order; exactly one should match for any loadable
// type, and the function returns false if none does.
func rewriteValueRISCV64_OpLoad(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Load <t> ptr mem)
	// cond: t.IsBoolean()
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(t.IsBoolean()) {
			break
		}
		v.reset(OpRISCV64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: ( is8BitInt(t) && t.IsSigned())
	// result: (MOVBload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVBload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: ( is8BitInt(t) && !t.IsSigned())
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && t.IsSigned())
	// result: (MOVHload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVHload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && !t.IsSigned())
	// result: (MOVHUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVHUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && t.IsSigned())
	// result: (MOVWload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVWload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && !t.IsSigned())
	// result: (MOVWUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVWUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is64BitInt(t) || isPtr(t))
	// result: (MOVDload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitInt(t) || isPtr(t)) {
			break
		}
		v.reset(OpRISCV64MOVDload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is32BitFloat(t)
	// result: (FMOVWload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitFloat(t)) {
			break
		}
		v.reset(OpRISCV64FMOVWload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is64BitFloat(t)
	// result: (FMOVDload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitFloat(t)) {
			break
		}
		v.reset(OpRISCV64FMOVDload)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLocalAddr lowers the address of a stack slot to a
// MOVaddr. When the slot's element type contains pointers, the base is
// wrapped in SPanchored to keep the address ordered with respect to mem;
// otherwise the memory argument is dropped entirely.
func rewriteValueRISCV64_OpLocalAddr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (LocalAddr <t> {sym} base mem)
	// cond: t.Elem().HasPointers()
	// result: (MOVaddr {sym} (SPanchored base mem))
	for {
		t := v.Type
		sym := auxToSym(v.Aux)
		base := v_0
		mem := v_1
		if !(t.Elem().HasPointers()) {
			break
		}
		v.reset(OpRISCV64MOVaddr)
		v.Aux = symToAux(sym)
		v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
		v0.AddArg2(base, mem)
		v.AddArg(v0)
		return true
	}
	// match: (LocalAddr <t> {sym} base _)
	// cond: !t.Elem().HasPointers()
	// result: (MOVaddr {sym} base)
	for {
		t := v.Type
		sym := auxToSym(v.Aux)
		base := v_0
		if !(!t.Elem().HasPointers()) {
			break
		}
		v.reset(OpRISCV64MOVaddr)
		v.Aux = symToAux(sym)
		v.AddArg(base)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh16x16 lowers a 16-bit left shift by a 16-bit
// count. When the count is not provably in range, the SLL result is masked
// with (Neg16 (SLTIU [64] count)) — an all-ones mask when count < 64 and
// zero otherwise — so oversized shifts produce 0 as Go requires. When
// shiftIsBounded reports the count is in range, a bare SLL suffices.
func rewriteValueRISCV64_OpLsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg16, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh16x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh16x32 lowers a 16-bit left shift by a 32-bit
// count. Same scheme as Lsh16x16: unless shiftIsBounded proves the count is
// in range, the SLL result is ANDed with a mask that is all-ones when
// count < 64 and zero otherwise, so oversized shifts produce 0.
func rewriteValueRISCV64_OpLsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg16, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh16x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh16x64 lowers Lsh16x64 (16-bit value, 64-bit count).
// The count is already 64 bits wide, so no zero-extension is needed before the
// SLTIU [64] bounds test; otherwise the lowering matches the other Lsh16 rules.
func rewriteValueRISCV64_OpLsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh16x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg16, t) // negate 0/1 to all-zeros/all-ones mask
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64) // 1 if y < 64, else 0
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh16x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh16x8 lowers Lsh16x8 (16-bit value, 8-bit count).
// The unbounded form zero-extends the 8-bit count to 64 bits, then masks the
// SLL result to 0 for counts >= 64; the bounded form is a bare SLL.
func rewriteValueRISCV64_OpLsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg16, t) // negate 0/1 to all-zeros/all-ones mask
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64) // 1 if y < 64, else 0
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh16x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh32x16 lowers Lsh32x16 (32-bit value, 16-bit count).
// Unbounded shifts are masked via Neg32 (SLTIU [64] (ZeroExt16to64 y)) so that
// counts >= 64 produce 0; bounded shifts lower to a bare SLL.
func rewriteValueRISCV64_OpLsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t) // negate 0/1 to all-zeros/all-ones mask
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64) // 1 if y < 64, else 0
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh32x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh32x32 lowers Lsh32x32 (32-bit value, 32-bit count).
// Unbounded shifts are masked via Neg32 (SLTIU [64] (ZeroExt32to64 y)) so that
// counts >= 64 produce 0; bounded shifts lower to a bare SLL.
func rewriteValueRISCV64_OpLsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t) // negate 0/1 to all-zeros/all-ones mask
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64) // 1 if y < 64, else 0
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh32x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh32x64 lowers Lsh32x64 (32-bit value, 64-bit count).
// The count is already 64 bits wide, so SLTIU [64] tests it directly with no
// zero-extension; otherwise the lowering matches the other Lsh32 rules.
func rewriteValueRISCV64_OpLsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh32x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t) // negate 0/1 to all-zeros/all-ones mask
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64) // 1 if y < 64, else 0
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh32x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh32x8 lowers Lsh32x8 (32-bit value, 8-bit count).
// The unbounded form zero-extends the 8-bit count to 64 bits, then masks the
// SLL result to 0 for counts >= 64; the bounded form is a bare SLL.
func rewriteValueRISCV64_OpLsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t) // negate 0/1 to all-zeros/all-ones mask
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64) // 1 if y < 64, else 0
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh32x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh64x16 lowers Lsh64x16 (64-bit value, 16-bit count).
// Unbounded shifts are masked via Neg64 (SLTIU [64] (ZeroExt16to64 y)) so that
// counts >= 64 produce 0; bounded shifts lower to a bare SLL.
func rewriteValueRISCV64_OpLsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t) // negate 0/1 to all-zeros/all-ones mask
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64) // 1 if y < 64, else 0
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh64x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh64x32 lowers Lsh64x32 (64-bit value, 32-bit count).
// Unbounded shifts are masked via Neg64 (SLTIU [64] (ZeroExt32to64 y)) so that
// counts >= 64 produce 0; bounded shifts lower to a bare SLL.
func rewriteValueRISCV64_OpLsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t) // negate 0/1 to all-zeros/all-ones mask
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64) // 1 if y < 64, else 0
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh64x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh64x64 lowers Lsh64x64 (64-bit value, 64-bit count).
// The count is already 64 bits wide, so SLTIU [64] tests it directly with no
// zero-extension; otherwise the lowering matches the other Lsh64 rules.
func rewriteValueRISCV64_OpLsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh64x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t) // negate 0/1 to all-zeros/all-ones mask
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64) // 1 if y < 64, else 0
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh64x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh64x8 lowers Lsh64x8 (64-bit value, 8-bit count).
// The unbounded form zero-extends the 8-bit count to 64 bits, then masks the
// SLL result to 0 for counts >= 64; the bounded form is a bare SLL.
func rewriteValueRISCV64_OpLsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t) // negate 0/1 to all-zeros/all-ones mask
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64) // 1 if y < 64, else 0
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh64x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh8x16 lowers Lsh8x16 (8-bit value, 16-bit count).
// Unbounded shifts are masked via Neg8 (SLTIU [64] (ZeroExt16to64 y)) so that
// counts >= 64 produce 0; bounded shifts lower to a bare SLL.
func rewriteValueRISCV64_OpLsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg8, t) // negate 0/1 to all-zeros/all-ones mask
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64) // 1 if y < 64, else 0
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh8x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh8x32 lowers Lsh8x32 (8-bit value, 32-bit count).
// Unbounded shifts are masked via Neg8 (SLTIU [64] (ZeroExt32to64 y)) so that
// counts >= 64 produce 0; bounded shifts lower to a bare SLL.
func rewriteValueRISCV64_OpLsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg8, t) // negate 0/1 to all-zeros/all-ones mask
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64) // 1 if y < 64, else 0
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh8x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh8x64 lowers Lsh8x64 (8-bit value, 64-bit count).
// The count is already 64 bits wide, so SLTIU [64] tests it directly with no
// zero-extension; otherwise the lowering matches the other Lsh8 rules.
func rewriteValueRISCV64_OpLsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh8x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg8, t) // negate 0/1 to all-zeros/all-ones mask
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64) // 1 if y < 64, else 0
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh8x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh8x8 lowers Lsh8x8 (8-bit value, 8-bit count).
// The unbounded form zero-extends the 8-bit count to 64 bits, then masks the
// SLL result to 0 for counts >= 64; the bounded form is a bare SLL.
func rewriteValueRISCV64_OpLsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg8, t) // negate 0/1 to all-zeros/all-ones mask
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64) // 1 if y < 64, else 0
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh8x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMax64 lowers Max64 to the MAX instruction, but only
// when GORISCV64 >= 22 (rva22u64 or later — presumably the profile that
// guarantees the Zbb min/max instructions; confirm against _gen/RISCV64.rules).
// Otherwise it returns false and Max64 is lowered by generic rules.
func rewriteValueRISCV64_OpMax64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Max64 x y)
	// cond: buildcfg.GORISCV64 >= 22
	// result: (MAX x y)
	for {
		x := v_0
		y := v_1
		if !(buildcfg.GORISCV64 >= 22) {
			break
		}
		v.reset(OpRISCV64MAX)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMax64u lowers Max64u to the unsigned MAXU instruction
// when GORISCV64 >= 22 (rva22u64 or later); otherwise it returns false and the
// generic lowering applies.
func rewriteValueRISCV64_OpMax64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Max64u x y)
	// cond: buildcfg.GORISCV64 >= 22
	// result: (MAXU x y)
	for {
		x := v_0
		y := v_1
		if !(buildcfg.GORISCV64 >= 22) {
			break
		}
		v.reset(OpRISCV64MAXU)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMin64 lowers Min64 to the MIN instruction when
// GORISCV64 >= 22 (rva22u64 or later); otherwise it returns false and the
// generic lowering applies.
func rewriteValueRISCV64_OpMin64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Min64 x y)
	// cond: buildcfg.GORISCV64 >= 22
	// result: (MIN x y)
	for {
		x := v_0
		y := v_1
		if !(buildcfg.GORISCV64 >= 22) {
			break
		}
		v.reset(OpRISCV64MIN)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMin64u lowers Min64u to the unsigned MINU instruction
// when GORISCV64 >= 22 (rva22u64 or later); otherwise it returns false and the
// generic lowering applies.
func rewriteValueRISCV64_OpMin64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Min64u x y)
	// cond: buildcfg.GORISCV64 >= 22
	// result: (MINU x y)
	for {
		x := v_0
		y := v_1
		if !(buildcfg.GORISCV64 >= 22) {
			break
		}
		v.reset(OpRISCV64MINU)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMod16 lowers Mod16 to REMW after sign-extending both
// operands to 32 bits. The rule only fires when the aux bool is false —
// NOTE(review): the aux presumably flags divides needing the MinInt/-1
// overflow fixup; confirm against the generic Div/Mod rules.
func rewriteValueRISCV64_OpMod16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16 x y [false])
	// result: (REMW (SignExt16to32 x) (SignExt16to32 y))
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMW)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMod16u lowers Mod16u to REMUW after zero-extending
// both operands to 32 bits. The rule is unconditional, so the function always
// rewrites (hence no trailing return false — the infinite for is terminating).
func rewriteValueRISCV64_OpMod16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16u x y)
	// result: (REMUW (ZeroExt16to32 x) (ZeroExt16to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMUW)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpMod32 lowers Mod32 directly to REMW (operands are
// already 32 bits wide). Fires only when the aux bool is false — NOTE(review):
// presumably the MinInt/-1 overflow-fixup flag; confirm against generic rules.
func rewriteValueRISCV64_OpMod32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Mod32 x y [false])
	// result: (REMW x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMod64 lowers Mod64 directly to the 64-bit REM
// instruction. Fires only when the aux bool is false — NOTE(review):
// presumably the MinInt/-1 overflow-fixup flag; confirm against generic rules.
func rewriteValueRISCV64_OpMod64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Mod64 x y [false])
	// result: (REM x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64REM)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMod8 lowers Mod8 to REMW after sign-extending both
// operands to 32 bits. The rule is unconditional, so the function always
// rewrites (hence no trailing return false — the infinite for is terminating).
func rewriteValueRISCV64_OpMod8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8 x y)
	// result: (REMW (SignExt8to32 x) (SignExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMW)
		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpMod8u lowers Mod8u to REMUW after zero-extending both
// operands to 32 bits. The rule is unconditional, so the function always
// rewrites (hence no trailing return false — the infinite for is terminating).
func rewriteValueRISCV64_OpMod8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8u x y)
	// result: (REMUW (ZeroExt8to32 x) (ZeroExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMUW)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
  2808  func rewriteValueRISCV64_OpMove(v *Value) bool {
  2809  	v_2 := v.Args[2]
  2810  	v_1 := v.Args[1]
  2811  	v_0 := v.Args[0]
  2812  	b := v.Block
  2813  	config := b.Func.Config
  2814  	typ := &b.Func.Config.Types
  2815  	// match: (Move [0] _ _ mem)
  2816  	// result: mem
  2817  	for {
  2818  		if auxIntToInt64(v.AuxInt) != 0 {
  2819  			break
  2820  		}
  2821  		mem := v_2
  2822  		v.copyOf(mem)
  2823  		return true
  2824  	}
  2825  	// match: (Move [1] dst src mem)
  2826  	// result: (MOVBstore dst (MOVBload src mem) mem)
  2827  	for {
  2828  		if auxIntToInt64(v.AuxInt) != 1 {
  2829  			break
  2830  		}
  2831  		dst := v_0
  2832  		src := v_1
  2833  		mem := v_2
  2834  		v.reset(OpRISCV64MOVBstore)
  2835  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  2836  		v0.AddArg2(src, mem)
  2837  		v.AddArg3(dst, v0, mem)
  2838  		return true
  2839  	}
  2840  	// match: (Move [2] {t} dst src mem)
  2841  	// cond: t.Alignment()%2 == 0
  2842  	// result: (MOVHstore dst (MOVHload src mem) mem)
  2843  	for {
  2844  		if auxIntToInt64(v.AuxInt) != 2 {
  2845  			break
  2846  		}
  2847  		t := auxToType(v.Aux)
  2848  		dst := v_0
  2849  		src := v_1
  2850  		mem := v_2
  2851  		if !(t.Alignment()%2 == 0) {
  2852  			break
  2853  		}
  2854  		v.reset(OpRISCV64MOVHstore)
  2855  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  2856  		v0.AddArg2(src, mem)
  2857  		v.AddArg3(dst, v0, mem)
  2858  		return true
  2859  	}
  2860  	// match: (Move [2] dst src mem)
  2861  	// result: (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))
  2862  	for {
  2863  		if auxIntToInt64(v.AuxInt) != 2 {
  2864  			break
  2865  		}
  2866  		dst := v_0
  2867  		src := v_1
  2868  		mem := v_2
  2869  		v.reset(OpRISCV64MOVBstore)
  2870  		v.AuxInt = int32ToAuxInt(1)
  2871  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  2872  		v0.AuxInt = int32ToAuxInt(1)
  2873  		v0.AddArg2(src, mem)
  2874  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  2875  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  2876  		v2.AddArg2(src, mem)
  2877  		v1.AddArg3(dst, v2, mem)
  2878  		v.AddArg3(dst, v0, v1)
  2879  		return true
  2880  	}
  2881  	// match: (Move [4] {t} dst src mem)
  2882  	// cond: t.Alignment()%4 == 0
  2883  	// result: (MOVWstore dst (MOVWload src mem) mem)
  2884  	for {
  2885  		if auxIntToInt64(v.AuxInt) != 4 {
  2886  			break
  2887  		}
  2888  		t := auxToType(v.Aux)
  2889  		dst := v_0
  2890  		src := v_1
  2891  		mem := v_2
  2892  		if !(t.Alignment()%4 == 0) {
  2893  			break
  2894  		}
  2895  		v.reset(OpRISCV64MOVWstore)
  2896  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
  2897  		v0.AddArg2(src, mem)
  2898  		v.AddArg3(dst, v0, mem)
  2899  		return true
  2900  	}
  2901  	// match: (Move [4] {t} dst src mem)
  2902  	// cond: t.Alignment()%2 == 0
  2903  	// result: (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))
  2904  	for {
  2905  		if auxIntToInt64(v.AuxInt) != 4 {
  2906  			break
  2907  		}
  2908  		t := auxToType(v.Aux)
  2909  		dst := v_0
  2910  		src := v_1
  2911  		mem := v_2
  2912  		if !(t.Alignment()%2 == 0) {
  2913  			break
  2914  		}
  2915  		v.reset(OpRISCV64MOVHstore)
  2916  		v.AuxInt = int32ToAuxInt(2)
  2917  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  2918  		v0.AuxInt = int32ToAuxInt(2)
  2919  		v0.AddArg2(src, mem)
  2920  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  2921  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  2922  		v2.AddArg2(src, mem)
  2923  		v1.AddArg3(dst, v2, mem)
  2924  		v.AddArg3(dst, v0, v1)
  2925  		return true
  2926  	}
  2927  	// match: (Move [4] dst src mem)
  2928  	// result: (MOVBstore [3] dst (MOVBload [3] src mem) (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))))
  2929  	for {
  2930  		if auxIntToInt64(v.AuxInt) != 4 {
  2931  			break
  2932  		}
  2933  		dst := v_0
  2934  		src := v_1
  2935  		mem := v_2
  2936  		v.reset(OpRISCV64MOVBstore)
  2937  		v.AuxInt = int32ToAuxInt(3)
  2938  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  2939  		v0.AuxInt = int32ToAuxInt(3)
  2940  		v0.AddArg2(src, mem)
  2941  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  2942  		v1.AuxInt = int32ToAuxInt(2)
  2943  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  2944  		v2.AuxInt = int32ToAuxInt(2)
  2945  		v2.AddArg2(src, mem)
  2946  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  2947  		v3.AuxInt = int32ToAuxInt(1)
  2948  		v4 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  2949  		v4.AuxInt = int32ToAuxInt(1)
  2950  		v4.AddArg2(src, mem)
  2951  		v5 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  2952  		v6 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  2953  		v6.AddArg2(src, mem)
  2954  		v5.AddArg3(dst, v6, mem)
  2955  		v3.AddArg3(dst, v4, v5)
  2956  		v1.AddArg3(dst, v2, v3)
  2957  		v.AddArg3(dst, v0, v1)
  2958  		return true
  2959  	}
  2960  	// match: (Move [8] {t} dst src mem)
  2961  	// cond: t.Alignment()%8 == 0
  2962  	// result: (MOVDstore dst (MOVDload src mem) mem)
  2963  	for {
  2964  		if auxIntToInt64(v.AuxInt) != 8 {
  2965  			break
  2966  		}
  2967  		t := auxToType(v.Aux)
  2968  		dst := v_0
  2969  		src := v_1
  2970  		mem := v_2
  2971  		if !(t.Alignment()%8 == 0) {
  2972  			break
  2973  		}
  2974  		v.reset(OpRISCV64MOVDstore)
  2975  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
  2976  		v0.AddArg2(src, mem)
  2977  		v.AddArg3(dst, v0, mem)
  2978  		return true
  2979  	}
  2980  	// match: (Move [8] {t} dst src mem)
  2981  	// cond: t.Alignment()%4 == 0
  2982  	// result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))
  2983  	for {
  2984  		if auxIntToInt64(v.AuxInt) != 8 {
  2985  			break
  2986  		}
  2987  		t := auxToType(v.Aux)
  2988  		dst := v_0
  2989  		src := v_1
  2990  		mem := v_2
  2991  		if !(t.Alignment()%4 == 0) {
  2992  			break
  2993  		}
  2994  		v.reset(OpRISCV64MOVWstore)
  2995  		v.AuxInt = int32ToAuxInt(4)
  2996  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
  2997  		v0.AuxInt = int32ToAuxInt(4)
  2998  		v0.AddArg2(src, mem)
  2999  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
  3000  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
  3001  		v2.AddArg2(src, mem)
  3002  		v1.AddArg3(dst, v2, mem)
  3003  		v.AddArg3(dst, v0, v1)
  3004  		return true
  3005  	}
  3006  	// match: (Move [8] {t} dst src mem)
  3007  	// cond: t.Alignment()%2 == 0
  3008  	// result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))))
  3009  	for {
  3010  		if auxIntToInt64(v.AuxInt) != 8 {
  3011  			break
  3012  		}
  3013  		t := auxToType(v.Aux)
  3014  		dst := v_0
  3015  		src := v_1
  3016  		mem := v_2
  3017  		if !(t.Alignment()%2 == 0) {
  3018  			break
  3019  		}
  3020  		v.reset(OpRISCV64MOVHstore)
  3021  		v.AuxInt = int32ToAuxInt(6)
  3022  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  3023  		v0.AuxInt = int32ToAuxInt(6)
  3024  		v0.AddArg2(src, mem)
  3025  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  3026  		v1.AuxInt = int32ToAuxInt(4)
  3027  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  3028  		v2.AuxInt = int32ToAuxInt(4)
  3029  		v2.AddArg2(src, mem)
  3030  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  3031  		v3.AuxInt = int32ToAuxInt(2)
  3032  		v4 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  3033  		v4.AuxInt = int32ToAuxInt(2)
  3034  		v4.AddArg2(src, mem)
  3035  		v5 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  3036  		v6 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  3037  		v6.AddArg2(src, mem)
  3038  		v5.AddArg3(dst, v6, mem)
  3039  		v3.AddArg3(dst, v4, v5)
  3040  		v1.AddArg3(dst, v2, v3)
  3041  		v.AddArg3(dst, v0, v1)
  3042  		return true
  3043  	}
  3044  	// match: (Move [3] dst src mem)
  3045  	// result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)))
  3046  	for {
  3047  		if auxIntToInt64(v.AuxInt) != 3 {
  3048  			break
  3049  		}
  3050  		dst := v_0
  3051  		src := v_1
  3052  		mem := v_2
  3053  		v.reset(OpRISCV64MOVBstore)
  3054  		v.AuxInt = int32ToAuxInt(2)
  3055  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  3056  		v0.AuxInt = int32ToAuxInt(2)
  3057  		v0.AddArg2(src, mem)
  3058  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  3059  		v1.AuxInt = int32ToAuxInt(1)
  3060  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  3061  		v2.AuxInt = int32ToAuxInt(1)
  3062  		v2.AddArg2(src, mem)
  3063  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  3064  		v4 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  3065  		v4.AddArg2(src, mem)
  3066  		v3.AddArg3(dst, v4, mem)
  3067  		v1.AddArg3(dst, v2, v3)
  3068  		v.AddArg3(dst, v0, v1)
  3069  		return true
  3070  	}
  3071  	// match: (Move [6] {t} dst src mem)
  3072  	// cond: t.Alignment()%2 == 0
  3073  	// result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))
  3074  	for {
  3075  		if auxIntToInt64(v.AuxInt) != 6 {
  3076  			break
  3077  		}
  3078  		t := auxToType(v.Aux)
  3079  		dst := v_0
  3080  		src := v_1
  3081  		mem := v_2
  3082  		if !(t.Alignment()%2 == 0) {
  3083  			break
  3084  		}
  3085  		v.reset(OpRISCV64MOVHstore)
  3086  		v.AuxInt = int32ToAuxInt(4)
  3087  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  3088  		v0.AuxInt = int32ToAuxInt(4)
  3089  		v0.AddArg2(src, mem)
  3090  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  3091  		v1.AuxInt = int32ToAuxInt(2)
  3092  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  3093  		v2.AuxInt = int32ToAuxInt(2)
  3094  		v2.AddArg2(src, mem)
  3095  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  3096  		v4 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  3097  		v4.AddArg2(src, mem)
  3098  		v3.AddArg3(dst, v4, mem)
  3099  		v1.AddArg3(dst, v2, v3)
  3100  		v.AddArg3(dst, v0, v1)
  3101  		return true
  3102  	}
  3103  	// match: (Move [s] {t} dst src mem)
  3104  	// cond: s > 0 && s <= 3*8*moveSize(t.Alignment(), config) && logLargeCopy(v, s)
  3105  	// result: (LoweredMove [makeValAndOff(int32(s),int32(t.Alignment()))] dst src mem)
  3106  	for {
  3107  		s := auxIntToInt64(v.AuxInt)
  3108  		t := auxToType(v.Aux)
  3109  		dst := v_0
  3110  		src := v_1
  3111  		mem := v_2
  3112  		if !(s > 0 && s <= 3*8*moveSize(t.Alignment(), config) && logLargeCopy(v, s)) {
  3113  			break
  3114  		}
  3115  		v.reset(OpRISCV64LoweredMove)
  3116  		v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(s), int32(t.Alignment())))
  3117  		v.AddArg3(dst, src, mem)
  3118  		return true
  3119  	}
  3120  	// match: (Move [s] {t} dst src mem)
  3121  	// cond: s > 3*8*moveSize(t.Alignment(), config) && logLargeCopy(v, s)
  3122  	// result: (LoweredMoveLoop [makeValAndOff(int32(s),int32(t.Alignment()))] dst src mem)
  3123  	for {
  3124  		s := auxIntToInt64(v.AuxInt)
  3125  		t := auxToType(v.Aux)
  3126  		dst := v_0
  3127  		src := v_1
  3128  		mem := v_2
  3129  		if !(s > 3*8*moveSize(t.Alignment(), config) && logLargeCopy(v, s)) {
  3130  			break
  3131  		}
  3132  		v.reset(OpRISCV64LoweredMoveLoop)
  3133  		v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(s), int32(t.Alignment())))
  3134  		v.AddArg3(dst, src, mem)
  3135  		return true
  3136  	}
  3137  	return false
  3138  }
// rewriteValueRISCV64_OpNeq16 lowers (Neq16 x y) to (Not (Eq16 x y)),
// reusing the Eq16 lowering for the comparison itself. Always fires.
func rewriteValueRISCV64_OpNeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq16 x y)
	// result: (Not (Eq16 x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpEq16, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpNeq32 lowers (Neq32 x y) to (Not (Eq32 x y)),
// reusing the Eq32 lowering for the comparison itself. Always fires.
func rewriteValueRISCV64_OpNeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq32 x y)
	// result: (Not (Eq32 x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpEq32, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpNeq64 lowers (Neq64 x y) to (Not (Eq64 x y)),
// reusing the Eq64 lowering for the comparison itself. Always fires.
func rewriteValueRISCV64_OpNeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq64 x y)
	// result: (Not (Eq64 x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpEq64, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpNeq8 lowers (Neq8 x y) to (Not (Eq8 x y)),
// reusing the Eq8 lowering for the comparison itself. Always fires.
func rewriteValueRISCV64_OpNeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq8 x y)
	// result: (Not (Eq8 x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpEq8, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpNeqB lowers boolean inequality (NeqB x y) to
// (SNEZ (SUB x y)): the subtraction is nonzero exactly when the two
// booleans differ. Always fires.
func rewriteValueRISCV64_OpNeqB(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (NeqB x y)
	// result: (SNEZ (SUB <typ.Bool> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SNEZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpNeqPtr lowers (NeqPtr x y) to (Not (EqPtr x y)),
// reusing the EqPtr lowering for the comparison itself. Always fires.
func rewriteValueRISCV64_OpNeqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (NeqPtr x y)
	// result: (Not (EqPtr x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpEqPtr, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpOffPtr lowers pointer-offset computations, trying
// rules in decreasing order of preference:
//  1. SP-relative offsets that fit in 32 bits become a MOVaddr pseudo-op;
//  2. other 32-bit offsets become an ADDI immediate add;
//  3. anything else materializes the offset in a register and adds it.
// The final rule is unconditional, so this function always returns true.
func rewriteValueRISCV64_OpOffPtr(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (OffPtr [off] ptr:(SP))
	// cond: is32Bit(off)
	// result: (MOVaddr [int32(off)] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		if ptr.Op != OpSP || !(is32Bit(off)) {
			break
		}
		v.reset(OpRISCV64MOVaddr)
		v.AuxInt = int32ToAuxInt(int32(off))
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// cond: is32Bit(off)
	// result: (ADDI [off] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		if !(is32Bit(off)) {
			break
		}
		v.reset(OpRISCV64ADDI)
		v.AuxInt = int64ToAuxInt(off)
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// result: (ADD (MOVDconst [off]) ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		v.reset(OpRISCV64ADD)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(off)
		v.AddArg2(v0, ptr)
		return true
	}
}
// rewriteValueRISCV64_OpPopCount16 lowers (PopCount16 x) to the Zbb CPOP
// instruction, zero-extending the 16-bit input to 64 bits first so the
// upper bits do not contribute to the count. Always fires.
func rewriteValueRISCV64_OpPopCount16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (PopCount16 x)
	// result: (CPOP (ZeroExt16to64 x))
	for {
		x := v_0
		v.reset(OpRISCV64CPOP)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpPopCount8 lowers (PopCount8 x) to the Zbb CPOP
// instruction, zero-extending the 8-bit input to 64 bits first so the
// upper bits do not contribute to the count. Always fires.
func rewriteValueRISCV64_OpPopCount8(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (PopCount8 x)
	// result: (CPOP (ZeroExt8to64 x))
	for {
		x := v_0
		v.reset(OpRISCV64CPOP)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpRISCV64ADD optimizes ADD:
//   - fold a 32-bit constant operand into an ADDI immediate (skipped for
//     pointer-typed constants);
//   - turn (ADD x (NEG y)) into (SUB x y);
//   - with GORISCV64 >= 22 (Zba available), fuse a shift-by-1/2/3 operand
//     into the SH1ADD/SH2ADD/SH3ADD shift-and-add instructions.
// Each rule's inner _i0 loop tries both argument orders, since ADD is
// commutative. Returns false if no rule applies.
func rewriteValueRISCV64_OpRISCV64ADD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ADD (MOVDconst <t> [val]) x)
	// cond: is32Bit(val) && !t.IsPtr()
	// result: (ADDI [val] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64MOVDconst {
				continue
			}
			t := v_0.Type
			val := auxIntToInt64(v_0.AuxInt)
			x := v_1
			if !(is32Bit(val) && !t.IsPtr()) {
				continue
			}
			v.reset(OpRISCV64ADDI)
			v.AuxInt = int64ToAuxInt(val)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (ADD x (NEG y))
	// result: (SUB x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpRISCV64NEG {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpRISCV64SUB)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (ADD (SLLI [1] x) y)
	// cond: buildcfg.GORISCV64 >= 22
	// result: (SH1ADD x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64SLLI || auxIntToInt64(v_0.AuxInt) != 1 {
				continue
			}
			x := v_0.Args[0]
			y := v_1
			if !(buildcfg.GORISCV64 >= 22) {
				continue
			}
			v.reset(OpRISCV64SH1ADD)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (ADD (SLLI [2] x) y)
	// cond: buildcfg.GORISCV64 >= 22
	// result: (SH2ADD x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64SLLI || auxIntToInt64(v_0.AuxInt) != 2 {
				continue
			}
			x := v_0.Args[0]
			y := v_1
			if !(buildcfg.GORISCV64 >= 22) {
				continue
			}
			v.reset(OpRISCV64SH2ADD)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (ADD (SLLI [3] x) y)
	// cond: buildcfg.GORISCV64 >= 22
	// result: (SH3ADD x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64SLLI || auxIntToInt64(v_0.AuxInt) != 3 {
				continue
			}
			x := v_0.Args[0]
			y := v_1
			if !(buildcfg.GORISCV64 >= 22) {
				continue
			}
			v.reset(OpRISCV64SH3ADD)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ADDI simplifies the ADDI immediate-add:
//   - fold the immediate into a MOVaddr's offset when the sum fits in 32 bits;
//   - eliminate an add of zero entirely;
//   - constant-fold ADDI of a MOVDconst;
//   - collapse two stacked ADDIs into one when the combined immediate
//     still fits in 32 bits.
// Returns false if no rule applies.
func rewriteValueRISCV64_OpRISCV64ADDI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ADDI [c] (MOVaddr [d] {s} x))
	// cond: is32Bit(c+int64(d))
	// result: (MOVaddr [int32(c)+d] {s} x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		d := auxIntToInt32(v_0.AuxInt)
		s := auxToSym(v_0.Aux)
		x := v_0.Args[0]
		if !(is32Bit(c + int64(d))) {
			break
		}
		v.reset(OpRISCV64MOVaddr)
		v.AuxInt = int32ToAuxInt(int32(c) + d)
		v.Aux = symToAux(s)
		v.AddArg(x)
		return true
	}
	// match: (ADDI [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ADDI [x] (MOVDconst [y]))
	// cond: is32Bit(x + y)
	// result: (MOVDconst [x + y])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		if !(is32Bit(x + y)) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(x + y)
		return true
	}
	// match: (ADDI [x] (ADDI [y] z))
	// cond: is32Bit(x + y)
	// result: (ADDI [x + y] z)
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		z := v_0.Args[0]
		if !(is32Bit(x + y)) {
			break
		}
		v.reset(OpRISCV64ADDI)
		v.AuxInt = int64ToAuxInt(x + y)
		v.AddArg(z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64AND optimizes AND:
//   - fold a 32-bit constant operand into an ANDI immediate (the inner
//     _i0 loop tries both argument orders, since AND is commutative);
//   - simplify (AND x x) to x.
// Returns false if no rule applies.
func rewriteValueRISCV64_OpRISCV64AND(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (AND (MOVDconst [val]) x)
	// cond: is32Bit(val)
	// result: (ANDI [val] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64MOVDconst {
				continue
			}
			val := auxIntToInt64(v_0.AuxInt)
			x := v_1
			if !(is32Bit(val)) {
				continue
			}
			v.reset(OpRISCV64ANDI)
			v.AuxInt = int64ToAuxInt(val)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (AND x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ANDI simplifies the ANDI immediate-and:
//   - AND with 0 is the constant 0;
//   - AND with -1 (all ones) is the identity;
//   - constant-fold ANDI of a MOVDconst;
//   - collapse two stacked ANDIs by intersecting their masks.
// Returns false if no rule applies.
func rewriteValueRISCV64_OpRISCV64ANDI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ANDI [0] x)
	// result: (MOVDconst [0])
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (ANDI [-1] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ANDI [x] (MOVDconst [y]))
	// result: (MOVDconst [x & y])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(x & y)
		return true
	}
	// match: (ANDI [x] (ANDI [y] z))
	// result: (ANDI [x & y] z)
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		z := v_0.Args[0]
		v.reset(OpRISCV64ANDI)
		v.AuxInt = int64ToAuxInt(x & y)
		v.AddArg(z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64CZEROEQZ simplifies the Zicond conditional-zero
// instruction CZEROEQZ (result is x if the condition is nonzero, else 0):
//   - strip SNEZ/NEG wrappers from the condition, since only its
//     zero/nonzero status matters;
//   - flip to CZERONEZ when the condition is a SEQZ (inverting the test
//     instead of computing the inverted flag);
//   - simplify (CZEROEQZ x x) to x and a zero first operand to 0.
// Returns false if no rule applies.
func rewriteValueRISCV64_OpRISCV64CZEROEQZ(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (CZEROEQZ x (SNEZ y))
	// result: (CZEROEQZ x y)
	for {
		x := v_0
		if v_1.Op != OpRISCV64SNEZ {
			break
		}
		y := v_1.Args[0]
		v.reset(OpRISCV64CZEROEQZ)
		v.AddArg2(x, y)
		return true
	}
	// match: (CZEROEQZ x (SEQZ y))
	// result: (CZERONEZ x y)
	for {
		x := v_0
		if v_1.Op != OpRISCV64SEQZ {
			break
		}
		y := v_1.Args[0]
		v.reset(OpRISCV64CZERONEZ)
		v.AddArg2(x, y)
		return true
	}
	// match: (CZEROEQZ x (NEG y))
	// result: (CZEROEQZ x y)
	for {
		x := v_0
		if v_1.Op != OpRISCV64NEG {
			break
		}
		y := v_1.Args[0]
		v.reset(OpRISCV64CZEROEQZ)
		v.AddArg2(x, y)
		return true
	}
	// match: (CZEROEQZ x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (CZEROEQZ (MOVDconst [0]) _)
	// result: (MOVDconst [0])
	for {
		if v_0.Op != OpRISCV64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64CZERONEZ simplifies the Zicond conditional-zero
// instruction CZERONEZ (result is x if the condition is zero, else 0):
//   - strip SNEZ/NEG wrappers from the condition, since only its
//     zero/nonzero status matters;
//   - flip to CZEROEQZ when the condition is a SEQZ;
//   - (CZERONEZ x x) is always 0: either the condition x is nonzero and
//     the result is zeroed, or x itself is 0;
//   - a zero first operand yields 0 regardless of the condition.
// Returns false if no rule applies.
func rewriteValueRISCV64_OpRISCV64CZERONEZ(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (CZERONEZ x (SNEZ y))
	// result: (CZERONEZ x y)
	for {
		x := v_0
		if v_1.Op != OpRISCV64SNEZ {
			break
		}
		y := v_1.Args[0]
		v.reset(OpRISCV64CZERONEZ)
		v.AddArg2(x, y)
		return true
	}
	// match: (CZERONEZ x (SEQZ y))
	// result: (CZEROEQZ x y)
	for {
		x := v_0
		if v_1.Op != OpRISCV64SEQZ {
			break
		}
		y := v_1.Args[0]
		v.reset(OpRISCV64CZEROEQZ)
		v.AddArg2(x, y)
		return true
	}
	// match: (CZERONEZ x (NEG y))
	// result: (CZERONEZ x y)
	for {
		x := v_0
		if v_1.Op != OpRISCV64NEG {
			break
		}
		y := v_1.Args[0]
		v.reset(OpRISCV64CZERONEZ)
		v.AddArg2(x, y)
		return true
	}
	// match: (CZERONEZ x x)
	// result: (MOVDconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (CZERONEZ (MOVDconst [0]) _)
	// result: (MOVDconst [0])
	for {
		if v_0.Op != OpRISCV64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FADDD fuses a double-precision add of a
// multiply, (FADDD a (FMULD x y)), into the fused-multiply-add FMADDD,
// gated on useFMA (which decides whether FMA contraction is permitted
// here). The inner _i0 loop tries both argument orders, since FADDD is
// commutative. Returns false if the rule does not apply.
func rewriteValueRISCV64_OpRISCV64FADDD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FADDD a (FMULD x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FMADDD x y a)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			if v_1.Op != OpRISCV64FMULD {
				continue
			}
			y := v_1.Args[1]
			x := v_1.Args[0]
			if !(a.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpRISCV64FMADDD)
			v.AddArg3(x, y, a)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FADDS fuses a single-precision add of a
// multiply, (FADDS a (FMULS x y)), into the fused-multiply-add FMADDS,
// gated on useFMA. The inner _i0 loop tries both argument orders, since
// FADDS is commutative. Returns false if the rule does not apply.
func rewriteValueRISCV64_OpRISCV64FADDS(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FADDS a (FMULS x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FMADDS x y a)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			if v_1.Op != OpRISCV64FMULS {
				continue
			}
			y := v_1.Args[1]
			x := v_1.Args[0]
			if !(a.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpRISCV64FMADDS)
			v.AddArg3(x, y, a)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FCVTSD shortens widen-operate-narrow chains:
// when a single-precision value is widened to double (FCVTDS), run through
// FABSD or FSQRTD, and narrowed back (FCVTSD), perform the operation
// directly in single precision (FABSS / FSQRTS). Returns false if no rule
// applies.
func rewriteValueRISCV64_OpRISCV64FCVTSD(v *Value) bool {
	v_0 := v.Args[0]
	// match: (FCVTSD (FABSD (FCVTDS X)))
	// result: (FABSS X)
	for {
		if v_0.Op != OpRISCV64FABSD {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpRISCV64FCVTDS {
			break
		}
		X := v_0_0.Args[0]
		v.reset(OpRISCV64FABSS)
		v.AddArg(X)
		return true
	}
	// match: (FCVTSD (FSQRTD (FCVTDS X)))
	// result: (FSQRTS X)
	for {
		if v_0.Op != OpRISCV64FSQRTD {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpRISCV64FCVTDS {
			break
		}
		X := v_0_0.Args[0]
		v.reset(OpRISCV64FSQRTS)
		v.AddArg(X)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FEQD rewrites double-precision equality
// against +/-infinity into a FCLASSD classification test: FCLASSD sets
// bit 0 for -Inf and bit 7 for +Inf, so the comparison reduces to masking
// the class bits with ANDI (plus SNEZ to normalize the +Inf mask result
// to 0/1). The inner _i0 loop tries both argument orders, since FEQD is
// commutative. Returns false if no rule applies.
func rewriteValueRISCV64_OpRISCV64FEQD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (FEQD x (FMOVDconst [math.Inf(-1)]))
	// result: (ANDI [0b00_0000_0001] (FCLASSD x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpRISCV64FMOVDconst || auxIntToFloat64(v_1.AuxInt) != math.Inf(-1) {
				continue
			}
			v.reset(OpRISCV64ANDI)
			v.AuxInt = int64ToAuxInt(0b00_0000_0001)
			v0 := b.NewValue0(v.Pos, OpRISCV64FCLASSD, typ.Int64)
			v0.AddArg(x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (FEQD x (FMOVDconst [math.Inf(1)]))
	// result: (SNEZ (ANDI <typ.Int64> [0b00_1000_0000] (FCLASSD x)))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpRISCV64FMOVDconst || auxIntToFloat64(v_1.AuxInt) != math.Inf(1) {
				continue
			}
			v.reset(OpRISCV64SNEZ)
			v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Int64)
			v0.AuxInt = int64ToAuxInt(0b00_1000_0000)
			v1 := b.NewValue0(v.Pos, OpRISCV64FCLASSD, typ.Int64)
			v1.AddArg(x)
			v0.AddArg(v1)
			v.AddArg(v0)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FLED rewrites double-precision <= comparisons
// against boundary constants (+/-MaxFloat64, +/-smallest normal 0x1p-1022)
// into FCLASSD classification tests: each comparison is equivalent to the
// operand's class falling in a fixed set of FCLASSD bits, so it becomes an
// ANDI mask over the class bits followed by SNEZ to produce 0/1. FLED is
// not commutative, so constant-on-left and constant-on-right are separate
// rules with different masks. Returns false if no rule applies.
func rewriteValueRISCV64_OpRISCV64FLED(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (FLED (FMOVDconst [-math.MaxFloat64]) x)
	// result: (SNEZ (ANDI <typ.Int64> [0b00_1111_1110] (FCLASSD x)))
	for {
		if v_0.Op != OpRISCV64FMOVDconst || auxIntToFloat64(v_0.AuxInt) != -math.MaxFloat64 {
			break
		}
		x := v_1
		v.reset(OpRISCV64SNEZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Int64)
		v0.AuxInt = int64ToAuxInt(0b00_1111_1110)
		v1 := b.NewValue0(v.Pos, OpRISCV64FCLASSD, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (FLED x (FMOVDconst [math.MaxFloat64]))
	// result: (SNEZ (ANDI <typ.Int64> [0b00_0111_1111] (FCLASSD x)))
	for {
		x := v_0
		if v_1.Op != OpRISCV64FMOVDconst || auxIntToFloat64(v_1.AuxInt) != math.MaxFloat64 {
			break
		}
		v.reset(OpRISCV64SNEZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Int64)
		v0.AuxInt = int64ToAuxInt(0b00_0111_1111)
		v1 := b.NewValue0(v.Pos, OpRISCV64FCLASSD, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (FLED (FMOVDconst [+0x1p-1022]) x)
	// result: (SNEZ (ANDI <typ.Int64> [0b00_1100_0000] (FCLASSD x)))
	for {
		if v_0.Op != OpRISCV64FMOVDconst || auxIntToFloat64(v_0.AuxInt) != +0x1p-1022 {
			break
		}
		x := v_1
		v.reset(OpRISCV64SNEZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Int64)
		v0.AuxInt = int64ToAuxInt(0b00_1100_0000)
		v1 := b.NewValue0(v.Pos, OpRISCV64FCLASSD, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (FLED x (FMOVDconst [-0x1p-1022]))
	// result: (SNEZ (ANDI <typ.Int64> [0b00_0000_0011] (FCLASSD x)))
	for {
		x := v_0
		if v_1.Op != OpRISCV64FMOVDconst || auxIntToFloat64(v_1.AuxInt) != -0x1p-1022 {
			break
		}
		v.reset(OpRISCV64SNEZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Int64)
		v0.AuxInt = int64ToAuxInt(0b00_0000_0011)
		v1 := b.NewValue0(v.Pos, OpRISCV64FCLASSD, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FLTD rewrites double-precision < comparisons
// against boundary constants (+/-MaxFloat64, +/-smallest normal 0x1p-1022)
// into FCLASSD classification tests, masking the class bits with ANDI
// (plus SNEZ where the mask covers more than bit 0). FLTD is not
// commutative, so constant-on-left and constant-on-right are separate
// rules with different masks. Returns false if no rule applies.
func rewriteValueRISCV64_OpRISCV64FLTD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (FLTD x (FMOVDconst [-math.MaxFloat64]))
	// result: (ANDI [0b00_0000_0001] (FCLASSD x))
	for {
		x := v_0
		if v_1.Op != OpRISCV64FMOVDconst || auxIntToFloat64(v_1.AuxInt) != -math.MaxFloat64 {
			break
		}
		v.reset(OpRISCV64ANDI)
		v.AuxInt = int64ToAuxInt(0b00_0000_0001)
		v0 := b.NewValue0(v.Pos, OpRISCV64FCLASSD, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (FLTD (FMOVDconst [math.MaxFloat64]) x)
	// result: (SNEZ (ANDI <typ.Int64> [0b00_1000_0000] (FCLASSD x)))
	for {
		if v_0.Op != OpRISCV64FMOVDconst || auxIntToFloat64(v_0.AuxInt) != math.MaxFloat64 {
			break
		}
		x := v_1
		v.reset(OpRISCV64SNEZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Int64)
		v0.AuxInt = int64ToAuxInt(0b00_1000_0000)
		v1 := b.NewValue0(v.Pos, OpRISCV64FCLASSD, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (FLTD x (FMOVDconst [+0x1p-1022]))
	// result: (SNEZ (ANDI <typ.Int64> [0b00_0011_1111] (FCLASSD x)))
	for {
		x := v_0
		if v_1.Op != OpRISCV64FMOVDconst || auxIntToFloat64(v_1.AuxInt) != +0x1p-1022 {
			break
		}
		v.reset(OpRISCV64SNEZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Int64)
		v0.AuxInt = int64ToAuxInt(0b00_0011_1111)
		v1 := b.NewValue0(v.Pos, OpRISCV64FCLASSD, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (FLTD (FMOVDconst [-0x1p-1022]) x)
	// result: (SNEZ (ANDI <typ.Int64> [0b00_1111_1100] (FCLASSD x)))
	for {
		if v_0.Op != OpRISCV64FMOVDconst || auxIntToFloat64(v_0.AuxInt) != -0x1p-1022 {
			break
		}
		x := v_1
		v.reset(OpRISCV64SNEZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Int64)
		v0.AuxInt = int64ToAuxInt(0b00_1111_1100)
		v1 := b.NewValue0(v.Pos, OpRISCV64FCLASSD, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMADDD folds negations into the
// double-precision fused multiply-add:
//   - a negated multiplicand turns FMADDD into FNMSUBD (the first two
//     operands commute, so both orders are tried);
//   - a negated addend turns FMADDD into FMSUBD.
// Each rule requires the FNEGD to have exactly one use so removing it is
// safe. Returns false if no rule applies.
func rewriteValueRISCV64_OpRISCV64FMADDD(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMADDD neg:(FNEGD x) y z)
	// cond: neg.Uses == 1
	// result: (FNMSUBD x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGD {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FNMSUBD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FMADDD x y neg:(FNEGD z))
	// cond: neg.Uses == 1
	// result: (FMSUBD x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGD {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FMSUBD)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMADDS folds negations into the
// single-precision fused multiply-add:
//   - a negated multiplicand turns FMADDS into FNMSUBS (the first two
//     operands commute, so both orders are tried);
//   - a negated addend turns FMADDS into FMSUBS.
// Each rule requires the FNEGS to have exactly one use so removing it is
// safe. Returns false if no rule applies.
func rewriteValueRISCV64_OpRISCV64FMADDS(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMADDS neg:(FNEGS x) y z)
	// cond: neg.Uses == 1
	// result: (FNMSUBS x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGS {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FNMSUBS)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FMADDS x y neg:(FNEGS z))
	// cond: neg.Uses == 1
	// result: (FMSUBS x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGS {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FMSUBS)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMOVDload optimizes double-precision FP loads:
//   - fold a MOVaddr base into the load's offset/symbol when the combined
//     offset fits in 32 bits and the symbols merge (SB-relative folding is
//     suppressed under dynamic linking);
//   - fold an ADDI base into the load's offset;
//   - forward an integer store at the same address directly into an FP
//     register via FMVDX, eliminating the round trip through memory.
// Returns false if no rule applies.
func rewriteValueRISCV64_OpRISCV64FMOVDload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (FMOVDload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (FMOVDload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64FMOVDload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (FMOVDload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (FMOVDload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64FMOVDload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	// match: (FMOVDload [off] {sym} ptr1 (MOVDstore [off] {sym} ptr2 x _))
	// cond: isSamePtr(ptr1, ptr2)
	// result: (FMVDX x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr1 := v_0
		if v_1.Op != OpRISCV64MOVDstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(isSamePtr(ptr1, ptr2)) {
			break
		}
		v.reset(OpRISCV64FMVDX)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMOVDstore optimizes double-precision FP
// stores by folding addressing into the store's aux fields:
//   - merge a MOVaddr base into the store's offset/symbol when the
//     combined offset fits in 32 bits and the symbols merge (SB-relative
//     folding is suppressed under dynamic linking);
//   - fold an ADDI base into the store's offset.
// Returns false if no rule applies.
func rewriteValueRISCV64_OpRISCV64FMOVDstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (FMOVDstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (FMOVDstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64FMOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (FMOVDstore [off1] {sym} (ADDI [off2] base) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (FMOVDstore [off1+int32(off2)] {sym} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64FMOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(base, val, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMOVWload applies the generated rewrite rules
// for (FMOVWload ...): it folds MOVaddr/ADDI address arithmetic into the
// load's offset/symbol, and replaces a load that reads back a just-stored
// value at the same address with a register move (FMVSX). It mutates v in
// place and reports whether any rule fired.
func rewriteValueRISCV64_OpRISCV64FMOVWload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (FMOVWload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (FMOVWload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64FMOVWload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (FMOVWload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (FMOVWload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64FMOVWload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	// match: (FMOVWload [off] {sym} ptr1 (MOVWstore [off] {sym} ptr2 x _))
	// cond: isSamePtr(ptr1, ptr2)
	// result: (FMVSX x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr1 := v_0
		if v_1.Op != OpRISCV64MOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(isSamePtr(ptr1, ptr2)) {
			break
		}
		v.reset(OpRISCV64FMVSX)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMOVWstore applies the generated rewrite rules
// for (FMOVWstore ...): it folds MOVaddr/ADDI address arithmetic on the base
// pointer into the store's offset and symbol aux fields. It mutates v in place
// and reports whether any rule fired.
func rewriteValueRISCV64_OpRISCV64FMOVWstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (FMOVWstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (FMOVWstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64FMOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (FMOVWstore [off1] {sym} (ADDI [off2] base) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (FMOVWstore [off1+int32(off2)] {sym} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64FMOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(base, val, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMSUBD applies the generated rewrite rules for
// (FMSUBD ...): a single-use FNEGD feeding a multiplicand or the addend is
// absorbed by switching to the complementary fused op (FNMADDD/FMADDD). It
// mutates v in place and reports whether any rule fired.
func rewriteValueRISCV64_OpRISCV64FMSUBD(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMSUBD neg:(FNEGD x) y z)
	// cond: neg.Uses == 1
	// result: (FNMADDD x y z)
	for {
		// The first two arguments commute: retry with v_0 and v_1 swapped.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGD {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FNMADDD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FMSUBD x y neg:(FNEGD z))
	// cond: neg.Uses == 1
	// result: (FMADDD x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGD {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FMADDD)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMSUBS applies the generated rewrite rules for
// (FMSUBS ...): a single-use FNEGS feeding a multiplicand or the addend is
// absorbed by switching to the complementary fused op (FNMADDS/FMADDS). It
// mutates v in place and reports whether any rule fired.
func rewriteValueRISCV64_OpRISCV64FMSUBS(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMSUBS neg:(FNEGS x) y z)
	// cond: neg.Uses == 1
	// result: (FNMADDS x y z)
	for {
		// The first two arguments commute: retry with v_0 and v_1 swapped.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGS {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FNMADDS)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FMSUBS x y neg:(FNEGS z))
	// cond: neg.Uses == 1
	// result: (FMADDS x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGS {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FMADDS)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FNED applies the generated rewrite rules for
// (FNED ...): comparing a double for inequality against an infinity constant
// is rewritten into an FCLASSD classification followed by a single-bit ANDI
// test and SEQZ. It mutates v in place and reports whether any rule fired.
func rewriteValueRISCV64_OpRISCV64FNED(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (FNED x (FMOVDconst [math.Inf(-1)]))
	// result: (SEQZ (ANDI <typ.Int64> [0b00_0000_0001] (FCLASSD x)))
	for {
		// The two arguments commute: retry with v_0 and v_1 swapped.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpRISCV64FMOVDconst || auxIntToFloat64(v_1.AuxInt) != math.Inf(-1) {
				continue
			}
			v.reset(OpRISCV64SEQZ)
			v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Int64)
			v0.AuxInt = int64ToAuxInt(0b00_0000_0001)
			v1 := b.NewValue0(v.Pos, OpRISCV64FCLASSD, typ.Int64)
			v1.AddArg(x)
			v0.AddArg(v1)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (FNED x (FMOVDconst [math.Inf(1)]))
	// result: (SEQZ (ANDI <typ.Int64> [0b00_1000_0000] (FCLASSD x)))
	for {
		// The two arguments commute: retry with v_0 and v_1 swapped.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpRISCV64FMOVDconst || auxIntToFloat64(v_1.AuxInt) != math.Inf(1) {
				continue
			}
			v.reset(OpRISCV64SEQZ)
			v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Int64)
			v0.AuxInt = int64ToAuxInt(0b00_1000_0000)
			v1 := b.NewValue0(v.Pos, OpRISCV64FCLASSD, typ.Int64)
			v1.AddArg(x)
			v0.AddArg(v1)
			v.AddArg(v0)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FNMADDD applies the generated rewrite rules for
// (FNMADDD ...): a single-use FNEGD feeding a multiplicand or the addend is
// absorbed by switching to the complementary fused op (FMSUBD/FNMSUBD). It
// mutates v in place and reports whether any rule fired.
func rewriteValueRISCV64_OpRISCV64FNMADDD(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FNMADDD neg:(FNEGD x) y z)
	// cond: neg.Uses == 1
	// result: (FMSUBD x y z)
	for {
		// The first two arguments commute: retry with v_0 and v_1 swapped.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGD {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FMSUBD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FNMADDD x y neg:(FNEGD z))
	// cond: neg.Uses == 1
	// result: (FNMSUBD x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGD {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FNMSUBD)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FNMADDS applies the generated rewrite rules for
// (FNMADDS ...): a single-use FNEGS feeding a multiplicand or the addend is
// absorbed by switching to the complementary fused op (FMSUBS/FNMSUBS). It
// mutates v in place and reports whether any rule fired.
func rewriteValueRISCV64_OpRISCV64FNMADDS(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FNMADDS neg:(FNEGS x) y z)
	// cond: neg.Uses == 1
	// result: (FMSUBS x y z)
	for {
		// The first two arguments commute: retry with v_0 and v_1 swapped.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGS {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FMSUBS)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FNMADDS x y neg:(FNEGS z))
	// cond: neg.Uses == 1
	// result: (FNMSUBS x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGS {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FNMSUBS)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FNMSUBD applies the generated rewrite rules for
// (FNMSUBD ...): a single-use FNEGD feeding a multiplicand or the addend is
// absorbed by switching to the complementary fused op (FMADDD/FNMADDD). It
// mutates v in place and reports whether any rule fired.
func rewriteValueRISCV64_OpRISCV64FNMSUBD(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FNMSUBD neg:(FNEGD x) y z)
	// cond: neg.Uses == 1
	// result: (FMADDD x y z)
	for {
		// The first two arguments commute: retry with v_0 and v_1 swapped.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGD {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FMADDD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FNMSUBD x y neg:(FNEGD z))
	// cond: neg.Uses == 1
	// result: (FNMADDD x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGD {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FNMADDD)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FNMSUBS applies the generated rewrite rules for
// (FNMSUBS ...): a single-use FNEGS feeding a multiplicand or the addend is
// absorbed by switching to the complementary fused op (FMADDS/FNMADDS). It
// mutates v in place and reports whether any rule fired.
func rewriteValueRISCV64_OpRISCV64FNMSUBS(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FNMSUBS neg:(FNEGS x) y z)
	// cond: neg.Uses == 1
	// result: (FMADDS x y z)
	for {
		// The first two arguments commute: retry with v_0 and v_1 swapped.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGS {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FMADDS)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FNMSUBS x y neg:(FNEGS z))
	// cond: neg.Uses == 1
	// result: (FNMADDS x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGS {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FNMADDS)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FSUBD applies the generated rewrite rules for
// (FSUBD ...): when FMA generation is allowed (useFMA), a subtraction with an
// FMULD operand is fused into FNMSUBD or FMSUBD depending on which side the
// multiply is on. It mutates v in place and reports whether any rule fired.
func rewriteValueRISCV64_OpRISCV64FSUBD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FSUBD a (FMULD x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FNMSUBD x y a)
	for {
		a := v_0
		if v_1.Op != OpRISCV64FMULD {
			break
		}
		y := v_1.Args[1]
		x := v_1.Args[0]
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpRISCV64FNMSUBD)
		v.AddArg3(x, y, a)
		return true
	}
	// match: (FSUBD (FMULD x y) a)
	// cond: a.Block.Func.useFMA(v)
	// result: (FMSUBD x y a)
	for {
		if v_0.Op != OpRISCV64FMULD {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		a := v_1
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpRISCV64FMSUBD)
		v.AddArg3(x, y, a)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FSUBS applies the generated rewrite rules for
// (FSUBS ...): when FMA generation is allowed (useFMA), a subtraction with an
// FMULS operand is fused into FNMSUBS or FMSUBS depending on which side the
// multiply is on. It mutates v in place and reports whether any rule fired.
func rewriteValueRISCV64_OpRISCV64FSUBS(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FSUBS a (FMULS x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FNMSUBS x y a)
	for {
		a := v_0
		if v_1.Op != OpRISCV64FMULS {
			break
		}
		y := v_1.Args[1]
		x := v_1.Args[0]
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpRISCV64FNMSUBS)
		v.AddArg3(x, y, a)
		return true
	}
	// match: (FSUBS (FMULS x y) a)
	// cond: a.Block.Func.useFMA(v)
	// result: (FMSUBS x y a)
	for {
		if v_0.Op != OpRISCV64FMULS {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		a := v_1
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpRISCV64FMSUBS)
		v.AddArg3(x, y, a)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64LoweredPanicBoundsCR applies the generated
// rewrite rule for (LoweredPanicBoundsCR ...): when the remaining register
// operand is a MOVDconst, both bounds become constants and the op is lowered
// to LoweredPanicBoundsCC with both values packed into the aux. It mutates v
// in place and reports whether the rule fired.
func rewriteValueRISCV64_OpRISCV64LoweredPanicBoundsCR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredPanicBoundsCR [kind] {p} (MOVDconst [c]) mem)
	// result: (LoweredPanicBoundsCC [kind] {PanicBoundsCC{Cx:p.C, Cy:c}} mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		p := auxToPanicBoundsC(v.Aux)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		mem := v_1
		v.reset(OpRISCV64LoweredPanicBoundsCC)
		v.AuxInt = int64ToAuxInt(kind)
		v.Aux = panicBoundsCCToAux(PanicBoundsCC{Cx: p.C, Cy: c})
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64LoweredPanicBoundsRC applies the generated
// rewrite rule for (LoweredPanicBoundsRC ...): when the remaining register
// operand is a MOVDconst, both bounds become constants and the op is lowered
// to LoweredPanicBoundsCC with both values packed into the aux. It mutates v
// in place and reports whether the rule fired.
func rewriteValueRISCV64_OpRISCV64LoweredPanicBoundsRC(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredPanicBoundsRC [kind] {p} (MOVDconst [c]) mem)
	// result: (LoweredPanicBoundsCC [kind] {PanicBoundsCC{Cx:c, Cy:p.C}} mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		p := auxToPanicBoundsC(v.Aux)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		mem := v_1
		v.reset(OpRISCV64LoweredPanicBoundsCC)
		v.AuxInt = int64ToAuxInt(kind)
		v.Aux = panicBoundsCCToAux(PanicBoundsCC{Cx: c, Cy: p.C})
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64LoweredPanicBoundsRR applies the generated
// rewrite rules for (LoweredPanicBoundsRR ...): a MOVDconst in either operand
// position is moved into the aux, lowering to the RC or CR variant
// respectively. It mutates v in place and reports whether any rule fired.
func rewriteValueRISCV64_OpRISCV64LoweredPanicBoundsRR(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredPanicBoundsRR [kind] x (MOVDconst [c]) mem)
	// result: (LoweredPanicBoundsRC [kind] x {PanicBoundsC{C:c}} mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		v.reset(OpRISCV64LoweredPanicBoundsRC)
		v.AuxInt = int64ToAuxInt(kind)
		v.Aux = panicBoundsCToAux(PanicBoundsC{C: c})
		v.AddArg2(x, mem)
		return true
	}
	// match: (LoweredPanicBoundsRR [kind] (MOVDconst [c]) y mem)
	// result: (LoweredPanicBoundsCR [kind] {PanicBoundsC{C:c}} y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		y := v_1
		mem := v_2
		v.reset(OpRISCV64LoweredPanicBoundsCR)
		v.AuxInt = int64ToAuxInt(kind)
		v.Aux = panicBoundsCToAux(PanicBoundsC{C: c})
		v.AddArg2(y, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVBUload applies the generated rewrite rules
// for (MOVBUload ...): it folds MOVaddr/ADDI address arithmetic into the
// load's offset/symbol, and replaces a load that reads back a just-stored
// byte at the same address with a zero-extension of the stored value
// (MOVBUreg). It mutates v in place and reports whether any rule fired.
func rewriteValueRISCV64_OpRISCV64MOVBUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVBUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVBUload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVBUload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVBUload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVBUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVBUload [off] {sym} ptr1 (MOVBstore [off] {sym} ptr2 x _))
	// cond: isSamePtr(ptr1, ptr2)
	// result: (MOVBUreg x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr1 := v_0
		if v_1.Op != OpRISCV64MOVBstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(isSamePtr(ptr1, ptr2)) {
			break
		}
		v.reset(OpRISCV64MOVBUreg)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVBUreg applies the generated rewrite rules
// for (MOVBUreg ...), the unsigned byte extension: it is elided entirely when
// the argument is already known to fit in an unsigned byte (float comparison
// results, SEQZ/SNEZ/SLT/SLTU, small ANDI masks), folded into constants and
// ANDI masks, turned into a plain MOVDreg after values that are already
// zero-extended (byte loads, 8-bit atomics, CAS results, a prior MOVBUreg),
// and combined with a signed MOVBload into a MOVBUload. It mutates v in place
// and reports whether any rule fired.
func rewriteValueRISCV64_OpRISCV64MOVBUreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (MOVBUreg x:(FLES _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FLES {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FLTS _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FLTS {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FEQS _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FEQS {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FNES _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FNES {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FLED _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FLED {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FLTD _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FLTD {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FEQD _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FEQD {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FNED _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FNED {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(SEQZ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64SEQZ {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(SNEZ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64SNEZ {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(SLT _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64SLT {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(SLTU _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64SLTU {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(ANDI [c] y))
	// cond: c >= 0 && int64(uint8(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(uint8(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg (ANDI [c] x))
	// cond: c < 0
	// result: (ANDI [int64(uint8(c))] x)
	for {
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(c < 0) {
			break
		}
		v.reset(OpRISCV64ANDI)
		v.AuxInt = int64ToAuxInt(int64(uint8(c)))
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (MOVDconst [c]))
	// result: (MOVDconst [int64(uint8(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint8(c)))
		return true
	}
	// match: (MOVBUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(Select0 (LoweredAtomicLoad8 _ _)))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpSelect0 {
			break
		}
		x_0 := x.Args[0]
		if x_0.Op != OpRISCV64LoweredAtomicLoad8 {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(Select0 (LoweredAtomicCas32 _ _ _ _)))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpSelect0 {
			break
		}
		x_0 := x.Args[0]
		if x_0.Op != OpRISCV64LoweredAtomicCas32 {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(Select0 (LoweredAtomicCas64 _ _ _ _)))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpSelect0 {
			break
		}
		x_0 := x.Args[0]
		if x_0.Op != OpRISCV64LoweredAtomicCas64 {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg <t> x:(MOVBload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVBUload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVBload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		// Result is built in the load's block (@x.Block), not v's block.
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVBUload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVBload applies the generated rewrite rules
// for (MOVBload ...): it folds MOVaddr/ADDI address arithmetic into the
// load's offset/symbol, and replaces a load that reads back a just-stored
// byte at the same address with a sign-extension of the stored value
// (MOVBreg). It mutates v in place and reports whether any rule fired.
func rewriteValueRISCV64_OpRISCV64MOVBload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVBload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVBload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVBload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVBload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVBload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVBload [off] {sym} ptr1 (MOVBstore [off] {sym} ptr2 x _))
	// cond: isSamePtr(ptr1, ptr2)
	// result: (MOVBreg x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr1 := v_0
		if v_1.Op != OpRISCV64MOVBstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(isSamePtr(ptr1, ptr2)) {
			break
		}
		v.reset(OpRISCV64MOVBreg)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVBreg applies the generated rewrite rules
// for (MOVBreg ...), the signed byte extension: it is elided when the
// argument is an ANDI whose mask already fits in a signed byte, folded into
// constants, turned into a plain MOVDreg after values that are already
// sign-extended (byte loads, a prior MOVBreg), and combined with an unsigned
// MOVBUload into a MOVBload. It mutates v in place and reports whether any
// rule fired.
func rewriteValueRISCV64_OpRISCV64MOVBreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (MOVBreg x:(ANDI [c] y))
	// cond: c >= 0 && int64(int8(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(int8(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBreg (MOVDconst [c]))
	// result: (MOVDconst [int64(int8(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(int8(c)))
		return true
	}
	// match: (MOVBreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg <t> x:(MOVBUload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVBload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		// Result is built in the load's block (@x.Block), not v's block.
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVBload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVBstore applies the generated rewrite rules
// for MOVBstore (store low byte): fold MOVaddr/ADDI address arithmetic into
// the store offset, turn stores of constant zero into MOVBstorezero, and
// drop redundant extension ops on the stored value (only the low byte is
// written, so any MOV*reg/MOV*Ureg wrapper is irrelevant). Reports whether
// v was rewritten.
func rewriteValueRISCV64_OpRISCV64MOVBstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVBstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVBstore [off1] {sym} (ADDI [off2] base) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVBstore [off1+int32(off2)] {sym} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVBstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpRISCV64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVBreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVBUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVBstorezero applies the generated rewrite
// rules for MOVBstorezero (store constant zero byte), folding MOVaddr and
// ADDI address arithmetic into the store's offset/symbol. Reports whether
// v was rewritten.
func rewriteValueRISCV64_OpRISCV64MOVBstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVBstorezero [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVBstorezero [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVBstorezero [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVDload applies the generated rewrite rules
// for MOVDload (load 64-bit word): fold MOVaddr/ADDI address arithmetic into
// the load offset, and forward a just-stored value instead of reloading it —
// through MOVDstore as a plain register move, or through FMOVDstore via
// FMVXD (float-to-integer register move). Reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64MOVDload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVDload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVDload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVDload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVDload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVDload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVDload [off] {sym} ptr1 (MOVDstore [off] {sym} ptr2 x _))
	// cond: isSamePtr(ptr1, ptr2)
	// result: (MOVDreg x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr1 := v_0
		if v_1.Op != OpRISCV64MOVDstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(isSamePtr(ptr1, ptr2)) {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVDload [off] {sym} ptr1 (FMOVDstore [off] {sym} ptr2 x _))
	// cond: isSamePtr(ptr1, ptr2)
	// result: (FMVXD x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr1 := v_0
		if v_1.Op != OpRISCV64FMOVDstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(isSamePtr(ptr1, ptr2)) {
			break
		}
		v.reset(OpRISCV64FMVXD)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVDnop applies the generated rewrite rule
// for MOVDnop: a no-op move of a constant is just the constant itself.
// Reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64MOVDnop(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVDnop (MOVDconst [c]))
	// result: (MOVDconst [c])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(c)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVDreg applies the generated rewrite rule
// for MOVDreg: when the source value has no other uses, downgrade the
// register move to a MOVDnop so it can be elided later. Reports whether
// v was rewritten.
func rewriteValueRISCV64_OpRISCV64MOVDreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVDreg x)
	// cond: x.Uses == 1
	// result: (MOVDnop x)
	for {
		x := v_0
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpRISCV64MOVDnop)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVDstore applies the generated rewrite rules
// for MOVDstore (store 64-bit word): fold MOVaddr/ADDI address arithmetic
// into the store offset and turn stores of constant zero into MOVDstorezero.
// Reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64MOVDstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVDstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVDstore [off1] {sym} (ADDI [off2] base) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVDstore [off1+int32(off2)] {sym} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVDstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVDstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpRISCV64MOVDstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVDstorezero applies the generated rewrite
// rules for MOVDstorezero (store constant zero doubleword), folding MOVaddr
// and ADDI address arithmetic into the store's offset/symbol. Reports
// whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64MOVDstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVDstorezero [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVDstorezero [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVDstorezero)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVDstorezero [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVDstorezero [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVDstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVHUload applies the generated rewrite rules
// for MOVHUload (zero-extending halfword load): fold MOVaddr/ADDI address
// arithmetic into the load offset, and forward a value just stored at the
// same address through MOVHUreg instead of reloading it. Reports whether
// v was rewritten.
func rewriteValueRISCV64_OpRISCV64MOVHUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVHUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVHUload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVHUload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVHUload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVHUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVHUload [off] {sym} ptr1 (MOVHstore [off] {sym} ptr2 x _))
	// cond: isSamePtr(ptr1, ptr2)
	// result: (MOVHUreg x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr1 := v_0
		if v_1.Op != OpRISCV64MOVHstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(isSamePtr(ptr1, ptr2)) {
			break
		}
		v.reset(OpRISCV64MOVHUreg)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVHUreg applies the generated rewrite rules
// for MOVHUreg (zero-extend low halfword to 64 bits): drop the extension
// when the operand is already known to fit in 16 unsigned bits (small ANDI
// masks, constants, narrower unsigned loads/extensions), and convert a
// sign-extending MOVHload feeding only this op into a MOVHUload. Reports
// whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64MOVHUreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (MOVHUreg x:(ANDI [c] y))
	// cond: c >= 0 && int64(uint16(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(uint16(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVHUreg (ANDI [c] x))
	// cond: c < 0
	// result: (ANDI [int64(uint16(c))] x)
	for {
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(c < 0) {
			break
		}
		v.reset(OpRISCV64ANDI)
		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (MOVDconst [c]))
	// result: (MOVDconst [int64(uint16(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
		return true
	}
	// match: (MOVHUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg <t> x:(MOVHload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVHUload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVHload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		// Result is placed in x's block (the "@x.Block" form above).
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVHUload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVHload applies the generated rewrite rules
// for MOVHload (sign-extending halfword load): fold MOVaddr/ADDI address
// arithmetic into the load offset, and forward a value just stored at the
// same address through MOVHreg instead of reloading it. Reports whether
// v was rewritten.
func rewriteValueRISCV64_OpRISCV64MOVHload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVHload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVHload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVHload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVHload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVHload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVHload [off] {sym} ptr1 (MOVHstore [off] {sym} ptr2 x _))
	// cond: isSamePtr(ptr1, ptr2)
	// result: (MOVHreg x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr1 := v_0
		if v_1.Op != OpRISCV64MOVHstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(isSamePtr(ptr1, ptr2)) {
			break
		}
		v.reset(OpRISCV64MOVHreg)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVHreg applies the generated rewrite rules
// for MOVHreg (sign-extend low halfword to 64 bits): drop the extension when
// the operand is already correctly extended (small non-negative ANDI masks,
// constants, narrower loads/extensions), and convert a zero-extending
// MOVHUload feeding only this op into a MOVHload. Reports whether v was
// rewritten.
func rewriteValueRISCV64_OpRISCV64MOVHreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (MOVHreg x:(ANDI [c] y))
	// cond: c >= 0 && int64(int16(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(int16(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVHreg (MOVDconst [c]))
	// result: (MOVDconst [int64(int16(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(int16(c)))
		return true
	}
	// match: (MOVHreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg <t> x:(MOVHUload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVHload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVHUload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		// Result is placed in x's block (the "@x.Block" form above).
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVHload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVHstore applies the generated rewrite rules
// for MOVHstore (store low halfword): fold MOVaddr/ADDI address arithmetic
// into the store offset, turn stores of constant zero into MOVHstorezero,
// and drop redundant extension ops on the stored value (only the low
// halfword is written, so 16-bit-or-wider extensions are irrelevant).
// Reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64MOVHstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVHstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVHstore [off1] {sym} (ADDI [off2] base) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVHstore [off1+int32(off2)] {sym} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVHstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpRISCV64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
  6208  func rewriteValueRISCV64_OpRISCV64MOVHstorezero(v *Value) bool {
  6209  	v_1 := v.Args[1]
  6210  	v_0 := v.Args[0]
  6211  	b := v.Block
  6212  	config := b.Func.Config
  6213  	// match: (MOVHstorezero [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  6214  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  6215  	// result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} base mem)
  6216  	for {
  6217  		off1 := auxIntToInt32(v.AuxInt)
  6218  		sym1 := auxToSym(v.Aux)
  6219  		if v_0.Op != OpRISCV64MOVaddr {
  6220  			break
  6221  		}
  6222  		off2 := auxIntToInt32(v_0.AuxInt)
  6223  		sym2 := auxToSym(v_0.Aux)
  6224  		base := v_0.Args[0]
  6225  		mem := v_1
  6226  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  6227  			break
  6228  		}
  6229  		v.reset(OpRISCV64MOVHstorezero)
  6230  		v.AuxInt = int32ToAuxInt(off1 + off2)
  6231  		v.Aux = symToAux(mergeSym(sym1, sym2))
  6232  		v.AddArg2(base, mem)
  6233  		return true
  6234  	}
  6235  	// match: (MOVHstorezero [off1] {sym} (ADDI [off2] base) mem)
  6236  	// cond: is32Bit(int64(off1)+off2)
  6237  	// result: (MOVHstorezero [off1+int32(off2)] {sym} base mem)
  6238  	for {
  6239  		off1 := auxIntToInt32(v.AuxInt)
  6240  		sym := auxToSym(v.Aux)
  6241  		if v_0.Op != OpRISCV64ADDI {
  6242  			break
  6243  		}
  6244  		off2 := auxIntToInt64(v_0.AuxInt)
  6245  		base := v_0.Args[0]
  6246  		mem := v_1
  6247  		if !(is32Bit(int64(off1) + off2)) {
  6248  			break
  6249  		}
  6250  		v.reset(OpRISCV64MOVHstorezero)
  6251  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  6252  		v.Aux = symToAux(sym)
  6253  		v.AddArg2(base, mem)
  6254  		return true
  6255  	}
  6256  	return false
  6257  }
  6258  func rewriteValueRISCV64_OpRISCV64MOVWUload(v *Value) bool {
  6259  	v_1 := v.Args[1]
  6260  	v_0 := v.Args[0]
  6261  	b := v.Block
  6262  	config := b.Func.Config
  6263  	typ := &b.Func.Config.Types
  6264  	// match: (MOVWUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  6265  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  6266  	// result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
  6267  	for {
  6268  		off1 := auxIntToInt32(v.AuxInt)
  6269  		sym1 := auxToSym(v.Aux)
  6270  		if v_0.Op != OpRISCV64MOVaddr {
  6271  			break
  6272  		}
  6273  		off2 := auxIntToInt32(v_0.AuxInt)
  6274  		sym2 := auxToSym(v_0.Aux)
  6275  		base := v_0.Args[0]
  6276  		mem := v_1
  6277  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  6278  			break
  6279  		}
  6280  		v.reset(OpRISCV64MOVWUload)
  6281  		v.AuxInt = int32ToAuxInt(off1 + off2)
  6282  		v.Aux = symToAux(mergeSym(sym1, sym2))
  6283  		v.AddArg2(base, mem)
  6284  		return true
  6285  	}
  6286  	// match: (MOVWUload [off1] {sym} (ADDI [off2] base) mem)
  6287  	// cond: is32Bit(int64(off1)+off2)
  6288  	// result: (MOVWUload [off1+int32(off2)] {sym} base mem)
  6289  	for {
  6290  		off1 := auxIntToInt32(v.AuxInt)
  6291  		sym := auxToSym(v.Aux)
  6292  		if v_0.Op != OpRISCV64ADDI {
  6293  			break
  6294  		}
  6295  		off2 := auxIntToInt64(v_0.AuxInt)
  6296  		base := v_0.Args[0]
  6297  		mem := v_1
  6298  		if !(is32Bit(int64(off1) + off2)) {
  6299  			break
  6300  		}
  6301  		v.reset(OpRISCV64MOVWUload)
  6302  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  6303  		v.Aux = symToAux(sym)
  6304  		v.AddArg2(base, mem)
  6305  		return true
  6306  	}
  6307  	// match: (MOVWUload [off] {sym} ptr1 (MOVWstore [off] {sym} ptr2 x _))
  6308  	// cond: isSamePtr(ptr1, ptr2)
  6309  	// result: (MOVWUreg x)
  6310  	for {
  6311  		off := auxIntToInt32(v.AuxInt)
  6312  		sym := auxToSym(v.Aux)
  6313  		ptr1 := v_0
  6314  		if v_1.Op != OpRISCV64MOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
  6315  			break
  6316  		}
  6317  		x := v_1.Args[1]
  6318  		ptr2 := v_1.Args[0]
  6319  		if !(isSamePtr(ptr1, ptr2)) {
  6320  			break
  6321  		}
  6322  		v.reset(OpRISCV64MOVWUreg)
  6323  		v.AddArg(x)
  6324  		return true
  6325  	}
  6326  	// match: (MOVWUload [off] {sym} ptr1 (FMOVWstore [off] {sym} ptr2 x _))
  6327  	// cond: isSamePtr(ptr1, ptr2)
  6328  	// result: (MOVWUreg (FMVXS x))
  6329  	for {
  6330  		off := auxIntToInt32(v.AuxInt)
  6331  		sym := auxToSym(v.Aux)
  6332  		ptr1 := v_0
  6333  		if v_1.Op != OpRISCV64FMOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
  6334  			break
  6335  		}
  6336  		x := v_1.Args[1]
  6337  		ptr2 := v_1.Args[0]
  6338  		if !(isSamePtr(ptr1, ptr2)) {
  6339  			break
  6340  		}
  6341  		v.reset(OpRISCV64MOVWUreg)
  6342  		v0 := b.NewValue0(v_1.Pos, OpRISCV64FMVXS, typ.Int32)
  6343  		v0.AddArg(x)
  6344  		v.AddArg(v0)
  6345  		return true
  6346  	}
  6347  	return false
  6348  }
  6349  func rewriteValueRISCV64_OpRISCV64MOVWUreg(v *Value) bool {
  6350  	v_0 := v.Args[0]
  6351  	b := v.Block
  6352  	typ := &b.Func.Config.Types
  6353  	// match: (MOVWUreg x:(ANDI [c] y))
  6354  	// cond: c >= 0 && int64(uint32(c)) == c
  6355  	// result: x
  6356  	for {
  6357  		x := v_0
  6358  		if x.Op != OpRISCV64ANDI {
  6359  			break
  6360  		}
  6361  		c := auxIntToInt64(x.AuxInt)
  6362  		if !(c >= 0 && int64(uint32(c)) == c) {
  6363  			break
  6364  		}
  6365  		v.copyOf(x)
  6366  		return true
  6367  	}
  6368  	// match: (MOVWUreg (ANDI [c] x))
  6369  	// cond: c < 0
  6370  	// result: (AND (MOVDconst [int64(uint32(c))]) x)
  6371  	for {
  6372  		if v_0.Op != OpRISCV64ANDI {
  6373  			break
  6374  		}
  6375  		c := auxIntToInt64(v_0.AuxInt)
  6376  		x := v_0.Args[0]
  6377  		if !(c < 0) {
  6378  			break
  6379  		}
  6380  		v.reset(OpRISCV64AND)
  6381  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  6382  		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
  6383  		v.AddArg2(v0, x)
  6384  		return true
  6385  	}
  6386  	// match: (MOVWUreg (MOVDconst [c]))
  6387  	// result: (MOVDconst [int64(uint32(c))])
  6388  	for {
  6389  		if v_0.Op != OpRISCV64MOVDconst {
  6390  			break
  6391  		}
  6392  		c := auxIntToInt64(v_0.AuxInt)
  6393  		v.reset(OpRISCV64MOVDconst)
  6394  		v.AuxInt = int64ToAuxInt(int64(uint32(c)))
  6395  		return true
  6396  	}
  6397  	// match: (MOVWUreg x:(MOVBUload _ _))
  6398  	// result: (MOVDreg x)
  6399  	for {
  6400  		x := v_0
  6401  		if x.Op != OpRISCV64MOVBUload {
  6402  			break
  6403  		}
  6404  		v.reset(OpRISCV64MOVDreg)
  6405  		v.AddArg(x)
  6406  		return true
  6407  	}
  6408  	// match: (MOVWUreg x:(MOVHUload _ _))
  6409  	// result: (MOVDreg x)
  6410  	for {
  6411  		x := v_0
  6412  		if x.Op != OpRISCV64MOVHUload {
  6413  			break
  6414  		}
  6415  		v.reset(OpRISCV64MOVDreg)
  6416  		v.AddArg(x)
  6417  		return true
  6418  	}
  6419  	// match: (MOVWUreg x:(MOVWUload _ _))
  6420  	// result: (MOVDreg x)
  6421  	for {
  6422  		x := v_0
  6423  		if x.Op != OpRISCV64MOVWUload {
  6424  			break
  6425  		}
  6426  		v.reset(OpRISCV64MOVDreg)
  6427  		v.AddArg(x)
  6428  		return true
  6429  	}
  6430  	// match: (MOVWUreg x:(MOVBUreg _))
  6431  	// result: (MOVDreg x)
  6432  	for {
  6433  		x := v_0
  6434  		if x.Op != OpRISCV64MOVBUreg {
  6435  			break
  6436  		}
  6437  		v.reset(OpRISCV64MOVDreg)
  6438  		v.AddArg(x)
  6439  		return true
  6440  	}
  6441  	// match: (MOVWUreg x:(MOVHUreg _))
  6442  	// result: (MOVDreg x)
  6443  	for {
  6444  		x := v_0
  6445  		if x.Op != OpRISCV64MOVHUreg {
  6446  			break
  6447  		}
  6448  		v.reset(OpRISCV64MOVDreg)
  6449  		v.AddArg(x)
  6450  		return true
  6451  	}
  6452  	// match: (MOVWUreg x:(MOVWUreg _))
  6453  	// result: (MOVDreg x)
  6454  	for {
  6455  		x := v_0
  6456  		if x.Op != OpRISCV64MOVWUreg {
  6457  			break
  6458  		}
  6459  		v.reset(OpRISCV64MOVDreg)
  6460  		v.AddArg(x)
  6461  		return true
  6462  	}
  6463  	// match: (MOVWUreg <t> x:(MOVWload [off] {sym} ptr mem))
  6464  	// cond: x.Uses == 1 && clobber(x)
  6465  	// result: @x.Block (MOVWUload <t> [off] {sym} ptr mem)
  6466  	for {
  6467  		t := v.Type
  6468  		x := v_0
  6469  		if x.Op != OpRISCV64MOVWload {
  6470  			break
  6471  		}
  6472  		off := auxIntToInt32(x.AuxInt)
  6473  		sym := auxToSym(x.Aux)
  6474  		mem := x.Args[1]
  6475  		ptr := x.Args[0]
  6476  		if !(x.Uses == 1 && clobber(x)) {
  6477  			break
  6478  		}
  6479  		b = x.Block
  6480  		v0 := b.NewValue0(x.Pos, OpRISCV64MOVWUload, t)
  6481  		v.copyOf(v0)
  6482  		v0.AuxInt = int32ToAuxInt(off)
  6483  		v0.Aux = symToAux(sym)
  6484  		v0.AddArg2(ptr, mem)
  6485  		return true
  6486  	}
  6487  	return false
  6488  }
  6489  func rewriteValueRISCV64_OpRISCV64MOVWload(v *Value) bool {
  6490  	v_1 := v.Args[1]
  6491  	v_0 := v.Args[0]
  6492  	b := v.Block
  6493  	config := b.Func.Config
  6494  	// match: (MOVWload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  6495  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  6496  	// result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} base mem)
  6497  	for {
  6498  		off1 := auxIntToInt32(v.AuxInt)
  6499  		sym1 := auxToSym(v.Aux)
  6500  		if v_0.Op != OpRISCV64MOVaddr {
  6501  			break
  6502  		}
  6503  		off2 := auxIntToInt32(v_0.AuxInt)
  6504  		sym2 := auxToSym(v_0.Aux)
  6505  		base := v_0.Args[0]
  6506  		mem := v_1
  6507  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  6508  			break
  6509  		}
  6510  		v.reset(OpRISCV64MOVWload)
  6511  		v.AuxInt = int32ToAuxInt(off1 + off2)
  6512  		v.Aux = symToAux(mergeSym(sym1, sym2))
  6513  		v.AddArg2(base, mem)
  6514  		return true
  6515  	}
  6516  	// match: (MOVWload [off1] {sym} (ADDI [off2] base) mem)
  6517  	// cond: is32Bit(int64(off1)+off2)
  6518  	// result: (MOVWload [off1+int32(off2)] {sym} base mem)
  6519  	for {
  6520  		off1 := auxIntToInt32(v.AuxInt)
  6521  		sym := auxToSym(v.Aux)
  6522  		if v_0.Op != OpRISCV64ADDI {
  6523  			break
  6524  		}
  6525  		off2 := auxIntToInt64(v_0.AuxInt)
  6526  		base := v_0.Args[0]
  6527  		mem := v_1
  6528  		if !(is32Bit(int64(off1) + off2)) {
  6529  			break
  6530  		}
  6531  		v.reset(OpRISCV64MOVWload)
  6532  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  6533  		v.Aux = symToAux(sym)
  6534  		v.AddArg2(base, mem)
  6535  		return true
  6536  	}
  6537  	// match: (MOVWload [off] {sym} ptr1 (MOVWstore [off] {sym} ptr2 x _))
  6538  	// cond: isSamePtr(ptr1, ptr2)
  6539  	// result: (MOVWreg x)
  6540  	for {
  6541  		off := auxIntToInt32(v.AuxInt)
  6542  		sym := auxToSym(v.Aux)
  6543  		ptr1 := v_0
  6544  		if v_1.Op != OpRISCV64MOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
  6545  			break
  6546  		}
  6547  		x := v_1.Args[1]
  6548  		ptr2 := v_1.Args[0]
  6549  		if !(isSamePtr(ptr1, ptr2)) {
  6550  			break
  6551  		}
  6552  		v.reset(OpRISCV64MOVWreg)
  6553  		v.AddArg(x)
  6554  		return true
  6555  	}
  6556  	// match: (MOVWload [off] {sym} ptr1 (FMOVWstore [off] {sym} ptr2 x _))
  6557  	// cond: isSamePtr(ptr1, ptr2)
  6558  	// result: (FMVXS x)
  6559  	for {
  6560  		off := auxIntToInt32(v.AuxInt)
  6561  		sym := auxToSym(v.Aux)
  6562  		ptr1 := v_0
  6563  		if v_1.Op != OpRISCV64FMOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
  6564  			break
  6565  		}
  6566  		x := v_1.Args[1]
  6567  		ptr2 := v_1.Args[0]
  6568  		if !(isSamePtr(ptr1, ptr2)) {
  6569  			break
  6570  		}
  6571  		v.reset(OpRISCV64FMVXS)
  6572  		v.AddArg(x)
  6573  		return true
  6574  	}
  6575  	return false
  6576  }
  6577  func rewriteValueRISCV64_OpRISCV64MOVWreg(v *Value) bool {
  6578  	v_0 := v.Args[0]
  6579  	b := v.Block
  6580  	// match: (MOVWreg x:(ANDI [c] y))
  6581  	// cond: c >= 0 && int64(int32(c)) == c
  6582  	// result: x
  6583  	for {
  6584  		x := v_0
  6585  		if x.Op != OpRISCV64ANDI {
  6586  			break
  6587  		}
  6588  		c := auxIntToInt64(x.AuxInt)
  6589  		if !(c >= 0 && int64(int32(c)) == c) {
  6590  			break
  6591  		}
  6592  		v.copyOf(x)
  6593  		return true
  6594  	}
  6595  	// match: (MOVWreg (NEG x))
  6596  	// result: (NEGW x)
  6597  	for {
  6598  		if v_0.Op != OpRISCV64NEG {
  6599  			break
  6600  		}
  6601  		x := v_0.Args[0]
  6602  		v.reset(OpRISCV64NEGW)
  6603  		v.AddArg(x)
  6604  		return true
  6605  	}
  6606  	// match: (MOVWreg (MOVDconst [c]))
  6607  	// result: (MOVDconst [int64(int32(c))])
  6608  	for {
  6609  		if v_0.Op != OpRISCV64MOVDconst {
  6610  			break
  6611  		}
  6612  		c := auxIntToInt64(v_0.AuxInt)
  6613  		v.reset(OpRISCV64MOVDconst)
  6614  		v.AuxInt = int64ToAuxInt(int64(int32(c)))
  6615  		return true
  6616  	}
  6617  	// match: (MOVWreg x:(MOVBload _ _))
  6618  	// result: (MOVDreg x)
  6619  	for {
  6620  		x := v_0
  6621  		if x.Op != OpRISCV64MOVBload {
  6622  			break
  6623  		}
  6624  		v.reset(OpRISCV64MOVDreg)
  6625  		v.AddArg(x)
  6626  		return true
  6627  	}
  6628  	// match: (MOVWreg x:(MOVBUload _ _))
  6629  	// result: (MOVDreg x)
  6630  	for {
  6631  		x := v_0
  6632  		if x.Op != OpRISCV64MOVBUload {
  6633  			break
  6634  		}
  6635  		v.reset(OpRISCV64MOVDreg)
  6636  		v.AddArg(x)
  6637  		return true
  6638  	}
  6639  	// match: (MOVWreg x:(MOVHload _ _))
  6640  	// result: (MOVDreg x)
  6641  	for {
  6642  		x := v_0
  6643  		if x.Op != OpRISCV64MOVHload {
  6644  			break
  6645  		}
  6646  		v.reset(OpRISCV64MOVDreg)
  6647  		v.AddArg(x)
  6648  		return true
  6649  	}
  6650  	// match: (MOVWreg x:(MOVHUload _ _))
  6651  	// result: (MOVDreg x)
  6652  	for {
  6653  		x := v_0
  6654  		if x.Op != OpRISCV64MOVHUload {
  6655  			break
  6656  		}
  6657  		v.reset(OpRISCV64MOVDreg)
  6658  		v.AddArg(x)
  6659  		return true
  6660  	}
  6661  	// match: (MOVWreg x:(MOVWload _ _))
  6662  	// result: (MOVDreg x)
  6663  	for {
  6664  		x := v_0
  6665  		if x.Op != OpRISCV64MOVWload {
  6666  			break
  6667  		}
  6668  		v.reset(OpRISCV64MOVDreg)
  6669  		v.AddArg(x)
  6670  		return true
  6671  	}
  6672  	// match: (MOVWreg x:(ADDIW _))
  6673  	// result: (MOVDreg x)
  6674  	for {
  6675  		x := v_0
  6676  		if x.Op != OpRISCV64ADDIW {
  6677  			break
  6678  		}
  6679  		v.reset(OpRISCV64MOVDreg)
  6680  		v.AddArg(x)
  6681  		return true
  6682  	}
  6683  	// match: (MOVWreg x:(SUBW _ _))
  6684  	// result: (MOVDreg x)
  6685  	for {
  6686  		x := v_0
  6687  		if x.Op != OpRISCV64SUBW {
  6688  			break
  6689  		}
  6690  		v.reset(OpRISCV64MOVDreg)
  6691  		v.AddArg(x)
  6692  		return true
  6693  	}
  6694  	// match: (MOVWreg x:(NEGW _))
  6695  	// result: (MOVDreg x)
  6696  	for {
  6697  		x := v_0
  6698  		if x.Op != OpRISCV64NEGW {
  6699  			break
  6700  		}
  6701  		v.reset(OpRISCV64MOVDreg)
  6702  		v.AddArg(x)
  6703  		return true
  6704  	}
  6705  	// match: (MOVWreg x:(MULW _ _))
  6706  	// result: (MOVDreg x)
  6707  	for {
  6708  		x := v_0
  6709  		if x.Op != OpRISCV64MULW {
  6710  			break
  6711  		}
  6712  		v.reset(OpRISCV64MOVDreg)
  6713  		v.AddArg(x)
  6714  		return true
  6715  	}
  6716  	// match: (MOVWreg x:(DIVW _ _))
  6717  	// result: (MOVDreg x)
  6718  	for {
  6719  		x := v_0
  6720  		if x.Op != OpRISCV64DIVW {
  6721  			break
  6722  		}
  6723  		v.reset(OpRISCV64MOVDreg)
  6724  		v.AddArg(x)
  6725  		return true
  6726  	}
  6727  	// match: (MOVWreg x:(DIVUW _ _))
  6728  	// result: (MOVDreg x)
  6729  	for {
  6730  		x := v_0
  6731  		if x.Op != OpRISCV64DIVUW {
  6732  			break
  6733  		}
  6734  		v.reset(OpRISCV64MOVDreg)
  6735  		v.AddArg(x)
  6736  		return true
  6737  	}
  6738  	// match: (MOVWreg x:(REMW _ _))
  6739  	// result: (MOVDreg x)
  6740  	for {
  6741  		x := v_0
  6742  		if x.Op != OpRISCV64REMW {
  6743  			break
  6744  		}
  6745  		v.reset(OpRISCV64MOVDreg)
  6746  		v.AddArg(x)
  6747  		return true
  6748  	}
  6749  	// match: (MOVWreg x:(REMUW _ _))
  6750  	// result: (MOVDreg x)
  6751  	for {
  6752  		x := v_0
  6753  		if x.Op != OpRISCV64REMUW {
  6754  			break
  6755  		}
  6756  		v.reset(OpRISCV64MOVDreg)
  6757  		v.AddArg(x)
  6758  		return true
  6759  	}
  6760  	// match: (MOVWreg x:(ROLW _ _))
  6761  	// result: (MOVDreg x)
  6762  	for {
  6763  		x := v_0
  6764  		if x.Op != OpRISCV64ROLW {
  6765  			break
  6766  		}
  6767  		v.reset(OpRISCV64MOVDreg)
  6768  		v.AddArg(x)
  6769  		return true
  6770  	}
  6771  	// match: (MOVWreg x:(RORW _ _))
  6772  	// result: (MOVDreg x)
  6773  	for {
  6774  		x := v_0
  6775  		if x.Op != OpRISCV64RORW {
  6776  			break
  6777  		}
  6778  		v.reset(OpRISCV64MOVDreg)
  6779  		v.AddArg(x)
  6780  		return true
  6781  	}
  6782  	// match: (MOVWreg x:(RORIW _))
  6783  	// result: (MOVDreg x)
  6784  	for {
  6785  		x := v_0
  6786  		if x.Op != OpRISCV64RORIW {
  6787  			break
  6788  		}
  6789  		v.reset(OpRISCV64MOVDreg)
  6790  		v.AddArg(x)
  6791  		return true
  6792  	}
  6793  	// match: (MOVWreg x:(MOVBreg _))
  6794  	// result: (MOVDreg x)
  6795  	for {
  6796  		x := v_0
  6797  		if x.Op != OpRISCV64MOVBreg {
  6798  			break
  6799  		}
  6800  		v.reset(OpRISCV64MOVDreg)
  6801  		v.AddArg(x)
  6802  		return true
  6803  	}
  6804  	// match: (MOVWreg x:(MOVBUreg _))
  6805  	// result: (MOVDreg x)
  6806  	for {
  6807  		x := v_0
  6808  		if x.Op != OpRISCV64MOVBUreg {
  6809  			break
  6810  		}
  6811  		v.reset(OpRISCV64MOVDreg)
  6812  		v.AddArg(x)
  6813  		return true
  6814  	}
  6815  	// match: (MOVWreg x:(MOVHreg _))
  6816  	// result: (MOVDreg x)
  6817  	for {
  6818  		x := v_0
  6819  		if x.Op != OpRISCV64MOVHreg {
  6820  			break
  6821  		}
  6822  		v.reset(OpRISCV64MOVDreg)
  6823  		v.AddArg(x)
  6824  		return true
  6825  	}
  6826  	// match: (MOVWreg x:(MOVWreg _))
  6827  	// result: (MOVDreg x)
  6828  	for {
  6829  		x := v_0
  6830  		if x.Op != OpRISCV64MOVWreg {
  6831  			break
  6832  		}
  6833  		v.reset(OpRISCV64MOVDreg)
  6834  		v.AddArg(x)
  6835  		return true
  6836  	}
  6837  	// match: (MOVWreg <t> x:(MOVWUload [off] {sym} ptr mem))
  6838  	// cond: x.Uses == 1 && clobber(x)
  6839  	// result: @x.Block (MOVWload <t> [off] {sym} ptr mem)
  6840  	for {
  6841  		t := v.Type
  6842  		x := v_0
  6843  		if x.Op != OpRISCV64MOVWUload {
  6844  			break
  6845  		}
  6846  		off := auxIntToInt32(x.AuxInt)
  6847  		sym := auxToSym(x.Aux)
  6848  		mem := x.Args[1]
  6849  		ptr := x.Args[0]
  6850  		if !(x.Uses == 1 && clobber(x)) {
  6851  			break
  6852  		}
  6853  		b = x.Block
  6854  		v0 := b.NewValue0(x.Pos, OpRISCV64MOVWload, t)
  6855  		v.copyOf(v0)
  6856  		v0.AuxInt = int32ToAuxInt(off)
  6857  		v0.Aux = symToAux(sym)
  6858  		v0.AddArg2(ptr, mem)
  6859  		return true
  6860  	}
  6861  	return false
  6862  }
  6863  func rewriteValueRISCV64_OpRISCV64MOVWstore(v *Value) bool {
  6864  	v_2 := v.Args[2]
  6865  	v_1 := v.Args[1]
  6866  	v_0 := v.Args[0]
  6867  	b := v.Block
  6868  	config := b.Func.Config
  6869  	// match: (MOVWstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
  6870  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  6871  	// result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
  6872  	for {
  6873  		off1 := auxIntToInt32(v.AuxInt)
  6874  		sym1 := auxToSym(v.Aux)
  6875  		if v_0.Op != OpRISCV64MOVaddr {
  6876  			break
  6877  		}
  6878  		off2 := auxIntToInt32(v_0.AuxInt)
  6879  		sym2 := auxToSym(v_0.Aux)
  6880  		base := v_0.Args[0]
  6881  		val := v_1
  6882  		mem := v_2
  6883  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  6884  			break
  6885  		}
  6886  		v.reset(OpRISCV64MOVWstore)
  6887  		v.AuxInt = int32ToAuxInt(off1 + off2)
  6888  		v.Aux = symToAux(mergeSym(sym1, sym2))
  6889  		v.AddArg3(base, val, mem)
  6890  		return true
  6891  	}
  6892  	// match: (MOVWstore [off1] {sym} (ADDI [off2] base) val mem)
  6893  	// cond: is32Bit(int64(off1)+off2)
  6894  	// result: (MOVWstore [off1+int32(off2)] {sym} base val mem)
  6895  	for {
  6896  		off1 := auxIntToInt32(v.AuxInt)
  6897  		sym := auxToSym(v.Aux)
  6898  		if v_0.Op != OpRISCV64ADDI {
  6899  			break
  6900  		}
  6901  		off2 := auxIntToInt64(v_0.AuxInt)
  6902  		base := v_0.Args[0]
  6903  		val := v_1
  6904  		mem := v_2
  6905  		if !(is32Bit(int64(off1) + off2)) {
  6906  			break
  6907  		}
  6908  		v.reset(OpRISCV64MOVWstore)
  6909  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  6910  		v.Aux = symToAux(sym)
  6911  		v.AddArg3(base, val, mem)
  6912  		return true
  6913  	}
  6914  	// match: (MOVWstore [off] {sym} ptr (MOVDconst [0]) mem)
  6915  	// result: (MOVWstorezero [off] {sym} ptr mem)
  6916  	for {
  6917  		off := auxIntToInt32(v.AuxInt)
  6918  		sym := auxToSym(v.Aux)
  6919  		ptr := v_0
  6920  		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
  6921  			break
  6922  		}
  6923  		mem := v_2
  6924  		v.reset(OpRISCV64MOVWstorezero)
  6925  		v.AuxInt = int32ToAuxInt(off)
  6926  		v.Aux = symToAux(sym)
  6927  		v.AddArg2(ptr, mem)
  6928  		return true
  6929  	}
  6930  	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
  6931  	// result: (MOVWstore [off] {sym} ptr x mem)
  6932  	for {
  6933  		off := auxIntToInt32(v.AuxInt)
  6934  		sym := auxToSym(v.Aux)
  6935  		ptr := v_0
  6936  		if v_1.Op != OpRISCV64MOVWreg {
  6937  			break
  6938  		}
  6939  		x := v_1.Args[0]
  6940  		mem := v_2
  6941  		v.reset(OpRISCV64MOVWstore)
  6942  		v.AuxInt = int32ToAuxInt(off)
  6943  		v.Aux = symToAux(sym)
  6944  		v.AddArg3(ptr, x, mem)
  6945  		return true
  6946  	}
  6947  	// match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
  6948  	// result: (MOVWstore [off] {sym} ptr x mem)
  6949  	for {
  6950  		off := auxIntToInt32(v.AuxInt)
  6951  		sym := auxToSym(v.Aux)
  6952  		ptr := v_0
  6953  		if v_1.Op != OpRISCV64MOVWUreg {
  6954  			break
  6955  		}
  6956  		x := v_1.Args[0]
  6957  		mem := v_2
  6958  		v.reset(OpRISCV64MOVWstore)
  6959  		v.AuxInt = int32ToAuxInt(off)
  6960  		v.Aux = symToAux(sym)
  6961  		v.AddArg3(ptr, x, mem)
  6962  		return true
  6963  	}
  6964  	return false
  6965  }
  6966  func rewriteValueRISCV64_OpRISCV64MOVWstorezero(v *Value) bool {
  6967  	v_1 := v.Args[1]
  6968  	v_0 := v.Args[0]
  6969  	b := v.Block
  6970  	config := b.Func.Config
  6971  	// match: (MOVWstorezero [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  6972  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  6973  	// result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} base mem)
  6974  	for {
  6975  		off1 := auxIntToInt32(v.AuxInt)
  6976  		sym1 := auxToSym(v.Aux)
  6977  		if v_0.Op != OpRISCV64MOVaddr {
  6978  			break
  6979  		}
  6980  		off2 := auxIntToInt32(v_0.AuxInt)
  6981  		sym2 := auxToSym(v_0.Aux)
  6982  		base := v_0.Args[0]
  6983  		mem := v_1
  6984  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  6985  			break
  6986  		}
  6987  		v.reset(OpRISCV64MOVWstorezero)
  6988  		v.AuxInt = int32ToAuxInt(off1 + off2)
  6989  		v.Aux = symToAux(mergeSym(sym1, sym2))
  6990  		v.AddArg2(base, mem)
  6991  		return true
  6992  	}
  6993  	// match: (MOVWstorezero [off1] {sym} (ADDI [off2] base) mem)
  6994  	// cond: is32Bit(int64(off1)+off2)
  6995  	// result: (MOVWstorezero [off1+int32(off2)] {sym} base mem)
  6996  	for {
  6997  		off1 := auxIntToInt32(v.AuxInt)
  6998  		sym := auxToSym(v.Aux)
  6999  		if v_0.Op != OpRISCV64ADDI {
  7000  			break
  7001  		}
  7002  		off2 := auxIntToInt64(v_0.AuxInt)
  7003  		base := v_0.Args[0]
  7004  		mem := v_1
  7005  		if !(is32Bit(int64(off1) + off2)) {
  7006  			break
  7007  		}
  7008  		v.reset(OpRISCV64MOVWstorezero)
  7009  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  7010  		v.Aux = symToAux(sym)
  7011  		v.AddArg2(base, mem)
  7012  		return true
  7013  	}
  7014  	return false
  7015  }
  7016  func rewriteValueRISCV64_OpRISCV64NEG(v *Value) bool {
  7017  	v_0 := v.Args[0]
  7018  	b := v.Block
  7019  	// match: (NEG (SUB x y))
  7020  	// result: (SUB y x)
  7021  	for {
  7022  		if v_0.Op != OpRISCV64SUB {
  7023  			break
  7024  		}
  7025  		y := v_0.Args[1]
  7026  		x := v_0.Args[0]
  7027  		v.reset(OpRISCV64SUB)
  7028  		v.AddArg2(y, x)
  7029  		return true
  7030  	}
  7031  	// match: (NEG <t> s:(ADDI [val] (SUB x y)))
  7032  	// cond: s.Uses == 1 && is32Bit(-val)
  7033  	// result: (ADDI [-val] (SUB <t> y x))
  7034  	for {
  7035  		t := v.Type
  7036  		s := v_0
  7037  		if s.Op != OpRISCV64ADDI {
  7038  			break
  7039  		}
  7040  		val := auxIntToInt64(s.AuxInt)
  7041  		s_0 := s.Args[0]
  7042  		if s_0.Op != OpRISCV64SUB {
  7043  			break
  7044  		}
  7045  		y := s_0.Args[1]
  7046  		x := s_0.Args[0]
  7047  		if !(s.Uses == 1 && is32Bit(-val)) {
  7048  			break
  7049  		}
  7050  		v.reset(OpRISCV64ADDI)
  7051  		v.AuxInt = int64ToAuxInt(-val)
  7052  		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, t)
  7053  		v0.AddArg2(y, x)
  7054  		v.AddArg(v0)
  7055  		return true
  7056  	}
  7057  	// match: (NEG (NEG x))
  7058  	// result: x
  7059  	for {
  7060  		if v_0.Op != OpRISCV64NEG {
  7061  			break
  7062  		}
  7063  		x := v_0.Args[0]
  7064  		v.copyOf(x)
  7065  		return true
  7066  	}
  7067  	// match: (NEG <t> s:(ADDI [val] (NEG x)))
  7068  	// cond: s.Uses == 1 && is32Bit(-val)
  7069  	// result: (ADDI [-val] x)
  7070  	for {
  7071  		s := v_0
  7072  		if s.Op != OpRISCV64ADDI {
  7073  			break
  7074  		}
  7075  		val := auxIntToInt64(s.AuxInt)
  7076  		s_0 := s.Args[0]
  7077  		if s_0.Op != OpRISCV64NEG {
  7078  			break
  7079  		}
  7080  		x := s_0.Args[0]
  7081  		if !(s.Uses == 1 && is32Bit(-val)) {
  7082  			break
  7083  		}
  7084  		v.reset(OpRISCV64ADDI)
  7085  		v.AuxInt = int64ToAuxInt(-val)
  7086  		v.AddArg(x)
  7087  		return true
  7088  	}
  7089  	// match: (NEG (MOVDconst [x]))
  7090  	// result: (MOVDconst [-x])
  7091  	for {
  7092  		if v_0.Op != OpRISCV64MOVDconst {
  7093  			break
  7094  		}
  7095  		x := auxIntToInt64(v_0.AuxInt)
  7096  		v.reset(OpRISCV64MOVDconst)
  7097  		v.AuxInt = int64ToAuxInt(-x)
  7098  		return true
  7099  	}
  7100  	return false
  7101  }
  7102  func rewriteValueRISCV64_OpRISCV64NEGW(v *Value) bool {
  7103  	v_0 := v.Args[0]
  7104  	// match: (NEGW (MOVDconst [x]))
  7105  	// result: (MOVDconst [int64(int32(-x))])
  7106  	for {
  7107  		if v_0.Op != OpRISCV64MOVDconst {
  7108  			break
  7109  		}
  7110  		x := auxIntToInt64(v_0.AuxInt)
  7111  		v.reset(OpRISCV64MOVDconst)
  7112  		v.AuxInt = int64ToAuxInt(int64(int32(-x)))
  7113  		return true
  7114  	}
  7115  	return false
  7116  }
  7117  func rewriteValueRISCV64_OpRISCV64OR(v *Value) bool {
  7118  	v_1 := v.Args[1]
  7119  	v_0 := v.Args[0]
  7120  	b := v.Block
  7121  	typ := &b.Func.Config.Types
  7122  	// match: (OR (MOVDconst [val]) x)
  7123  	// cond: is32Bit(val)
  7124  	// result: (ORI [val] x)
  7125  	for {
  7126  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7127  			if v_0.Op != OpRISCV64MOVDconst {
  7128  				continue
  7129  			}
  7130  			val := auxIntToInt64(v_0.AuxInt)
  7131  			x := v_1
  7132  			if !(is32Bit(val)) {
  7133  				continue
  7134  			}
  7135  			v.reset(OpRISCV64ORI)
  7136  			v.AuxInt = int64ToAuxInt(val)
  7137  			v.AddArg(x)
  7138  			return true
  7139  		}
  7140  		break
  7141  	}
  7142  	// match: (OR x x)
  7143  	// result: x
  7144  	for {
  7145  		x := v_0
  7146  		if x != v_1 {
  7147  			break
  7148  		}
  7149  		v.copyOf(x)
  7150  		return true
  7151  	}
  7152  	// match: (OR (CZEROEQZ <t> x cond) (CZERONEZ <t> (ADD x y) cond))
  7153  	// result: (ADD x (CZERONEZ <t> y cond))
  7154  	for {
  7155  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7156  			if v_0.Op != OpRISCV64CZEROEQZ {
  7157  				continue
  7158  			}
  7159  			t := v_0.Type
  7160  			cond := v_0.Args[1]
  7161  			x := v_0.Args[0]
  7162  			if v_1.Op != OpRISCV64CZERONEZ || v_1.Type != t {
  7163  				continue
  7164  			}
  7165  			_ = v_1.Args[1]
  7166  			v_1_0 := v_1.Args[0]
  7167  			if v_1_0.Op != OpRISCV64ADD {
  7168  				continue
  7169  			}
  7170  			_ = v_1_0.Args[1]
  7171  			v_1_0_0 := v_1_0.Args[0]
  7172  			v_1_0_1 := v_1_0.Args[1]
  7173  			for _i1 := 0; _i1 <= 1; _i1, v_1_0_0, v_1_0_1 = _i1+1, v_1_0_1, v_1_0_0 {
  7174  				if x != v_1_0_0 {
  7175  					continue
  7176  				}
  7177  				y := v_1_0_1
  7178  				if cond != v_1.Args[1] {
  7179  					continue
  7180  				}
  7181  				v.reset(OpRISCV64ADD)
  7182  				v0 := b.NewValue0(v.Pos, OpRISCV64CZERONEZ, t)
  7183  				v0.AddArg2(y, cond)
  7184  				v.AddArg2(x, v0)
  7185  				return true
  7186  			}
  7187  		}
  7188  		break
  7189  	}
  7190  	// match: (OR (CZEROEQZ <t> x cond) (CZERONEZ <t> (SUB x y) cond))
  7191  	// result: (SUB x (CZERONEZ <t> y cond))
  7192  	for {
  7193  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7194  			if v_0.Op != OpRISCV64CZEROEQZ {
  7195  				continue
  7196  			}
  7197  			t := v_0.Type
  7198  			cond := v_0.Args[1]
  7199  			x := v_0.Args[0]
  7200  			if v_1.Op != OpRISCV64CZERONEZ || v_1.Type != t {
  7201  				continue
  7202  			}
  7203  			_ = v_1.Args[1]
  7204  			v_1_0 := v_1.Args[0]
  7205  			if v_1_0.Op != OpRISCV64SUB {
  7206  				continue
  7207  			}
  7208  			y := v_1_0.Args[1]
  7209  			if x != v_1_0.Args[0] || cond != v_1.Args[1] {
  7210  				continue
  7211  			}
  7212  			v.reset(OpRISCV64SUB)
  7213  			v0 := b.NewValue0(v.Pos, OpRISCV64CZERONEZ, t)
  7214  			v0.AddArg2(y, cond)
  7215  			v.AddArg2(x, v0)
  7216  			return true
  7217  		}
  7218  		break
  7219  	}
  7220  	// match: (OR (CZEROEQZ <t> x cond) (CZERONEZ <t> (OR x y) cond))
  7221  	// result: (OR x (CZERONEZ <t> y cond))
  7222  	for {
  7223  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7224  			if v_0.Op != OpRISCV64CZEROEQZ {
  7225  				continue
  7226  			}
  7227  			t := v_0.Type
  7228  			cond := v_0.Args[1]
  7229  			x := v_0.Args[0]
  7230  			if v_1.Op != OpRISCV64CZERONEZ || v_1.Type != t {
  7231  				continue
  7232  			}
  7233  			_ = v_1.Args[1]
  7234  			v_1_0 := v_1.Args[0]
  7235  			if v_1_0.Op != OpRISCV64OR {
  7236  				continue
  7237  			}
  7238  			_ = v_1_0.Args[1]
  7239  			v_1_0_0 := v_1_0.Args[0]
  7240  			v_1_0_1 := v_1_0.Args[1]
  7241  			for _i1 := 0; _i1 <= 1; _i1, v_1_0_0, v_1_0_1 = _i1+1, v_1_0_1, v_1_0_0 {
  7242  				if x != v_1_0_0 {
  7243  					continue
  7244  				}
  7245  				y := v_1_0_1
  7246  				if cond != v_1.Args[1] {
  7247  					continue
  7248  				}
  7249  				v.reset(OpRISCV64OR)
  7250  				v0 := b.NewValue0(v.Pos, OpRISCV64CZERONEZ, t)
  7251  				v0.AddArg2(y, cond)
  7252  				v.AddArg2(x, v0)
  7253  				return true
  7254  			}
  7255  		}
  7256  		break
  7257  	}
  7258  	// match: (OR (CZEROEQZ <t> x cond) (CZERONEZ <t> (XOR x y) cond))
  7259  	// result: (XOR x (CZERONEZ <t> y cond))
  7260  	for {
  7261  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7262  			if v_0.Op != OpRISCV64CZEROEQZ {
  7263  				continue
  7264  			}
  7265  			t := v_0.Type
  7266  			cond := v_0.Args[1]
  7267  			x := v_0.Args[0]
  7268  			if v_1.Op != OpRISCV64CZERONEZ || v_1.Type != t {
  7269  				continue
  7270  			}
  7271  			_ = v_1.Args[1]
  7272  			v_1_0 := v_1.Args[0]
  7273  			if v_1_0.Op != OpRISCV64XOR {
  7274  				continue
  7275  			}
  7276  			_ = v_1_0.Args[1]
  7277  			v_1_0_0 := v_1_0.Args[0]
  7278  			v_1_0_1 := v_1_0.Args[1]
  7279  			for _i1 := 0; _i1 <= 1; _i1, v_1_0_0, v_1_0_1 = _i1+1, v_1_0_1, v_1_0_0 {
  7280  				if x != v_1_0_0 {
  7281  					continue
  7282  				}
  7283  				y := v_1_0_1
  7284  				if cond != v_1.Args[1] {
  7285  					continue
  7286  				}
  7287  				v.reset(OpRISCV64XOR)
  7288  				v0 := b.NewValue0(v.Pos, OpRISCV64CZERONEZ, t)
  7289  				v0.AddArg2(y, cond)
  7290  				v.AddArg2(x, v0)
  7291  				return true
  7292  			}
  7293  		}
  7294  		break
  7295  	}
  7296  	// match: (OR (CZEROEQZ <t> x cond) (CZERONEZ <t> (SUBW x y) cond))
  7297  	// result: (SUBW x (CZERONEZ <t> y cond))
  7298  	for {
  7299  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7300  			if v_0.Op != OpRISCV64CZEROEQZ {
  7301  				continue
  7302  			}
  7303  			t := v_0.Type
  7304  			cond := v_0.Args[1]
  7305  			x := v_0.Args[0]
  7306  			if v_1.Op != OpRISCV64CZERONEZ || v_1.Type != t {
  7307  				continue
  7308  			}
  7309  			_ = v_1.Args[1]
  7310  			v_1_0 := v_1.Args[0]
  7311  			if v_1_0.Op != OpRISCV64SUBW {
  7312  				continue
  7313  			}
  7314  			y := v_1_0.Args[1]
  7315  			if x != v_1_0.Args[0] || cond != v_1.Args[1] {
  7316  				continue
  7317  			}
  7318  			v.reset(OpRISCV64SUBW)
  7319  			v0 := b.NewValue0(v.Pos, OpRISCV64CZERONEZ, t)
  7320  			v0.AddArg2(y, cond)
  7321  			v.AddArg2(x, v0)
  7322  			return true
  7323  		}
  7324  		break
  7325  	}
  7326  	// match: (OR (CZEROEQZ <t> (ADD x y) cond) (CZERONEZ <t> x cond))
  7327  	// result: (ADD x (CZEROEQZ <t> y cond))
  7328  	for {
  7329  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7330  			if v_0.Op != OpRISCV64CZEROEQZ {
  7331  				continue
  7332  			}
  7333  			t := v_0.Type
  7334  			cond := v_0.Args[1]
  7335  			v_0_0 := v_0.Args[0]
  7336  			if v_0_0.Op != OpRISCV64ADD {
  7337  				continue
  7338  			}
  7339  			_ = v_0_0.Args[1]
  7340  			v_0_0_0 := v_0_0.Args[0]
  7341  			v_0_0_1 := v_0_0.Args[1]
  7342  			for _i1 := 0; _i1 <= 1; _i1, v_0_0_0, v_0_0_1 = _i1+1, v_0_0_1, v_0_0_0 {
  7343  				x := v_0_0_0
  7344  				y := v_0_0_1
  7345  				if v_1.Op != OpRISCV64CZERONEZ || v_1.Type != t {
  7346  					continue
  7347  				}
  7348  				_ = v_1.Args[1]
  7349  				if x != v_1.Args[0] || cond != v_1.Args[1] {
  7350  					continue
  7351  				}
  7352  				v.reset(OpRISCV64ADD)
  7353  				v0 := b.NewValue0(v.Pos, OpRISCV64CZEROEQZ, t)
  7354  				v0.AddArg2(y, cond)
  7355  				v.AddArg2(x, v0)
  7356  				return true
  7357  			}
  7358  		}
  7359  		break
  7360  	}
  7361  	// match: (OR (CZEROEQZ <t> (SUB x y) cond) (CZERONEZ <t> x cond))
  7362  	// result: (SUB x (CZEROEQZ <t> y cond))
  7363  	for {
  7364  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7365  			if v_0.Op != OpRISCV64CZEROEQZ {
  7366  				continue
  7367  			}
  7368  			t := v_0.Type
  7369  			cond := v_0.Args[1]
  7370  			v_0_0 := v_0.Args[0]
  7371  			if v_0_0.Op != OpRISCV64SUB {
  7372  				continue
  7373  			}
  7374  			y := v_0_0.Args[1]
  7375  			x := v_0_0.Args[0]
  7376  			if v_1.Op != OpRISCV64CZERONEZ || v_1.Type != t {
  7377  				continue
  7378  			}
  7379  			_ = v_1.Args[1]
  7380  			if x != v_1.Args[0] || cond != v_1.Args[1] {
  7381  				continue
  7382  			}
  7383  			v.reset(OpRISCV64SUB)
  7384  			v0 := b.NewValue0(v.Pos, OpRISCV64CZEROEQZ, t)
  7385  			v0.AddArg2(y, cond)
  7386  			v.AddArg2(x, v0)
  7387  			return true
  7388  		}
  7389  		break
  7390  	}
  7391  	// match: (OR (CZEROEQZ <t> (OR x y) cond) (CZERONEZ <t> x cond))
  7392  	// result: (OR x (CZEROEQZ <t> y cond))
  7393  	for {
  7394  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7395  			if v_0.Op != OpRISCV64CZEROEQZ {
  7396  				continue
  7397  			}
  7398  			t := v_0.Type
  7399  			cond := v_0.Args[1]
  7400  			v_0_0 := v_0.Args[0]
  7401  			if v_0_0.Op != OpRISCV64OR {
  7402  				continue
  7403  			}
  7404  			_ = v_0_0.Args[1]
  7405  			v_0_0_0 := v_0_0.Args[0]
  7406  			v_0_0_1 := v_0_0.Args[1]
  7407  			for _i1 := 0; _i1 <= 1; _i1, v_0_0_0, v_0_0_1 = _i1+1, v_0_0_1, v_0_0_0 {
  7408  				x := v_0_0_0
  7409  				y := v_0_0_1
  7410  				if v_1.Op != OpRISCV64CZERONEZ || v_1.Type != t {
  7411  					continue
  7412  				}
  7413  				_ = v_1.Args[1]
  7414  				if x != v_1.Args[0] || cond != v_1.Args[1] {
  7415  					continue
  7416  				}
  7417  				v.reset(OpRISCV64OR)
  7418  				v0 := b.NewValue0(v.Pos, OpRISCV64CZEROEQZ, t)
  7419  				v0.AddArg2(y, cond)
  7420  				v.AddArg2(x, v0)
  7421  				return true
  7422  			}
  7423  		}
  7424  		break
  7425  	}
  7426  	// match: (OR (CZEROEQZ <t> (XOR x y) cond) (CZERONEZ <t> x cond))
  7427  	// result: (XOR x (CZEROEQZ <t> y cond))
  7428  	for {
  7429  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7430  			if v_0.Op != OpRISCV64CZEROEQZ {
  7431  				continue
  7432  			}
  7433  			t := v_0.Type
  7434  			cond := v_0.Args[1]
  7435  			v_0_0 := v_0.Args[0]
  7436  			if v_0_0.Op != OpRISCV64XOR {
  7437  				continue
  7438  			}
  7439  			_ = v_0_0.Args[1]
  7440  			v_0_0_0 := v_0_0.Args[0]
  7441  			v_0_0_1 := v_0_0.Args[1]
  7442  			for _i1 := 0; _i1 <= 1; _i1, v_0_0_0, v_0_0_1 = _i1+1, v_0_0_1, v_0_0_0 {
  7443  				x := v_0_0_0
  7444  				y := v_0_0_1
  7445  				if v_1.Op != OpRISCV64CZERONEZ || v_1.Type != t {
  7446  					continue
  7447  				}
  7448  				_ = v_1.Args[1]
  7449  				if x != v_1.Args[0] || cond != v_1.Args[1] {
  7450  					continue
  7451  				}
  7452  				v.reset(OpRISCV64XOR)
  7453  				v0 := b.NewValue0(v.Pos, OpRISCV64CZEROEQZ, t)
  7454  				v0.AddArg2(y, cond)
  7455  				v.AddArg2(x, v0)
  7456  				return true
  7457  			}
  7458  		}
  7459  		break
  7460  	}
  7461  	// match: (OR (CZEROEQZ <t> (SUBW x y) cond) (CZERONEZ <t> x cond))
  7462  	// result: (SUBW x (CZEROEQZ <t> y cond))
  7463  	for {
  7464  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7465  			if v_0.Op != OpRISCV64CZEROEQZ {
  7466  				continue
  7467  			}
  7468  			t := v_0.Type
  7469  			cond := v_0.Args[1]
  7470  			v_0_0 := v_0.Args[0]
  7471  			if v_0_0.Op != OpRISCV64SUBW {
  7472  				continue
  7473  			}
  7474  			y := v_0_0.Args[1]
  7475  			x := v_0_0.Args[0]
  7476  			if v_1.Op != OpRISCV64CZERONEZ || v_1.Type != t {
  7477  				continue
  7478  			}
  7479  			_ = v_1.Args[1]
  7480  			if x != v_1.Args[0] || cond != v_1.Args[1] {
  7481  				continue
  7482  			}
  7483  			v.reset(OpRISCV64SUBW)
  7484  			v0 := b.NewValue0(v.Pos, OpRISCV64CZEROEQZ, t)
  7485  			v0.AddArg2(y, cond)
  7486  			v.AddArg2(x, v0)
  7487  			return true
  7488  		}
  7489  		break
  7490  	}
  7491  	// match: (OR x:(CZEROEQZ z cond) (CZERONEZ y:(AND z _) cond))
  7492  	// result: (OR y x)
  7493  	for {
  7494  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7495  			x := v_0
  7496  			if x.Op != OpRISCV64CZEROEQZ {
  7497  				continue
  7498  			}
  7499  			cond := x.Args[1]
  7500  			z := x.Args[0]
  7501  			if v_1.Op != OpRISCV64CZERONEZ {
  7502  				continue
  7503  			}
  7504  			_ = v_1.Args[1]
  7505  			y := v_1.Args[0]
  7506  			if y.Op != OpRISCV64AND {
  7507  				continue
  7508  			}
  7509  			y_0 := y.Args[0]
  7510  			y_1 := y.Args[1]
  7511  			for _i1 := 0; _i1 <= 1; _i1, y_0, y_1 = _i1+1, y_1, y_0 {
  7512  				if z != y_0 || cond != v_1.Args[1] {
  7513  					continue
  7514  				}
  7515  				v.reset(OpRISCV64OR)
  7516  				v.AddArg2(y, x)
  7517  				return true
  7518  			}
  7519  		}
  7520  		break
  7521  	}
  7522  	// match: (OR (CZEROEQZ x:(AND z _) cond) y:(CZERONEZ z cond))
  7523  	// result: (OR x y)
  7524  	for {
  7525  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7526  			if v_0.Op != OpRISCV64CZEROEQZ {
  7527  				continue
  7528  			}
  7529  			cond := v_0.Args[1]
  7530  			x := v_0.Args[0]
  7531  			if x.Op != OpRISCV64AND {
  7532  				continue
  7533  			}
  7534  			x_0 := x.Args[0]
  7535  			x_1 := x.Args[1]
  7536  			for _i1 := 0; _i1 <= 1; _i1, x_0, x_1 = _i1+1, x_1, x_0 {
  7537  				z := x_0
  7538  				y := v_1
  7539  				if y.Op != OpRISCV64CZERONEZ {
  7540  					continue
  7541  				}
  7542  				_ = y.Args[1]
  7543  				if z != y.Args[0] || cond != y.Args[1] {
  7544  					continue
  7545  				}
  7546  				v.reset(OpRISCV64OR)
  7547  				v.AddArg2(x, y)
  7548  				return true
  7549  			}
  7550  		}
  7551  		break
  7552  	}
  7553  	// match: (OR x:(CZEROEQZ z cond) (CZERONEZ y:(ANDI <t> [c] z) cond))
  7554  	// result: (OR y x)
  7555  	for {
  7556  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7557  			x := v_0
  7558  			if x.Op != OpRISCV64CZEROEQZ {
  7559  				continue
  7560  			}
  7561  			cond := x.Args[1]
  7562  			z := x.Args[0]
  7563  			if v_1.Op != OpRISCV64CZERONEZ {
  7564  				continue
  7565  			}
  7566  			_ = v_1.Args[1]
  7567  			y := v_1.Args[0]
  7568  			if y.Op != OpRISCV64ANDI {
  7569  				continue
  7570  			}
  7571  			if z != y.Args[0] || cond != v_1.Args[1] {
  7572  				continue
  7573  			}
  7574  			v.reset(OpRISCV64OR)
  7575  			v.AddArg2(y, x)
  7576  			return true
  7577  		}
  7578  		break
  7579  	}
  7580  	// match: (OR (CZEROEQZ x:(ANDI <t> [c] z) cond) y:(CZERONEZ z cond))
  7581  	// result: (OR x y)
  7582  	for {
  7583  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7584  			if v_0.Op != OpRISCV64CZEROEQZ {
  7585  				continue
  7586  			}
  7587  			cond := v_0.Args[1]
  7588  			x := v_0.Args[0]
  7589  			if x.Op != OpRISCV64ANDI {
  7590  				continue
  7591  			}
  7592  			z := x.Args[0]
  7593  			y := v_1
  7594  			if y.Op != OpRISCV64CZERONEZ {
  7595  				continue
  7596  			}
  7597  			_ = y.Args[1]
  7598  			if z != y.Args[0] || cond != y.Args[1] {
  7599  				continue
  7600  			}
  7601  			v.reset(OpRISCV64OR)
  7602  			v.AddArg2(x, y)
  7603  			return true
  7604  		}
  7605  		break
  7606  	}
  7607  	// match: (OR (CZEROEQZ <t> x cond) (CZERONEZ <t> (ADDI [c] x) cond))
  7608  	// result: (ADD x (CZERONEZ <t> (MOVDconst [c]) cond))
  7609  	for {
  7610  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7611  			if v_0.Op != OpRISCV64CZEROEQZ {
  7612  				continue
  7613  			}
  7614  			t := v_0.Type
  7615  			cond := v_0.Args[1]
  7616  			x := v_0.Args[0]
  7617  			if v_1.Op != OpRISCV64CZERONEZ || v_1.Type != t {
  7618  				continue
  7619  			}
  7620  			_ = v_1.Args[1]
  7621  			v_1_0 := v_1.Args[0]
  7622  			if v_1_0.Op != OpRISCV64ADDI {
  7623  				continue
  7624  			}
  7625  			c := auxIntToInt64(v_1_0.AuxInt)
  7626  			if x != v_1_0.Args[0] || cond != v_1.Args[1] {
  7627  				continue
  7628  			}
  7629  			v.reset(OpRISCV64ADD)
  7630  			v0 := b.NewValue0(v.Pos, OpRISCV64CZERONEZ, t)
  7631  			v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  7632  			v1.AuxInt = int64ToAuxInt(c)
  7633  			v0.AddArg2(v1, cond)
  7634  			v.AddArg2(x, v0)
  7635  			return true
  7636  		}
  7637  		break
  7638  	}
  7639  	// match: (OR (CZEROEQZ <t> x cond) (CZERONEZ <t> (ORI [c] x) cond))
  7640  	// result: (OR x (CZERONEZ <t> (MOVDconst [c]) cond))
  7641  	for {
  7642  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7643  			if v_0.Op != OpRISCV64CZEROEQZ {
  7644  				continue
  7645  			}
  7646  			t := v_0.Type
  7647  			cond := v_0.Args[1]
  7648  			x := v_0.Args[0]
  7649  			if v_1.Op != OpRISCV64CZERONEZ || v_1.Type != t {
  7650  				continue
  7651  			}
  7652  			_ = v_1.Args[1]
  7653  			v_1_0 := v_1.Args[0]
  7654  			if v_1_0.Op != OpRISCV64ORI {
  7655  				continue
  7656  			}
  7657  			c := auxIntToInt64(v_1_0.AuxInt)
  7658  			if x != v_1_0.Args[0] || cond != v_1.Args[1] {
  7659  				continue
  7660  			}
  7661  			v.reset(OpRISCV64OR)
  7662  			v0 := b.NewValue0(v.Pos, OpRISCV64CZERONEZ, t)
  7663  			v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  7664  			v1.AuxInt = int64ToAuxInt(c)
  7665  			v0.AddArg2(v1, cond)
  7666  			v.AddArg2(x, v0)
  7667  			return true
  7668  		}
  7669  		break
  7670  	}
  7671  	// match: (OR (CZEROEQZ <t> x cond) (CZERONEZ <t> (XORI [c] x) cond))
  7672  	// result: (XOR x (CZERONEZ <t> (MOVDconst [c]) cond))
  7673  	for {
  7674  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7675  			if v_0.Op != OpRISCV64CZEROEQZ {
  7676  				continue
  7677  			}
  7678  			t := v_0.Type
  7679  			cond := v_0.Args[1]
  7680  			x := v_0.Args[0]
  7681  			if v_1.Op != OpRISCV64CZERONEZ || v_1.Type != t {
  7682  				continue
  7683  			}
  7684  			_ = v_1.Args[1]
  7685  			v_1_0 := v_1.Args[0]
  7686  			if v_1_0.Op != OpRISCV64XORI {
  7687  				continue
  7688  			}
  7689  			c := auxIntToInt64(v_1_0.AuxInt)
  7690  			if x != v_1_0.Args[0] || cond != v_1.Args[1] {
  7691  				continue
  7692  			}
  7693  			v.reset(OpRISCV64XOR)
  7694  			v0 := b.NewValue0(v.Pos, OpRISCV64CZERONEZ, t)
  7695  			v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  7696  			v1.AuxInt = int64ToAuxInt(c)
  7697  			v0.AddArg2(v1, cond)
  7698  			v.AddArg2(x, v0)
  7699  			return true
  7700  		}
  7701  		break
  7702  	}
  7703  	// match: (OR (CZEROEQZ <t> (ADDI [c] x) cond) (CZERONEZ <t> x cond))
  7704  	// result: (ADD x (CZEROEQZ <t> (MOVDconst [c]) cond))
  7705  	for {
  7706  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7707  			if v_0.Op != OpRISCV64CZEROEQZ {
  7708  				continue
  7709  			}
  7710  			t := v_0.Type
  7711  			cond := v_0.Args[1]
  7712  			v_0_0 := v_0.Args[0]
  7713  			if v_0_0.Op != OpRISCV64ADDI {
  7714  				continue
  7715  			}
  7716  			c := auxIntToInt64(v_0_0.AuxInt)
  7717  			x := v_0_0.Args[0]
  7718  			if v_1.Op != OpRISCV64CZERONEZ || v_1.Type != t {
  7719  				continue
  7720  			}
  7721  			_ = v_1.Args[1]
  7722  			if x != v_1.Args[0] || cond != v_1.Args[1] {
  7723  				continue
  7724  			}
  7725  			v.reset(OpRISCV64ADD)
  7726  			v0 := b.NewValue0(v.Pos, OpRISCV64CZEROEQZ, t)
  7727  			v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  7728  			v1.AuxInt = int64ToAuxInt(c)
  7729  			v0.AddArg2(v1, cond)
  7730  			v.AddArg2(x, v0)
  7731  			return true
  7732  		}
  7733  		break
  7734  	}
  7735  	// match: (OR (CZEROEQZ <t> (ORI [c] x) cond) (CZERONEZ <t> x cond))
  7736  	// result: (OR x (CZEROEQZ <t> (MOVDconst [c]) cond))
  7737  	for {
  7738  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7739  			if v_0.Op != OpRISCV64CZEROEQZ {
  7740  				continue
  7741  			}
  7742  			t := v_0.Type
  7743  			cond := v_0.Args[1]
  7744  			v_0_0 := v_0.Args[0]
  7745  			if v_0_0.Op != OpRISCV64ORI {
  7746  				continue
  7747  			}
  7748  			c := auxIntToInt64(v_0_0.AuxInt)
  7749  			x := v_0_0.Args[0]
  7750  			if v_1.Op != OpRISCV64CZERONEZ || v_1.Type != t {
  7751  				continue
  7752  			}
  7753  			_ = v_1.Args[1]
  7754  			if x != v_1.Args[0] || cond != v_1.Args[1] {
  7755  				continue
  7756  			}
  7757  			v.reset(OpRISCV64OR)
  7758  			v0 := b.NewValue0(v.Pos, OpRISCV64CZEROEQZ, t)
  7759  			v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  7760  			v1.AuxInt = int64ToAuxInt(c)
  7761  			v0.AddArg2(v1, cond)
  7762  			v.AddArg2(x, v0)
  7763  			return true
  7764  		}
  7765  		break
  7766  	}
  7767  	// match: (OR (CZEROEQZ <t> (XORI [c] x) cond) (CZERONEZ <t> x cond))
  7768  	// result: (XOR x (CZEROEQZ <t> (MOVDconst [c]) cond))
  7769  	for {
  7770  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7771  			if v_0.Op != OpRISCV64CZEROEQZ {
  7772  				continue
  7773  			}
  7774  			t := v_0.Type
  7775  			cond := v_0.Args[1]
  7776  			v_0_0 := v_0.Args[0]
  7777  			if v_0_0.Op != OpRISCV64XORI {
  7778  				continue
  7779  			}
  7780  			c := auxIntToInt64(v_0_0.AuxInt)
  7781  			x := v_0_0.Args[0]
  7782  			if v_1.Op != OpRISCV64CZERONEZ || v_1.Type != t {
  7783  				continue
  7784  			}
  7785  			_ = v_1.Args[1]
  7786  			if x != v_1.Args[0] || cond != v_1.Args[1] {
  7787  				continue
  7788  			}
  7789  			v.reset(OpRISCV64XOR)
  7790  			v0 := b.NewValue0(v.Pos, OpRISCV64CZEROEQZ, t)
  7791  			v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  7792  			v1.AuxInt = int64ToAuxInt(c)
  7793  			v0.AddArg2(v1, cond)
  7794  			v.AddArg2(x, v0)
  7795  			return true
  7796  		}
  7797  		break
  7798  	}
  7799  	return false
  7800  }
  7801  func rewriteValueRISCV64_OpRISCV64ORI(v *Value) bool {
  7802  	v_0 := v.Args[0]
  7803  	// match: (ORI [0] x)
  7804  	// result: x
  7805  	for {
  7806  		if auxIntToInt64(v.AuxInt) != 0 {
  7807  			break
  7808  		}
  7809  		x := v_0
  7810  		v.copyOf(x)
  7811  		return true
  7812  	}
  7813  	// match: (ORI [-1] x)
  7814  	// result: (MOVDconst [-1])
  7815  	for {
  7816  		if auxIntToInt64(v.AuxInt) != -1 {
  7817  			break
  7818  		}
  7819  		v.reset(OpRISCV64MOVDconst)
  7820  		v.AuxInt = int64ToAuxInt(-1)
  7821  		return true
  7822  	}
  7823  	// match: (ORI [x] (MOVDconst [y]))
  7824  	// result: (MOVDconst [x | y])
  7825  	for {
  7826  		x := auxIntToInt64(v.AuxInt)
  7827  		if v_0.Op != OpRISCV64MOVDconst {
  7828  			break
  7829  		}
  7830  		y := auxIntToInt64(v_0.AuxInt)
  7831  		v.reset(OpRISCV64MOVDconst)
  7832  		v.AuxInt = int64ToAuxInt(x | y)
  7833  		return true
  7834  	}
  7835  	// match: (ORI [x] (ORI [y] z))
  7836  	// result: (ORI [x | y] z)
  7837  	for {
  7838  		x := auxIntToInt64(v.AuxInt)
  7839  		if v_0.Op != OpRISCV64ORI {
  7840  			break
  7841  		}
  7842  		y := auxIntToInt64(v_0.AuxInt)
  7843  		z := v_0.Args[0]
  7844  		v.reset(OpRISCV64ORI)
  7845  		v.AuxInt = int64ToAuxInt(x | y)
  7846  		v.AddArg(z)
  7847  		return true
  7848  	}
  7849  	return false
  7850  }
  7851  func rewriteValueRISCV64_OpRISCV64ORN(v *Value) bool {
  7852  	v_1 := v.Args[1]
  7853  	v_0 := v.Args[0]
  7854  	// match: (ORN x x)
  7855  	// result: (MOVDconst [-1])
  7856  	for {
  7857  		x := v_0
  7858  		if x != v_1 {
  7859  			break
  7860  		}
  7861  		v.reset(OpRISCV64MOVDconst)
  7862  		v.AuxInt = int64ToAuxInt(-1)
  7863  		return true
  7864  	}
  7865  	return false
  7866  }
  7867  func rewriteValueRISCV64_OpRISCV64ROL(v *Value) bool {
  7868  	v_1 := v.Args[1]
  7869  	v_0 := v.Args[0]
  7870  	// match: (ROL x (MOVDconst [val]))
  7871  	// result: (RORI [-val&63] x)
  7872  	for {
  7873  		x := v_0
  7874  		if v_1.Op != OpRISCV64MOVDconst {
  7875  			break
  7876  		}
  7877  		val := auxIntToInt64(v_1.AuxInt)
  7878  		v.reset(OpRISCV64RORI)
  7879  		v.AuxInt = int64ToAuxInt(-val & 63)
  7880  		v.AddArg(x)
  7881  		return true
  7882  	}
  7883  	// match: (ROL x (NEG y))
  7884  	// result: (ROR x y)
  7885  	for {
  7886  		x := v_0
  7887  		if v_1.Op != OpRISCV64NEG {
  7888  			break
  7889  		}
  7890  		y := v_1.Args[0]
  7891  		v.reset(OpRISCV64ROR)
  7892  		v.AddArg2(x, y)
  7893  		return true
  7894  	}
  7895  	return false
  7896  }
  7897  func rewriteValueRISCV64_OpRISCV64ROLW(v *Value) bool {
  7898  	v_1 := v.Args[1]
  7899  	v_0 := v.Args[0]
  7900  	// match: (ROLW x (MOVDconst [val]))
  7901  	// result: (RORIW [-val&31] x)
  7902  	for {
  7903  		x := v_0
  7904  		if v_1.Op != OpRISCV64MOVDconst {
  7905  			break
  7906  		}
  7907  		val := auxIntToInt64(v_1.AuxInt)
  7908  		v.reset(OpRISCV64RORIW)
  7909  		v.AuxInt = int64ToAuxInt(-val & 31)
  7910  		v.AddArg(x)
  7911  		return true
  7912  	}
  7913  	// match: (ROLW x (NEG y))
  7914  	// result: (RORW x y)
  7915  	for {
  7916  		x := v_0
  7917  		if v_1.Op != OpRISCV64NEG {
  7918  			break
  7919  		}
  7920  		y := v_1.Args[0]
  7921  		v.reset(OpRISCV64RORW)
  7922  		v.AddArg2(x, y)
  7923  		return true
  7924  	}
  7925  	return false
  7926  }
  7927  func rewriteValueRISCV64_OpRISCV64ROR(v *Value) bool {
  7928  	v_1 := v.Args[1]
  7929  	v_0 := v.Args[0]
  7930  	// match: (ROR x (MOVDconst [val]))
  7931  	// result: (RORI [val&63] x)
  7932  	for {
  7933  		x := v_0
  7934  		if v_1.Op != OpRISCV64MOVDconst {
  7935  			break
  7936  		}
  7937  		val := auxIntToInt64(v_1.AuxInt)
  7938  		v.reset(OpRISCV64RORI)
  7939  		v.AuxInt = int64ToAuxInt(val & 63)
  7940  		v.AddArg(x)
  7941  		return true
  7942  	}
  7943  	return false
  7944  }
  7945  func rewriteValueRISCV64_OpRISCV64RORW(v *Value) bool {
  7946  	v_1 := v.Args[1]
  7947  	v_0 := v.Args[0]
  7948  	// match: (RORW x (MOVDconst [val]))
  7949  	// result: (RORIW [val&31] x)
  7950  	for {
  7951  		x := v_0
  7952  		if v_1.Op != OpRISCV64MOVDconst {
  7953  			break
  7954  		}
  7955  		val := auxIntToInt64(v_1.AuxInt)
  7956  		v.reset(OpRISCV64RORIW)
  7957  		v.AuxInt = int64ToAuxInt(val & 31)
  7958  		v.AddArg(x)
  7959  		return true
  7960  	}
  7961  	return false
  7962  }
  7963  func rewriteValueRISCV64_OpRISCV64SEQZ(v *Value) bool {
  7964  	v_0 := v.Args[0]
  7965  	b := v.Block
  7966  	typ := &b.Func.Config.Types
  7967  	// match: (SEQZ (NEG x))
  7968  	// result: (SEQZ x)
  7969  	for {
  7970  		if v_0.Op != OpRISCV64NEG {
  7971  			break
  7972  		}
  7973  		x := v_0.Args[0]
  7974  		v.reset(OpRISCV64SEQZ)
  7975  		v.AddArg(x)
  7976  		return true
  7977  	}
  7978  	// match: (SEQZ (SEQZ x))
  7979  	// result: (SNEZ x)
  7980  	for {
  7981  		if v_0.Op != OpRISCV64SEQZ {
  7982  			break
  7983  		}
  7984  		x := v_0.Args[0]
  7985  		v.reset(OpRISCV64SNEZ)
  7986  		v.AddArg(x)
  7987  		return true
  7988  	}
  7989  	// match: (SEQZ (SNEZ x))
  7990  	// result: (SEQZ x)
  7991  	for {
  7992  		if v_0.Op != OpRISCV64SNEZ {
  7993  			break
  7994  		}
  7995  		x := v_0.Args[0]
  7996  		v.reset(OpRISCV64SEQZ)
  7997  		v.AddArg(x)
  7998  		return true
  7999  	}
  8000  	// match: (SEQZ (ANDI [c] (FCLASSD (FNEGD x))))
  8001  	// result: (SEQZ (ANDI <typ.Int64> [(c&0b11_0000_0000)|int64(bits.Reverse8(uint8(c))&0b1111_1111)] (FCLASSD x)))
  8002  	for {
  8003  		if v_0.Op != OpRISCV64ANDI {
  8004  			break
  8005  		}
  8006  		c := auxIntToInt64(v_0.AuxInt)
  8007  		v_0_0 := v_0.Args[0]
  8008  		if v_0_0.Op != OpRISCV64FCLASSD {
  8009  			break
  8010  		}
  8011  		v_0_0_0 := v_0_0.Args[0]
  8012  		if v_0_0_0.Op != OpRISCV64FNEGD {
  8013  			break
  8014  		}
  8015  		x := v_0_0_0.Args[0]
  8016  		v.reset(OpRISCV64SEQZ)
  8017  		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Int64)
  8018  		v0.AuxInt = int64ToAuxInt((c & 0b11_0000_0000) | int64(bits.Reverse8(uint8(c))&0b1111_1111))
  8019  		v1 := b.NewValue0(v.Pos, OpRISCV64FCLASSD, typ.Int64)
  8020  		v1.AddArg(x)
  8021  		v0.AddArg(v1)
  8022  		v.AddArg(v0)
  8023  		return true
  8024  	}
  8025  	// match: (SEQZ (ANDI [c] (FCLASSD (FABSD x))))
  8026  	// result: (SEQZ (ANDI <typ.Int64> [(c&0b11_1111_0000)|int64(bits.Reverse8(uint8(c))&0b0000_1111)] (FCLASSD x)))
  8027  	for {
  8028  		if v_0.Op != OpRISCV64ANDI {
  8029  			break
  8030  		}
  8031  		c := auxIntToInt64(v_0.AuxInt)
  8032  		v_0_0 := v_0.Args[0]
  8033  		if v_0_0.Op != OpRISCV64FCLASSD {
  8034  			break
  8035  		}
  8036  		v_0_0_0 := v_0_0.Args[0]
  8037  		if v_0_0_0.Op != OpRISCV64FABSD {
  8038  			break
  8039  		}
  8040  		x := v_0_0_0.Args[0]
  8041  		v.reset(OpRISCV64SEQZ)
  8042  		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Int64)
  8043  		v0.AuxInt = int64ToAuxInt((c & 0b11_1111_0000) | int64(bits.Reverse8(uint8(c))&0b0000_1111))
  8044  		v1 := b.NewValue0(v.Pos, OpRISCV64FCLASSD, typ.Int64)
  8045  		v1.AddArg(x)
  8046  		v0.AddArg(v1)
  8047  		v.AddArg(v0)
  8048  		return true
  8049  	}
  8050  	return false
  8051  }
  8052  func rewriteValueRISCV64_OpRISCV64SLL(v *Value) bool {
  8053  	v_1 := v.Args[1]
  8054  	v_0 := v.Args[0]
  8055  	// match: (SLL x (MOVDconst [val]))
  8056  	// result: (SLLI [val&63] x)
  8057  	for {
  8058  		x := v_0
  8059  		if v_1.Op != OpRISCV64MOVDconst {
  8060  			break
  8061  		}
  8062  		val := auxIntToInt64(v_1.AuxInt)
  8063  		v.reset(OpRISCV64SLLI)
  8064  		v.AuxInt = int64ToAuxInt(val & 63)
  8065  		v.AddArg(x)
  8066  		return true
  8067  	}
  8068  	return false
  8069  }
  8070  func rewriteValueRISCV64_OpRISCV64SLLI(v *Value) bool {
  8071  	v_0 := v.Args[0]
  8072  	// match: (SLLI [x] (MOVDconst [y]))
  8073  	// cond: is32Bit(y << uint32(x))
  8074  	// result: (MOVDconst [y << uint32(x)])
  8075  	for {
  8076  		x := auxIntToInt64(v.AuxInt)
  8077  		if v_0.Op != OpRISCV64MOVDconst {
  8078  			break
  8079  		}
  8080  		y := auxIntToInt64(v_0.AuxInt)
  8081  		if !(is32Bit(y << uint32(x))) {
  8082  			break
  8083  		}
  8084  		v.reset(OpRISCV64MOVDconst)
  8085  		v.AuxInt = int64ToAuxInt(y << uint32(x))
  8086  		return true
  8087  	}
  8088  	// match: (SLLI <t> [c] (ADD x x))
  8089  	// cond: c < t.Size() * 8 - 1
  8090  	// result: (SLLI [c+1] x)
  8091  	for {
  8092  		t := v.Type
  8093  		c := auxIntToInt64(v.AuxInt)
  8094  		if v_0.Op != OpRISCV64ADD {
  8095  			break
  8096  		}
  8097  		x := v_0.Args[1]
  8098  		if x != v_0.Args[0] || !(c < t.Size()*8-1) {
  8099  			break
  8100  		}
  8101  		v.reset(OpRISCV64SLLI)
  8102  		v.AuxInt = int64ToAuxInt(c + 1)
  8103  		v.AddArg(x)
  8104  		return true
  8105  	}
  8106  	// match: (SLLI <t> [c] (ADD x x))
  8107  	// cond: c >= t.Size() * 8 - 1
  8108  	// result: (MOVDconst [0])
  8109  	for {
  8110  		t := v.Type
  8111  		c := auxIntToInt64(v.AuxInt)
  8112  		if v_0.Op != OpRISCV64ADD {
  8113  			break
  8114  		}
  8115  		x := v_0.Args[1]
  8116  		if x != v_0.Args[0] || !(c >= t.Size()*8-1) {
  8117  			break
  8118  		}
  8119  		v.reset(OpRISCV64MOVDconst)
  8120  		v.AuxInt = int64ToAuxInt(0)
  8121  		return true
  8122  	}
  8123  	return false
  8124  }
  8125  func rewriteValueRISCV64_OpRISCV64SLLW(v *Value) bool {
  8126  	v_1 := v.Args[1]
  8127  	v_0 := v.Args[0]
  8128  	// match: (SLLW x (MOVDconst [val]))
  8129  	// result: (SLLIW [val&31] x)
  8130  	for {
  8131  		x := v_0
  8132  		if v_1.Op != OpRISCV64MOVDconst {
  8133  			break
  8134  		}
  8135  		val := auxIntToInt64(v_1.AuxInt)
  8136  		v.reset(OpRISCV64SLLIW)
  8137  		v.AuxInt = int64ToAuxInt(val & 31)
  8138  		v.AddArg(x)
  8139  		return true
  8140  	}
  8141  	return false
  8142  }
  8143  func rewriteValueRISCV64_OpRISCV64SLT(v *Value) bool {
  8144  	v_1 := v.Args[1]
  8145  	v_0 := v.Args[0]
  8146  	// match: (SLT x (MOVDconst [val]))
  8147  	// cond: is12Bit(val)
  8148  	// result: (SLTI [val] x)
  8149  	for {
  8150  		x := v_0
  8151  		if v_1.Op != OpRISCV64MOVDconst {
  8152  			break
  8153  		}
  8154  		val := auxIntToInt64(v_1.AuxInt)
  8155  		if !(is12Bit(val)) {
  8156  			break
  8157  		}
  8158  		v.reset(OpRISCV64SLTI)
  8159  		v.AuxInt = int64ToAuxInt(val)
  8160  		v.AddArg(x)
  8161  		return true
  8162  	}
  8163  	// match: (SLT x x)
  8164  	// result: (MOVDconst [0])
  8165  	for {
  8166  		x := v_0
  8167  		if x != v_1 {
  8168  			break
  8169  		}
  8170  		v.reset(OpRISCV64MOVDconst)
  8171  		v.AuxInt = int64ToAuxInt(0)
  8172  		return true
  8173  	}
  8174  	return false
  8175  }
  8176  func rewriteValueRISCV64_OpRISCV64SLTI(v *Value) bool {
  8177  	v_0 := v.Args[0]
  8178  	// match: (SLTI [x] (MOVDconst [y]))
  8179  	// result: (MOVDconst [b2i(int64(y) < int64(x))])
  8180  	for {
  8181  		x := auxIntToInt64(v.AuxInt)
  8182  		if v_0.Op != OpRISCV64MOVDconst {
  8183  			break
  8184  		}
  8185  		y := auxIntToInt64(v_0.AuxInt)
  8186  		v.reset(OpRISCV64MOVDconst)
  8187  		v.AuxInt = int64ToAuxInt(b2i(int64(y) < int64(x)))
  8188  		return true
  8189  	}
  8190  	// match: (SLTI [x] (ANDI [y] _))
  8191  	// cond: y >= 0 && int64(y) < int64(x)
  8192  	// result: (MOVDconst [1])
  8193  	for {
  8194  		x := auxIntToInt64(v.AuxInt)
  8195  		if v_0.Op != OpRISCV64ANDI {
  8196  			break
  8197  		}
  8198  		y := auxIntToInt64(v_0.AuxInt)
  8199  		if !(y >= 0 && int64(y) < int64(x)) {
  8200  			break
  8201  		}
  8202  		v.reset(OpRISCV64MOVDconst)
  8203  		v.AuxInt = int64ToAuxInt(1)
  8204  		return true
  8205  	}
  8206  	return false
  8207  }
  8208  func rewriteValueRISCV64_OpRISCV64SLTIU(v *Value) bool {
  8209  	v_0 := v.Args[0]
  8210  	// match: (SLTIU [x] (MOVDconst [y]))
  8211  	// result: (MOVDconst [b2i(uint64(y) < uint64(x))])
  8212  	for {
  8213  		x := auxIntToInt64(v.AuxInt)
  8214  		if v_0.Op != OpRISCV64MOVDconst {
  8215  			break
  8216  		}
  8217  		y := auxIntToInt64(v_0.AuxInt)
  8218  		v.reset(OpRISCV64MOVDconst)
  8219  		v.AuxInt = int64ToAuxInt(b2i(uint64(y) < uint64(x)))
  8220  		return true
  8221  	}
  8222  	// match: (SLTIU [x] (ANDI [y] _))
  8223  	// cond: y >= 0 && uint64(y) < uint64(x)
  8224  	// result: (MOVDconst [1])
  8225  	for {
  8226  		x := auxIntToInt64(v.AuxInt)
  8227  		if v_0.Op != OpRISCV64ANDI {
  8228  			break
  8229  		}
  8230  		y := auxIntToInt64(v_0.AuxInt)
  8231  		if !(y >= 0 && uint64(y) < uint64(x)) {
  8232  			break
  8233  		}
  8234  		v.reset(OpRISCV64MOVDconst)
  8235  		v.AuxInt = int64ToAuxInt(1)
  8236  		return true
  8237  	}
  8238  	// match: (SLTIU [x] (ORI [y] _))
  8239  	// cond: y >= 0 && uint64(y) >= uint64(x)
  8240  	// result: (MOVDconst [0])
  8241  	for {
  8242  		x := auxIntToInt64(v.AuxInt)
  8243  		if v_0.Op != OpRISCV64ORI {
  8244  			break
  8245  		}
  8246  		y := auxIntToInt64(v_0.AuxInt)
  8247  		if !(y >= 0 && uint64(y) >= uint64(x)) {
  8248  			break
  8249  		}
  8250  		v.reset(OpRISCV64MOVDconst)
  8251  		v.AuxInt = int64ToAuxInt(0)
  8252  		return true
  8253  	}
  8254  	return false
  8255  }
  8256  func rewriteValueRISCV64_OpRISCV64SLTU(v *Value) bool {
  8257  	v_1 := v.Args[1]
  8258  	v_0 := v.Args[0]
  8259  	// match: (SLTU x (MOVDconst [val]))
  8260  	// cond: is12Bit(val)
  8261  	// result: (SLTIU [val] x)
  8262  	for {
  8263  		x := v_0
  8264  		if v_1.Op != OpRISCV64MOVDconst {
  8265  			break
  8266  		}
  8267  		val := auxIntToInt64(v_1.AuxInt)
  8268  		if !(is12Bit(val)) {
  8269  			break
  8270  		}
  8271  		v.reset(OpRISCV64SLTIU)
  8272  		v.AuxInt = int64ToAuxInt(val)
  8273  		v.AddArg(x)
  8274  		return true
  8275  	}
  8276  	// match: (SLTU x x)
  8277  	// result: (MOVDconst [0])
  8278  	for {
  8279  		x := v_0
  8280  		if x != v_1 {
  8281  			break
  8282  		}
  8283  		v.reset(OpRISCV64MOVDconst)
  8284  		v.AuxInt = int64ToAuxInt(0)
  8285  		return true
  8286  	}
  8287  	return false
  8288  }
// rewriteValueRISCV64_OpRISCV64SNEZ applies rewrite rules to an SNEZ
// (set-if-not-equal-zero) value: it strips operations that do not change
// zero-ness (NEG, SEQZ, SNEZ), and rewrites FCLASSD class masks applied
// to FNEGD/FABSD inputs by transforming the mask constant instead, so
// the float negate/abs can be dropped. Rules are tried in order; the
// first match mutates v and returns true. Returns false if none apply.
func rewriteValueRISCV64_OpRISCV64SNEZ(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (SNEZ (NEG x))
	// result: (SNEZ x)
	for {
		if v_0.Op != OpRISCV64NEG {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SNEZ)
		v.AddArg(x)
		return true
	}
	// match: (SNEZ (SEQZ x))
	// result: (SEQZ x)
	for {
		if v_0.Op != OpRISCV64SEQZ {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SEQZ)
		v.AddArg(x)
		return true
	}
	// match: (SNEZ (SNEZ x))
	// result: (SNEZ x)
	for {
		if v_0.Op != OpRISCV64SNEZ {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SNEZ)
		v.AddArg(x)
		return true
	}
	// match: (SNEZ (ANDI [c] (FCLASSD (FNEGD x))))
	// result: (SNEZ (ANDI <typ.Int64> [(c&0b11_0000_0000)|int64(bits.Reverse8(uint8(c))&0b1111_1111)] (FCLASSD x)))
	for {
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpRISCV64FCLASSD {
			break
		}
		v_0_0_0 := v_0_0.Args[0]
		if v_0_0_0.Op != OpRISCV64FNEGD {
			break
		}
		x := v_0_0_0.Args[0]
		v.reset(OpRISCV64SNEZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Int64)
		// Reverse the low 8 class bits (sign-dependent classes) of the
		// mask, keeping the top 2 (NaN classes), to absorb the FNEGD.
		v0.AuxInt = int64ToAuxInt((c & 0b11_0000_0000) | int64(bits.Reverse8(uint8(c))&0b1111_1111))
		v1 := b.NewValue0(v.Pos, OpRISCV64FCLASSD, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (SNEZ (ANDI [c] (FCLASSD (FABSD x))))
	// result: (SNEZ (ANDI <typ.Int64> [(c&0b11_1111_0000)|int64(bits.Reverse8(uint8(c))&0b0000_1111)] (FCLASSD x)))
	for {
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpRISCV64FCLASSD {
			break
		}
		v_0_0_0 := v_0_0.Args[0]
		if v_0_0_0.Op != OpRISCV64FABSD {
			break
		}
		x := v_0_0_0.Args[0]
		v.reset(OpRISCV64SNEZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Int64)
		// Mirror the low 4 (negative-class) bits into the mask so the
		// FABSD can be dropped from the FCLASSD operand.
		v0.AuxInt = int64ToAuxInt((c & 0b11_1111_0000) | int64(bits.Reverse8(uint8(c))&0b0000_1111))
		v1 := b.NewValue0(v.Pos, OpRISCV64FCLASSD, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRA folds a constant shift amount into the
// immediate form: (SRA x (MOVDconst [val])) => (SRAI [val&63] x). The
// amount is masked to 6 bits (64-bit shift). Returns true on rewrite.
func rewriteValueRISCV64_OpRISCV64SRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRA x (MOVDconst [val]))
	// result: (SRAI [val&63] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(val & 63)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRAI simplifies immediate arithmetic right
// shifts: small shifts of a sign-extended word become SRAIW; shifts that
// consume all data bits of a sign-extended byte/halfword collapse to a
// sign-bit broadcast (SLLI then SRAI [63]); and shifts of constants are
// evaluated at compile time. Rules are tried in order; the [0,31] SRAIW
// rule must precede the x>=32 rule. Returns true if any rule fires.
func rewriteValueRISCV64_OpRISCV64SRAI(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (SRAI [x] (MOVWreg y))
	// cond: x >= 0 && x <= 31
	// result: (SRAIW [x] y)
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVWreg {
			break
		}
		y := v_0.Args[0]
		if !(x >= 0 && x <= 31) {
			break
		}
		v.reset(OpRISCV64SRAIW)
		v.AuxInt = int64ToAuxInt(x)
		v.AddArg(y)
		return true
	}
	// match: (SRAI <t> [x] (MOVBreg y))
	// cond: x >= 8
	// result: (SRAI [63] (SLLI <t> [56] y))
	for {
		t := v.Type
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVBreg {
			break
		}
		y := v_0.Args[0]
		if !(x >= 8) {
			break
		}
		// Shifting out all 8 data bits leaves only the sign; produce it
		// directly by moving the byte's sign bit to bit 63 and smearing.
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(63)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
		v0.AuxInt = int64ToAuxInt(56)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (SRAI <t> [x] (MOVHreg y))
	// cond: x >= 16
	// result: (SRAI [63] (SLLI <t> [48] y))
	for {
		t := v.Type
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVHreg {
			break
		}
		y := v_0.Args[0]
		if !(x >= 16) {
			break
		}
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(63)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
		v0.AuxInt = int64ToAuxInt(48)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (SRAI [x] (MOVWreg y))
	// cond: x >= 32
	// result: (SRAIW [31] y)
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVWreg {
			break
		}
		y := v_0.Args[0]
		if !(x >= 32) {
			break
		}
		v.reset(OpRISCV64SRAIW)
		v.AuxInt = int64ToAuxInt(31)
		v.AddArg(y)
		return true
	}
	// match: (SRAI [x] (MOVDconst [y]))
	// result: (MOVDconst [int64(y) >> uint32(x)])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(y) >> uint32(x))
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRAW folds a constant shift amount into the
// 32-bit immediate form: (SRAW x (MOVDconst [val])) => (SRAIW [val&31] x).
// The amount is masked to 5 bits (word shift). Returns true on rewrite.
func rewriteValueRISCV64_OpRISCV64SRAW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRAW x (MOVDconst [val]))
	// result: (SRAIW [val&31] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SRAIW)
		v.AuxInt = int64ToAuxInt(val & 31)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRL folds a constant shift amount into the
// immediate form: (SRL x (MOVDconst [val])) => (SRLI [val&63] x). The
// amount is masked to 6 bits (64-bit shift). Returns true on rewrite.
func rewriteValueRISCV64_OpRISCV64SRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRL x (MOVDconst [val]))
	// result: (SRLI [val&63] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SRLI)
		v.AuxInt = int64ToAuxInt(val & 63)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRLI simplifies immediate logical right
// shifts: small shifts of a zero-extended word become SRLIW; shifts that
// consume all data bits of a zero-extended byte/halfword/word become the
// constant 0; shifts of constants are evaluated at compile time. Rules
// are tried in order; the [0,31] SRLIW rule must precede the x>=32 rule.
// Returns true if any rule fires.
func rewriteValueRISCV64_OpRISCV64SRLI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SRLI [x] (MOVWUreg y))
	// cond: x >= 0 && x <= 31
	// result: (SRLIW [x] y)
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVWUreg {
			break
		}
		y := v_0.Args[0]
		if !(x >= 0 && x <= 31) {
			break
		}
		v.reset(OpRISCV64SRLIW)
		v.AuxInt = int64ToAuxInt(x)
		v.AddArg(y)
		return true
	}
	// match: (SRLI [x] (MOVBUreg y))
	// cond: x >= 8
	// result: (MOVDconst [0])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVBUreg {
			break
		}
		// Zero-extended byte has no bits above bit 7, so the shift
		// result is identically zero.
		if !(x >= 8) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLI [x] (MOVHUreg y))
	// cond: x >= 16
	// result: (MOVDconst [0])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVHUreg {
			break
		}
		if !(x >= 16) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLI [x] (MOVWUreg y))
	// cond: x >= 32
	// result: (MOVDconst [0])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVWUreg {
			break
		}
		if !(x >= 32) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLI [x] (MOVDconst [y]))
	// result: (MOVDconst [int64(uint64(y) >> uint32(x))])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(y) >> uint32(x)))
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRLW folds a constant shift amount into the
// 32-bit immediate form: (SRLW x (MOVDconst [val])) => (SRLIW [val&31] x).
// The amount is masked to 5 bits (word shift). Returns true on rewrite.
func rewriteValueRISCV64_OpRISCV64SRLW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRLW x (MOVDconst [val]))
	// result: (SRLIW [val&31] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SRLIW)
		v.AuxInt = int64ToAuxInt(val & 31)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SUB simplifies 64-bit subtraction:
// subtracting a negation becomes addition; x-x is 0; constant operands
// fold into ADDI/NEG forms when the negated constant fits in 32 bits;
// subtracting 0 is the identity; 0-x is NEG. Rules are tried in order;
// the first match mutates v and returns true.
func rewriteValueRISCV64_OpRISCV64SUB(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (SUB x (NEG y))
	// result: (ADD x y)
	for {
		x := v_0
		if v_1.Op != OpRISCV64NEG {
			break
		}
		y := v_1.Args[0]
		v.reset(OpRISCV64ADD)
		v.AddArg2(x, y)
		return true
	}
	// match: (SUB x x)
	// result: (MOVDconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SUB x (MOVDconst [val]))
	// cond: is32Bit(-val)
	// result: (ADDI [-val] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		// The is32Bit guard also rejects val == math.MinInt64, whose
		// negation does not fit.
		if !(is32Bit(-val)) {
			break
		}
		v.reset(OpRISCV64ADDI)
		v.AuxInt = int64ToAuxInt(-val)
		v.AddArg(x)
		return true
	}
	// match: (SUB <t> (MOVDconst [val]) y)
	// cond: is32Bit(-val)
	// result: (NEG (ADDI <t> [-val] y))
	for {
		t := v.Type
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_0.AuxInt)
		y := v_1
		if !(is32Bit(-val)) {
			break
		}
		v.reset(OpRISCV64NEG)
		v0 := b.NewValue0(v.Pos, OpRISCV64ADDI, t)
		v0.AuxInt = int64ToAuxInt(-val)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (SUB x (MOVDconst [0]))
	// result: x
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (SUB (MOVDconst [0]) x)
	// result: (NEG x)
	for {
		if v_0.Op != OpRISCV64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpRISCV64NEG)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SUBW simplifies 32-bit subtraction with a
// zero operand: x-0 becomes (ADDIW [0] x) (which keeps the word sign
// extension), and 0-x becomes NEGW. Returns true if a rule fires.
func rewriteValueRISCV64_OpRISCV64SUBW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SUBW x (MOVDconst [0]))
	// result: (ADDIW [0] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		// ADDIW [0] rather than a plain copy: the result must still be
		// sign-extended from 32 bits.
		v.reset(OpRISCV64ADDIW)
		v.AuxInt = int64ToAuxInt(0)
		v.AddArg(x)
		return true
	}
	// match: (SUBW (MOVDconst [0]) x)
	// result: (NEGW x)
	for {
		if v_0.Op != OpRISCV64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpRISCV64NEGW)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64XOR simplifies XOR: a 32-bit constant
// operand (on either side — the inner loop swaps v_0/v_1 to try both
// argument orders of the commutative op) folds into XORI, and x^x is 0.
// Returns true if a rule fires.
func rewriteValueRISCV64_OpRISCV64XOR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (XOR (MOVDconst [val]) x)
	// cond: is32Bit(val)
	// result: (XORI [val] x)
	for {
		// Commutative match: iteration 0 tries (v_0,v_1), iteration 1
		// the swapped order; "continue" advances, "break" exits.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64MOVDconst {
				continue
			}
			val := auxIntToInt64(v_0.AuxInt)
			x := v_1
			if !(is32Bit(val)) {
				continue
			}
			v.reset(OpRISCV64XORI)
			v.AuxInt = int64ToAuxInt(val)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (XOR x x)
	// result: (MOVDconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRotateLeft16 lowers a 16-bit rotate-left (no
// native rotate is used here) to the OR of a left shift by y&15 and a
// right shift of the zero-extended value by (-y)&15. Unconditional:
// always rewrites and returns true.
func rewriteValueRISCV64_OpRotateLeft16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft16 <t> x y)
	// result: (OR (SLL <t> x (ANDI [15] <y.Type> y)) (SRL <t> (ZeroExt16to64 x) (ANDI [15] <y.Type> (NEG <y.Type> y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64OR)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v1 := b.NewValue0(v.Pos, OpRISCV64ANDI, y.Type)
		v1.AuxInt = int64ToAuxInt(15)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		// Zero-extend x so the complementary right shift pulls in the
		// original low bits rather than whatever sits above bit 15.
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(x)
		v4 := b.NewValue0(v.Pos, OpRISCV64ANDI, y.Type)
		v4.AuxInt = int64ToAuxInt(15)
		v5 := b.NewValue0(v.Pos, OpRISCV64NEG, y.Type)
		v5.AddArg(y)
		v4.AddArg(v5)
		v2.AddArg2(v3, v4)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueRISCV64_OpRotateLeft8 lowers an 8-bit rotate-left to the OR
// of a left shift by y&7 and a right shift of the zero-extended value by
// (-y)&7. Unconditional: always rewrites and returns true.
func rewriteValueRISCV64_OpRotateLeft8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft8 <t> x y)
	// result: (OR (SLL <t> x (ANDI [7] <y.Type> y)) (SRL <t> (ZeroExt8to64 x) (ANDI [7] <y.Type> (NEG <y.Type> y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64OR)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v1 := b.NewValue0(v.Pos, OpRISCV64ANDI, y.Type)
		v1.AuxInt = int64ToAuxInt(7)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(x)
		v4 := b.NewValue0(v.Pos, OpRISCV64ANDI, y.Type)
		v4.AuxInt = int64ToAuxInt(7)
		v5 := b.NewValue0(v.Pos, OpRISCV64NEG, y.Type)
		v5.AddArg(y)
		v4.AddArg(v5)
		v2.AddArg2(v3, v4)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueRISCV64_OpRsh16Ux16 lowers an unsigned right shift of a
// 16-bit value by a 16-bit amount. When the shift amount is not proven
// in-bounds, the SRL result is ANDed with a mask built from
// Neg16(SLTIU [64] ...), so out-of-range shift amounts produce 0; when
// bounded, a bare SRL of the zero-extended value suffices.
func rewriteValueRISCV64_OpRsh16Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg16, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh16Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16Ux32 lowers an unsigned right shift of a
// 16-bit value by a 32-bit amount; identical scheme to Rsh16Ux16 except
// the shift amount is zero-extended from 32 bits for the bounds test.
func rewriteValueRISCV64_OpRsh16Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg16, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh16Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16Ux64 lowers an unsigned right shift of a
// 16-bit value by a 64-bit amount; same scheme as Rsh16Ux16 but the
// shift amount needs no extension before the SLTIU bounds test.
func rewriteValueRISCV64_OpRsh16Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg16, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh16Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16Ux8 lowers an unsigned right shift of a
// 16-bit value by an 8-bit amount; same scheme as Rsh16Ux16 with the
// shift amount zero-extended from 8 bits for the bounds test.
func rewriteValueRISCV64_OpRsh16Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg16, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh16Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16x16 lowers a signed right shift of a 16-bit
// value by a 16-bit amount. When the amount is not proven in-bounds, the
// effective shift is (y | (SLTIU [64] y)-1): in-range amounts pass
// through, while the ADDI [-1]/SLTIU term saturates out-of-range amounts
// so SRA smears the sign bit. When bounded, a bare SRA of the
// sign-extended value is emitted.
func rewriteValueRISCV64_OpRsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh16x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16x32 lowers a signed right shift of a 16-bit
// value by a 32-bit amount; same saturating-OR scheme as Rsh16x16 with
// the shift amount zero-extended from 32 bits for the bounds test.
func rewriteValueRISCV64_OpRsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh16x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16x64 lowers a signed right shift of a 16-bit
// value by a 64-bit amount; same saturating-OR scheme as Rsh16x16 but
// the amount needs no extension before the SLTIU bounds test.
func rewriteValueRISCV64_OpRsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh16x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16x8 lowers a signed right shift of a 16-bit
// value by an 8-bit amount; same saturating-OR scheme as Rsh16x16 with
// the shift amount zero-extended from 8 bits for the bounds test.
func rewriteValueRISCV64_OpRsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh16x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32Ux16 lowers an unsigned right shift of a
// 32-bit value by a 16-bit amount using the word-shift SRLW (no explicit
// zero-extension of x needed). When the amount is not proven in-bounds,
// the result is masked via Neg32(SLTIU [32] ...) so amounts >= 32 yield
// the value built from an all-zero mask; when bounded, a bare SRLW.
func rewriteValueRISCV64_OpRsh32Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRLW <t> x y) (Neg32 <t> (SLTIU <t> [32] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh32Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRLW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32Ux32 lowers an unsigned right shift of a
// 32-bit value by a 32-bit amount; same SRLW + Neg32/SLTIU masking
// scheme as Rsh32Ux16, with the amount zero-extended from 32 bits.
func rewriteValueRISCV64_OpRsh32Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRLW <t> x y) (Neg32 <t> (SLTIU <t> [32] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh32Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRLW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32Ux64 lowers an unsigned right shift of a
// 32-bit value by a 64-bit amount; same SRLW + Neg32/SLTIU masking
// scheme as Rsh32Ux16, with no extension needed on the amount.
func rewriteValueRISCV64_OpRsh32Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh32Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRLW <t> x y) (Neg32 <t> (SLTIU <t> [32] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(32)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh32Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRLW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32Ux8 lowers an unsigned right shift of a
// 32-bit value by an 8-bit amount; same SRLW + Neg32/SLTIU masking
// scheme as Rsh32Ux16, with the amount zero-extended from 8 bits.
func rewriteValueRISCV64_OpRsh32Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRLW <t> x y) (Neg32 <t> (SLTIU <t> [32] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh32Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRLW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32x16 lowers Rsh32x16, a signed right shift of a
// 32-bit value by a 16-bit amount. For a possibly out-of-range amount the
// shift count is replaced by y | ((y < 32) - 1): when y >= 32 the OR operand
// is -1, saturating the count so SRAW replicates the sign bit; when y < 32 it
// is 0 and y passes through unchanged. A provably bounded shift uses SRAW
// directly.
func rewriteValueRISCV64_OpRsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAW <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [32] (ZeroExt16to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh32x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32x32 lowers Rsh32x32, a signed right shift of a
// 32-bit value by a 32-bit amount. For a possibly out-of-range amount the
// shift count becomes y | ((y < 32) - 1), which saturates to -1 (all ones)
// when y >= 32 so SRAW replicates the sign bit; otherwise y is unchanged.
// A provably bounded shift uses SRAW directly.
func rewriteValueRISCV64_OpRsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAW <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [32] (ZeroExt32to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh32x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32x64 lowers Rsh32x64, a signed right shift of a
// 32-bit value by a 64-bit amount. The 64-bit count needs no zero extension;
// otherwise the pattern matches the narrower variants: an unbounded count is
// replaced by y | ((y < 32) - 1), saturating to all ones when y >= 32 so SRAW
// replicates the sign bit. A provably bounded shift uses SRAW directly.
func rewriteValueRISCV64_OpRsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh32x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAW <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [32] y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(32)
		v2.AddArg(y)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh32x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32x8 lowers Rsh32x8, a signed right shift of a
// 32-bit value by an 8-bit amount. An unbounded count is replaced by
// y | ((y < 32) - 1), saturating to all ones when y >= 32 so SRAW replicates
// the sign bit; a provably bounded shift uses SRAW directly.
func rewriteValueRISCV64_OpRsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAW <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [32] (ZeroExt8to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh32x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64Ux16 lowers Rsh64Ux16, an unsigned right shift of
// a 64-bit value by a 16-bit amount. Shifts >= 64 must yield 0, so the SRL
// result is masked with -(y < 64) unless shiftIsBounded proves y in range,
// in which case a bare SRL is emitted.
func rewriteValueRISCV64_OpRsh64Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh64Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64Ux32 lowers Rsh64Ux32, an unsigned right shift of
// a 64-bit value by a 32-bit amount. Shifts >= 64 must yield 0, so the SRL
// result is masked with -(y < 64) unless shiftIsBounded proves y in range,
// in which case a bare SRL is emitted.
func rewriteValueRISCV64_OpRsh64Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh64Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64Ux64 lowers Rsh64Ux64, an unsigned right shift of
// a 64-bit value by a 64-bit amount. The count needs no extension; shifts
// >= 64 must yield 0, so the SRL result is masked with -(y < 64) unless
// shiftIsBounded proves y in range, in which case a bare SRL is emitted.
func rewriteValueRISCV64_OpRsh64Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh64Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh64Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64Ux8 lowers Rsh64Ux8, an unsigned right shift of a
// 64-bit value by an 8-bit amount. Shifts >= 64 must yield 0, so the SRL
// result is masked with -(y < 64) unless shiftIsBounded proves y in range,
// in which case a bare SRL is emitted.
func rewriteValueRISCV64_OpRsh64Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh64Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64x16 lowers Rsh64x16, a signed right shift of a
// 64-bit value by a 16-bit amount. An unbounded count is replaced by
// y | ((y < 64) - 1), saturating to all ones when y >= 64 so SRA replicates
// the sign bit; a provably bounded shift uses SRA directly.
func rewriteValueRISCV64_OpRsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh64x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64x32 lowers Rsh64x32, a signed right shift of a
// 64-bit value by a 32-bit amount. An unbounded count is replaced by
// y | ((y < 64) - 1), saturating to all ones when y >= 64 so SRA replicates
// the sign bit; a provably bounded shift uses SRA directly.
func rewriteValueRISCV64_OpRsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh64x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64x64 lowers Rsh64x64, a signed right shift of a
// 64-bit value by a 64-bit amount. The count needs no extension; an unbounded
// count is replaced by y | ((y < 64) - 1), saturating to all ones when
// y >= 64 so SRA replicates the sign bit. A provably bounded shift uses SRA
// directly.
func rewriteValueRISCV64_OpRsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh64x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh64x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64x8 lowers Rsh64x8, a signed right shift of a
// 64-bit value by an 8-bit amount. An unbounded count is replaced by
// y | ((y < 64) - 1), saturating to all ones when y >= 64 so SRA replicates
// the sign bit; a provably bounded shift uses SRA directly.
func rewriteValueRISCV64_OpRsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh64x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8Ux16 lowers Rsh8Ux16, an unsigned right shift of
// an 8-bit value by a 16-bit amount. x is zero-extended to 64 bits and shifted
// with the full-width SRL, so the in-range bound checked here is 64, not 8
// (a count of 8..63 already yields 0 on the extended value). Counts >= 64
// must still produce 0, hence the -(y < 64) mask; a provably bounded shift
// skips the mask.
func rewriteValueRISCV64_OpRsh8Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg8, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh8Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8Ux32 lowers Rsh8Ux32, an unsigned right shift of
// an 8-bit value by a 32-bit amount. x is zero-extended to 64 bits and shifted
// with SRL, so the bound checked is 64: counts 8..63 already give 0 on the
// extended value, and counts >= 64 are handled by the -(y < 64) mask.
// A provably bounded shift skips the mask.
func rewriteValueRISCV64_OpRsh8Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg8, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh8Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8Ux64 lowers Rsh8Ux64, an unsigned right shift of
// an 8-bit value by a 64-bit amount. x is zero-extended to 64 bits and shifted
// with SRL; the 64-bit count needs no extension. Counts >= 64 are handled by
// the -(y < 64) mask; a provably bounded shift skips the mask.
func rewriteValueRISCV64_OpRsh8Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg8, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh8Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8Ux8 lowers Rsh8Ux8, an unsigned right shift of an
// 8-bit value by an 8-bit amount. x is zero-extended to 64 bits and shifted
// with SRL, so the bound checked is 64; counts >= 64 are handled by the
// -(y < 64) mask. A provably bounded shift skips the mask.
func rewriteValueRISCV64_OpRsh8Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg8, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh8Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8x16 lowers Rsh8x16, a signed right shift of an
// 8-bit value by a 16-bit amount. x is sign-extended to 64 bits and shifted
// with SRA; for a possibly out-of-range count the amount is replaced by
// y | ((y < 64) - 1), saturating to all ones when y >= 64 so the sign bit is
// replicated. A provably bounded shift uses SRA on the extended x directly.
func rewriteValueRISCV64_OpRsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh8x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8x32 lowers Rsh8x32, a signed right shift of an
// 8-bit value by a 32-bit amount. x is sign-extended to 64 bits and shifted
// with SRA; an unbounded count is replaced by y | ((y < 64) - 1), saturating
// to all ones when y >= 64 so the sign bit is replicated. A provably bounded
// shift uses SRA on the extended x directly.
func rewriteValueRISCV64_OpRsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh8x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8x64 lowers Rsh8x64, a signed right shift of an
// 8-bit value by a 64-bit amount. x is sign-extended to 64 bits and shifted
// with SRA; the 64-bit count needs no extension. An unbounded count is
// replaced by y | ((y < 64) - 1), saturating to all ones when y >= 64 so the
// sign bit is replicated. A provably bounded shift uses SRA directly.
func rewriteValueRISCV64_OpRsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh8x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8x8 lowers Rsh8x8, a signed right shift of an
// 8-bit value by an 8-bit amount. x is sign-extended to 64 bits and shifted
// with SRA; an unbounded count is replaced by y | ((y < 64) - 1), saturating
// to all ones when y >= 64 so the sign bit is replicated. A provably bounded
// shift uses SRA on the extended x directly.
func rewriteValueRISCV64_OpRsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh8x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpSelect0 lowers Select0 (the first result) of
// multi-result ops: the sum word of Add64carry, the difference word of
// Sub64borrow, and — when the tuple has a single use — the high word of
// LoweredMuluhilo via MULHU.
func rewriteValueRISCV64_OpSelect0(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Select0 (Add64carry x y c))
	// result: (ADD (ADD <typ.UInt64> x y) c)
	for {
		if v_0.Op != OpAdd64carry {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpRISCV64ADD)
		v0 := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
		v0.AddArg2(x, y)
		v.AddArg2(v0, c)
		return true
	}
	// match: (Select0 (Sub64borrow x y c))
	// result: (SUB (SUB <typ.UInt64> x y) c)
	for {
		if v_0.Op != OpSub64borrow {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpRISCV64SUB)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
		v0.AddArg2(x, y)
		v.AddArg2(v0, c)
		return true
	}
	// match: (Select0 m:(LoweredMuluhilo x y))
	// cond: m.Uses == 1
	// result: (MULHU x y)
	for {
		m := v_0
		if m.Op != OpRISCV64LoweredMuluhilo {
			break
		}
		y := m.Args[1]
		x := m.Args[0]
		if !(m.Uses == 1) {
			break
		}
		v.reset(OpRISCV64MULHU)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpSelect1 lowers Select1 (the second result) of
// multi-result ops. For Add64carry the carry-out is computed as
// (x+y < x) | (x+y+c < x+y), i.e. an unsigned-overflow check on each of the
// two additions; Sub64borrow mirrors this with borrow-out comparisons. For a
// single-use LoweredMuluhilo the low product word is just MUL.
func rewriteValueRISCV64_OpSelect1(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Select1 (Add64carry x y c))
	// result: (OR (SLTU <typ.UInt64> s:(ADD <typ.UInt64> x y) x) (SLTU <typ.UInt64> (ADD <typ.UInt64> s c) s))
	for {
		if v_0.Op != OpAdd64carry {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpRISCV64OR)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
		s := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
		s.AddArg2(x, y)
		v0.AddArg2(s, x)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
		v3 := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
		v3.AddArg2(s, c)
		v2.AddArg2(v3, s)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Select1 (Sub64borrow x y c))
	// result: (OR (SLTU <typ.UInt64> x s:(SUB <typ.UInt64> x y)) (SLTU <typ.UInt64> s (SUB <typ.UInt64> s c)))
	for {
		if v_0.Op != OpSub64borrow {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpRISCV64OR)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
		s := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
		s.AddArg2(x, y)
		v0.AddArg2(x, s)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
		v3 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
		v3.AddArg2(s, c)
		v2.AddArg2(s, v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Select1 m:(LoweredMuluhilo x y))
	// cond: m.Uses == 1
	// result: (MUL x y)
	for {
		m := v_0
		if m.Op != OpRISCV64LoweredMuluhilo {
			break
		}
		y := m.Args[1]
		x := m.Args[0]
		if !(m.Uses == 1) {
			break
		}
		v.reset(OpRISCV64MUL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpSlicemask lowers Slicemask: the arithmetic shift of
// (NEG x) by 63 broadcasts the sign bit of -x, producing all ones when x > 0
// and zero when x == 0. The rewrite is unconditional, so it always returns
// true.
func rewriteValueRISCV64_OpSlicemask(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Slicemask <t> x)
	// result: (SRAI [63] (NEG <t> x))
	for {
		t := v.Type
		x := v_0
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(63)
		v0 := b.NewValue0(v.Pos, OpRISCV64NEG, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
 10418  func rewriteValueRISCV64_OpStore(v *Value) bool {
 10419  	v_2 := v.Args[2]
 10420  	v_1 := v.Args[1]
 10421  	v_0 := v.Args[0]
 10422  	// match: (Store {t} ptr val mem)
 10423  	// cond: t.Size() == 1
 10424  	// result: (MOVBstore ptr val mem)
 10425  	for {
 10426  		t := auxToType(v.Aux)
 10427  		ptr := v_0
 10428  		val := v_1
 10429  		mem := v_2
 10430  		if !(t.Size() == 1) {
 10431  			break
 10432  		}
 10433  		v.reset(OpRISCV64MOVBstore)
 10434  		v.AddArg3(ptr, val, mem)
 10435  		return true
 10436  	}
 10437  	// match: (Store {t} ptr val mem)
 10438  	// cond: t.Size() == 2
 10439  	// result: (MOVHstore ptr val mem)
 10440  	for {
 10441  		t := auxToType(v.Aux)
 10442  		ptr := v_0
 10443  		val := v_1
 10444  		mem := v_2
 10445  		if !(t.Size() == 2) {
 10446  			break
 10447  		}
 10448  		v.reset(OpRISCV64MOVHstore)
 10449  		v.AddArg3(ptr, val, mem)
 10450  		return true
 10451  	}
 10452  	// match: (Store {t} ptr val mem)
 10453  	// cond: t.Size() == 4 && !t.IsFloat()
 10454  	// result: (MOVWstore ptr val mem)
 10455  	for {
 10456  		t := auxToType(v.Aux)
 10457  		ptr := v_0
 10458  		val := v_1
 10459  		mem := v_2
 10460  		if !(t.Size() == 4 && !t.IsFloat()) {
 10461  			break
 10462  		}
 10463  		v.reset(OpRISCV64MOVWstore)
 10464  		v.AddArg3(ptr, val, mem)
 10465  		return true
 10466  	}
 10467  	// match: (Store {t} ptr val mem)
 10468  	// cond: t.Size() == 8 && !t.IsFloat()
 10469  	// result: (MOVDstore ptr val mem)
 10470  	for {
 10471  		t := auxToType(v.Aux)
 10472  		ptr := v_0
 10473  		val := v_1
 10474  		mem := v_2
 10475  		if !(t.Size() == 8 && !t.IsFloat()) {
 10476  			break
 10477  		}
 10478  		v.reset(OpRISCV64MOVDstore)
 10479  		v.AddArg3(ptr, val, mem)
 10480  		return true
 10481  	}
 10482  	// match: (Store {t} ptr val mem)
 10483  	// cond: t.Size() == 4 && t.IsFloat()
 10484  	// result: (FMOVWstore ptr val mem)
 10485  	for {
 10486  		t := auxToType(v.Aux)
 10487  		ptr := v_0
 10488  		val := v_1
 10489  		mem := v_2
 10490  		if !(t.Size() == 4 && t.IsFloat()) {
 10491  			break
 10492  		}
 10493  		v.reset(OpRISCV64FMOVWstore)
 10494  		v.AddArg3(ptr, val, mem)
 10495  		return true
 10496  	}
 10497  	// match: (Store {t} ptr val mem)
 10498  	// cond: t.Size() == 8 && t.IsFloat()
 10499  	// result: (FMOVDstore ptr val mem)
 10500  	for {
 10501  		t := auxToType(v.Aux)
 10502  		ptr := v_0
 10503  		val := v_1
 10504  		mem := v_2
 10505  		if !(t.Size() == 8 && t.IsFloat()) {
 10506  			break
 10507  		}
 10508  		v.reset(OpRISCV64FMOVDstore)
 10509  		v.AddArg3(ptr, val, mem)
 10510  		return true
 10511  	}
 10512  	return false
 10513  }
// rewriteValueRISCV64_OpZero lowers (Zero [size] {type} ptr mem) —
// "zero size bytes at ptr" — to a sequence of constant-zero stores,
// or to LoweredZero/LoweredZeroLoop for larger regions.
//
// Rule order matters: for each size the best-aligned form is tried
// first, falling back to narrower stores when the type's alignment
// does not permit the wide store; the two catch-all Lowered* rules
// must come last. Each rule is written as a one-shot `for { ... }`
// loop purely so `break` can abandon a failed match.
func rewriteValueRISCV64_OpZero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (Zero [0] _ mem)
	// result: mem
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		mem := v_1
		v.copyOf(mem)
		return true
	}
	// match: (Zero [1] ptr mem)
	// result: (MOVBstore ptr (MOVDconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 1 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpRISCV64MOVBstore)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [2] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore ptr (MOVDconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [2] ptr mem)
	// result: (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(1)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [4] {t} ptr mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore ptr (MOVDconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [4] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [4] ptr mem)
	// result: (MOVBstore [3] ptr (MOVDconst [0]) (MOVBstore [2] ptr (MOVDconst [0]) (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(3)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(1)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v3.AddArg3(ptr, v0, mem)
		v2.AddArg3(ptr, v0, v3)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVDstore ptr (MOVDconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [6] ptr (MOVDconst [0]) (MOVHstore [4] ptr (MOVDconst [0]) (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(6)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(4)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(2)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v3.AddArg3(ptr, v0, mem)
		v2.AddArg3(ptr, v0, v3)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [3] ptr mem)
	// result: (MOVBstore [2] ptr (MOVDconst [0]) (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 3 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(1)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [6] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [4] ptr (MOVDconst [0]) (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 6 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// Catch-alls: sizes not handled above go to an unrolled LoweredZero
	// (small enough, <= 24*moveSize bytes) or a LoweredZeroLoop.
	// match: (Zero [s] {t} ptr mem)
	// cond: s <= 24*moveSize(t.Alignment(), config)
	// result: (LoweredZero [makeValAndOff(int32(s),int32(t.Alignment()))] ptr mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(s <= 24*moveSize(t.Alignment(), config)) {
			break
		}
		v.reset(OpRISCV64LoweredZero)
		v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(s), int32(t.Alignment())))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Zero [s] {t} ptr mem)
	// cond: s > 24*moveSize(t.Alignment(), config)
	// result: (LoweredZeroLoop [makeValAndOff(int32(s),int32(t.Alignment()))] ptr mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(s > 24*moveSize(t.Alignment(), config)) {
			break
		}
		v.reset(OpRISCV64LoweredZeroLoop)
		v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(s), int32(t.Alignment())))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteBlockRISCV64 rewrites the control value of block b into a
// cheaper RISCV64 branch form, returning true if a rewrite was made.
// Typical transformations: fold a compare-with-zero into the zero
// branches (BEQZ/BNEZ/BGEZ/...), absorb SLT/SLTU/SUB control values
// into two-operand conditional branches, invert float inequality via
// its FEQ counterpart, and lower generic If to BNEZ. Each rule is a
// one-shot `for` loop so `break` can abandon a failed match; rule
// order within a case determines priority.
func rewriteBlockRISCV64(b *Block) bool {
	typ := &b.Func.Config.Types
	switch b.Kind {
	case BlockRISCV64BEQ:
		// match: (BEQ (MOVDconst [0]) cond yes no)
		// result: (BEQZ cond yes no)
		for b.Controls[0].Op == OpRISCV64MOVDconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			cond := b.Controls[1]
			b.resetWithControl(BlockRISCV64BEQZ, cond)
			return true
		}
		// match: (BEQ cond (MOVDconst [0]) yes no)
		// result: (BEQZ cond yes no)
		for b.Controls[1].Op == OpRISCV64MOVDconst {
			cond := b.Controls[0]
			v_1 := b.Controls[1]
			if auxIntToInt64(v_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockRISCV64BEQZ, cond)
			return true
		}
	case BlockRISCV64BEQZ:
		// match: (BEQZ (SEQZ x) yes no)
		// result: (BNEZ x yes no)
		for b.Controls[0].Op == OpRISCV64SEQZ {
			v_0 := b.Controls[0]
			x := v_0.Args[0]
			b.resetWithControl(BlockRISCV64BNEZ, x)
			return true
		}
		// match: (BEQZ (SNEZ x) yes no)
		// result: (BEQZ x yes no)
		for b.Controls[0].Op == OpRISCV64SNEZ {
			v_0 := b.Controls[0]
			x := v_0.Args[0]
			b.resetWithControl(BlockRISCV64BEQZ, x)
			return true
		}
		// match: (BEQZ (NEG x) yes no)
		// result: (BEQZ x yes no)
		for b.Controls[0].Op == OpRISCV64NEG {
			v_0 := b.Controls[0]
			x := v_0.Args[0]
			b.resetWithControl(BlockRISCV64BEQZ, x)
			return true
		}
		// match: (BEQZ (FNES <t> x y) yes no)
		// result: (BNEZ (FEQS <t> x y) yes no)
		for b.Controls[0].Op == OpRISCV64FNES {
			v_0 := b.Controls[0]
			t := v_0.Type
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			// inner loop tries both operand orders (FNES is commutative)
			for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
				x := v_0_0
				y := v_0_1
				v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQS, t)
				v0.AddArg2(x, y)
				b.resetWithControl(BlockRISCV64BNEZ, v0)
				return true
			}
		}
		// match: (BEQZ (FNED <t> x y) yes no)
		// result: (BNEZ (FEQD <t> x y) yes no)
		for b.Controls[0].Op == OpRISCV64FNED {
			v_0 := b.Controls[0]
			t := v_0.Type
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
				x := v_0_0
				y := v_0_1
				v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQD, t)
				v0.AddArg2(x, y)
				b.resetWithControl(BlockRISCV64BNEZ, v0)
				return true
			}
		}
		// match: (BEQZ (SUB x y) yes no)
		// result: (BEQ x y yes no)
		for b.Controls[0].Op == OpRISCV64SUB {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockRISCV64BEQ, x, y)
			return true
		}
		// match: (BEQZ (SLT x y) yes no)
		// result: (BGE x y yes no)
		for b.Controls[0].Op == OpRISCV64SLT {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockRISCV64BGE, x, y)
			return true
		}
		// match: (BEQZ (SLTU x y) yes no)
		// result: (BGEU x y yes no)
		for b.Controls[0].Op == OpRISCV64SLTU {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockRISCV64BGEU, x, y)
			return true
		}
		// match: (BEQZ (SLTI [x] y) yes no)
		// result: (BGE y (MOVDconst [x]) yes no)
		for b.Controls[0].Op == OpRISCV64SLTI {
			v_0 := b.Controls[0]
			x := auxIntToInt64(v_0.AuxInt)
			y := v_0.Args[0]
			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
			v0.AuxInt = int64ToAuxInt(x)
			b.resetWithControl2(BlockRISCV64BGE, y, v0)
			return true
		}
		// match: (BEQZ (SLTIU [x] y) yes no)
		// result: (BGEU y (MOVDconst [x]) yes no)
		for b.Controls[0].Op == OpRISCV64SLTIU {
			v_0 := b.Controls[0]
			x := auxIntToInt64(v_0.AuxInt)
			y := v_0.Args[0]
			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
			v0.AuxInt = int64ToAuxInt(x)
			b.resetWithControl2(BlockRISCV64BGEU, y, v0)
			return true
		}
		// match: (BEQZ (ANDI [c] (FCLASSD (FNEGD x))) yes no)
		// result: (BEQZ (ANDI <typ.Int64> [(c&0b11_0000_0000)|int64(bits.Reverse8(uint8(c))&0b1111_1111)] (FCLASSD x)) yes no)
		for b.Controls[0].Op == OpRISCV64ANDI {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpRISCV64FCLASSD {
				break
			}
			v_0_0_0 := v_0_0.Args[0]
			if v_0_0_0.Op != OpRISCV64FNEGD {
				break
			}
			x := v_0_0_0.Args[0]
			v0 := b.NewValue0(v_0.Pos, OpRISCV64ANDI, typ.Int64)
			v0.AuxInt = int64ToAuxInt((c & 0b11_0000_0000) | int64(bits.Reverse8(uint8(c))&0b1111_1111))
			v1 := b.NewValue0(v_0.Pos, OpRISCV64FCLASSD, typ.Int64)
			v1.AddArg(x)
			v0.AddArg(v1)
			b.resetWithControl(BlockRISCV64BEQZ, v0)
			return true
		}
		// match: (BEQZ (ANDI [c] (FCLASSD (FABSD x))) yes no)
		// result: (BEQZ (ANDI <typ.Int64> [(c&0b11_1111_0000)|int64(bits.Reverse8(uint8(c))&0b0000_1111)] (FCLASSD x)) yes no)
		for b.Controls[0].Op == OpRISCV64ANDI {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpRISCV64FCLASSD {
				break
			}
			v_0_0_0 := v_0_0.Args[0]
			if v_0_0_0.Op != OpRISCV64FABSD {
				break
			}
			x := v_0_0_0.Args[0]
			v0 := b.NewValue0(v_0.Pos, OpRISCV64ANDI, typ.Int64)
			v0.AuxInt = int64ToAuxInt((c & 0b11_1111_0000) | int64(bits.Reverse8(uint8(c))&0b0000_1111))
			v1 := b.NewValue0(v_0.Pos, OpRISCV64FCLASSD, typ.Int64)
			v1.AddArg(x)
			v0.AddArg(v1)
			b.resetWithControl(BlockRISCV64BEQZ, v0)
			return true
		}
	case BlockRISCV64BGE:
		// match: (BGE (MOVDconst [0]) cond yes no)
		// result: (BLEZ cond yes no)
		for b.Controls[0].Op == OpRISCV64MOVDconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			cond := b.Controls[1]
			b.resetWithControl(BlockRISCV64BLEZ, cond)
			return true
		}
		// match: (BGE cond (MOVDconst [0]) yes no)
		// result: (BGEZ cond yes no)
		for b.Controls[1].Op == OpRISCV64MOVDconst {
			cond := b.Controls[0]
			v_1 := b.Controls[1]
			if auxIntToInt64(v_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockRISCV64BGEZ, cond)
			return true
		}
	case BlockRISCV64BGEU:
		// match: (BGEU (MOVDconst [0]) cond yes no)
		// result: (BEQZ cond yes no)
		for b.Controls[0].Op == OpRISCV64MOVDconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			cond := b.Controls[1]
			b.resetWithControl(BlockRISCV64BEQZ, cond)
			return true
		}
	case BlockRISCV64BLT:
		// match: (BLT (MOVDconst [0]) cond yes no)
		// result: (BGTZ cond yes no)
		for b.Controls[0].Op == OpRISCV64MOVDconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			cond := b.Controls[1]
			b.resetWithControl(BlockRISCV64BGTZ, cond)
			return true
		}
		// match: (BLT cond (MOVDconst [0]) yes no)
		// result: (BLTZ cond yes no)
		for b.Controls[1].Op == OpRISCV64MOVDconst {
			cond := b.Controls[0]
			v_1 := b.Controls[1]
			if auxIntToInt64(v_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockRISCV64BLTZ, cond)
			return true
		}
	case BlockRISCV64BLTU:
		// match: (BLTU (MOVDconst [0]) cond yes no)
		// result: (BNEZ cond yes no)
		for b.Controls[0].Op == OpRISCV64MOVDconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			cond := b.Controls[1]
			b.resetWithControl(BlockRISCV64BNEZ, cond)
			return true
		}
	case BlockRISCV64BNE:
		// match: (BNE (MOVDconst [0]) cond yes no)
		// result: (BNEZ cond yes no)
		for b.Controls[0].Op == OpRISCV64MOVDconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			cond := b.Controls[1]
			b.resetWithControl(BlockRISCV64BNEZ, cond)
			return true
		}
		// match: (BNE cond (MOVDconst [0]) yes no)
		// result: (BNEZ cond yes no)
		for b.Controls[1].Op == OpRISCV64MOVDconst {
			cond := b.Controls[0]
			v_1 := b.Controls[1]
			if auxIntToInt64(v_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockRISCV64BNEZ, cond)
			return true
		}
	case BlockRISCV64BNEZ:
		// match: (BNEZ (SEQZ x) yes no)
		// result: (BEQZ x yes no)
		for b.Controls[0].Op == OpRISCV64SEQZ {
			v_0 := b.Controls[0]
			x := v_0.Args[0]
			b.resetWithControl(BlockRISCV64BEQZ, x)
			return true
		}
		// match: (BNEZ (SNEZ x) yes no)
		// result: (BNEZ x yes no)
		for b.Controls[0].Op == OpRISCV64SNEZ {
			v_0 := b.Controls[0]
			x := v_0.Args[0]
			b.resetWithControl(BlockRISCV64BNEZ, x)
			return true
		}
		// match: (BNEZ (NEG x) yes no)
		// result: (BNEZ x yes no)
		for b.Controls[0].Op == OpRISCV64NEG {
			v_0 := b.Controls[0]
			x := v_0.Args[0]
			b.resetWithControl(BlockRISCV64BNEZ, x)
			return true
		}
		// match: (BNEZ (FNES <t> x y) yes no)
		// result: (BEQZ (FEQS <t> x y) yes no)
		for b.Controls[0].Op == OpRISCV64FNES {
			v_0 := b.Controls[0]
			t := v_0.Type
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
				x := v_0_0
				y := v_0_1
				v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQS, t)
				v0.AddArg2(x, y)
				b.resetWithControl(BlockRISCV64BEQZ, v0)
				return true
			}
		}
		// match: (BNEZ (FNED <t> x y) yes no)
		// result: (BEQZ (FEQD <t> x y) yes no)
		for b.Controls[0].Op == OpRISCV64FNED {
			v_0 := b.Controls[0]
			t := v_0.Type
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
				x := v_0_0
				y := v_0_1
				v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQD, t)
				v0.AddArg2(x, y)
				b.resetWithControl(BlockRISCV64BEQZ, v0)
				return true
			}
		}
		// match: (BNEZ (SUB x y) yes no)
		// result: (BNE x y yes no)
		for b.Controls[0].Op == OpRISCV64SUB {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockRISCV64BNE, x, y)
			return true
		}
		// match: (BNEZ (SLT x y) yes no)
		// result: (BLT x y yes no)
		for b.Controls[0].Op == OpRISCV64SLT {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockRISCV64BLT, x, y)
			return true
		}
		// match: (BNEZ (SLTU x y) yes no)
		// result: (BLTU x y yes no)
		for b.Controls[0].Op == OpRISCV64SLTU {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockRISCV64BLTU, x, y)
			return true
		}
		// match: (BNEZ (SLTI [x] y) yes no)
		// result: (BLT y (MOVDconst [x]) yes no)
		for b.Controls[0].Op == OpRISCV64SLTI {
			v_0 := b.Controls[0]
			x := auxIntToInt64(v_0.AuxInt)
			y := v_0.Args[0]
			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
			v0.AuxInt = int64ToAuxInt(x)
			b.resetWithControl2(BlockRISCV64BLT, y, v0)
			return true
		}
		// match: (BNEZ (SLTIU [x] y) yes no)
		// result: (BLTU y (MOVDconst [x]) yes no)
		for b.Controls[0].Op == OpRISCV64SLTIU {
			v_0 := b.Controls[0]
			x := auxIntToInt64(v_0.AuxInt)
			y := v_0.Args[0]
			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
			v0.AuxInt = int64ToAuxInt(x)
			b.resetWithControl2(BlockRISCV64BLTU, y, v0)
			return true
		}
		// match: (BNEZ (ANDI [c] (FCLASSD (FNEGD x))) yes no)
		// result: (BNEZ (ANDI <typ.Int64> [(c&0b11_0000_0000)|int64(bits.Reverse8(uint8(c))&0b1111_1111)] (FCLASSD x)) yes no)
		for b.Controls[0].Op == OpRISCV64ANDI {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpRISCV64FCLASSD {
				break
			}
			v_0_0_0 := v_0_0.Args[0]
			if v_0_0_0.Op != OpRISCV64FNEGD {
				break
			}
			x := v_0_0_0.Args[0]
			v0 := b.NewValue0(v_0.Pos, OpRISCV64ANDI, typ.Int64)
			v0.AuxInt = int64ToAuxInt((c & 0b11_0000_0000) | int64(bits.Reverse8(uint8(c))&0b1111_1111))
			v1 := b.NewValue0(v_0.Pos, OpRISCV64FCLASSD, typ.Int64)
			v1.AddArg(x)
			v0.AddArg(v1)
			b.resetWithControl(BlockRISCV64BNEZ, v0)
			return true
		}
		// match: (BNEZ (ANDI [c] (FCLASSD (FABSD x))) yes no)
		// result: (BNEZ (ANDI <typ.Int64> [(c&0b11_1111_0000)|int64(bits.Reverse8(uint8(c))&0b0000_1111)] (FCLASSD x)) yes no)
		for b.Controls[0].Op == OpRISCV64ANDI {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpRISCV64FCLASSD {
				break
			}
			v_0_0_0 := v_0_0.Args[0]
			if v_0_0_0.Op != OpRISCV64FABSD {
				break
			}
			x := v_0_0_0.Args[0]
			v0 := b.NewValue0(v_0.Pos, OpRISCV64ANDI, typ.Int64)
			v0.AuxInt = int64ToAuxInt((c & 0b11_1111_0000) | int64(bits.Reverse8(uint8(c))&0b0000_1111))
			v1 := b.NewValue0(v_0.Pos, OpRISCV64FCLASSD, typ.Int64)
			v1.AddArg(x)
			v0.AddArg(v1)
			b.resetWithControl(BlockRISCV64BNEZ, v0)
			return true
		}
	case BlockIf:
		// match: (If cond yes no)
		// result: (BNEZ (MOVBUreg <typ.UInt64> cond) yes no)
		for {
			cond := b.Controls[0]
			v0 := b.NewValue0(cond.Pos, OpRISCV64MOVBUreg, typ.UInt64)
			v0.AddArg(cond)
			b.resetWithControl(BlockRISCV64BNEZ, v0)
			return true
		}
	}
	return false
}
 11228  

View as plain text