      1 // Code generated from gen/MIPS.rules; DO NOT EDIT.
      2 // generated with: cd gen; go run *.go
      3 
      4 package ssa
      5 
      6 import "math"
      7 import "cmd/internal/obj"
      8 import "cmd/internal/objabi"
      9 import "cmd/compile/internal/types"
     10 
     11 var _ = math.MinInt8  // in case not otherwise used
     12 var _ = obj.ANOP      // in case not otherwise used
     13 var _ = objabi.GOROOT // in case not otherwise used
     14 var _ = types.TypeMem // in case not otherwise used
     15 
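// rewriteValueMIPS dispatches on v.Op to the per-op rewrite helper and reports
// whether v was rewritten. Ops whose rule set spans several helpers (for
// example SGTconst, Move, Select0, Select1, Zero) chain the _0, _10, ... parts
// with ||, so the first rule that fires wins.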
     16 func rewriteValueMIPS(v *Value) bool {
     17 	switch v.Op {
     18 	case OpAdd16:
     19 		return rewriteValueMIPS_OpAdd16_0(v)
     20 	case OpAdd32:
     21 		return rewriteValueMIPS_OpAdd32_0(v)
     22 	case OpAdd32F:
     23 		return rewriteValueMIPS_OpAdd32F_0(v)
     24 	case OpAdd32withcarry:
     25 		return rewriteValueMIPS_OpAdd32withcarry_0(v)
     26 	case OpAdd64F:
     27 		return rewriteValueMIPS_OpAdd64F_0(v)
     28 	case OpAdd8:
     29 		return rewriteValueMIPS_OpAdd8_0(v)
     30 	case OpAddPtr:
     31 		return rewriteValueMIPS_OpAddPtr_0(v)
     32 	case OpAddr:
     33 		return rewriteValueMIPS_OpAddr_0(v)
     34 	case OpAnd16:
     35 		return rewriteValueMIPS_OpAnd16_0(v)
     36 	case OpAnd32:
     37 		return rewriteValueMIPS_OpAnd32_0(v)
     38 	case OpAnd8:
     39 		return rewriteValueMIPS_OpAnd8_0(v)
     40 	case OpAndB:
     41 		return rewriteValueMIPS_OpAndB_0(v)
     42 	case OpAtomicAdd32:
     43 		return rewriteValueMIPS_OpAtomicAdd32_0(v)
     44 	case OpAtomicAnd8:
     45 		return rewriteValueMIPS_OpAtomicAnd8_0(v)
     46 	case OpAtomicCompareAndSwap32:
     47 		return rewriteValueMIPS_OpAtomicCompareAndSwap32_0(v)
     48 	case OpAtomicExchange32:
     49 		return rewriteValueMIPS_OpAtomicExchange32_0(v)
     50 	case OpAtomicLoad32:
     51 		return rewriteValueMIPS_OpAtomicLoad32_0(v)
     52 	case OpAtomicLoadPtr:
     53 		return rewriteValueMIPS_OpAtomicLoadPtr_0(v)
     54 	case OpAtomicOr8:
     55 		return rewriteValueMIPS_OpAtomicOr8_0(v)
     56 	case OpAtomicStore32:
     57 		return rewriteValueMIPS_OpAtomicStore32_0(v)
     58 	case OpAtomicStorePtrNoWB:
     59 		return rewriteValueMIPS_OpAtomicStorePtrNoWB_0(v)
     60 	case OpAvg32u:
     61 		return rewriteValueMIPS_OpAvg32u_0(v)
     62 	case OpBitLen32:
     63 		return rewriteValueMIPS_OpBitLen32_0(v)
     64 	case OpClosureCall:
     65 		return rewriteValueMIPS_OpClosureCall_0(v)
     66 	case OpCom16:
     67 		return rewriteValueMIPS_OpCom16_0(v)
     68 	case OpCom32:
     69 		return rewriteValueMIPS_OpCom32_0(v)
     70 	case OpCom8:
     71 		return rewriteValueMIPS_OpCom8_0(v)
     72 	case OpConst16:
     73 		return rewriteValueMIPS_OpConst16_0(v)
     74 	case OpConst32:
     75 		return rewriteValueMIPS_OpConst32_0(v)
     76 	case OpConst32F:
     77 		return rewriteValueMIPS_OpConst32F_0(v)
     78 	case OpConst64F:
     79 		return rewriteValueMIPS_OpConst64F_0(v)
     80 	case OpConst8:
     81 		return rewriteValueMIPS_OpConst8_0(v)
     82 	case OpConstBool:
     83 		return rewriteValueMIPS_OpConstBool_0(v)
     84 	case OpConstNil:
     85 		return rewriteValueMIPS_OpConstNil_0(v)
     86 	case OpConvert:
     87 		return rewriteValueMIPS_OpConvert_0(v)
     88 	case OpCtz32:
     89 		return rewriteValueMIPS_OpCtz32_0(v)
     90 	case OpCvt32Fto32:
     91 		return rewriteValueMIPS_OpCvt32Fto32_0(v)
     92 	case OpCvt32Fto64F:
     93 		return rewriteValueMIPS_OpCvt32Fto64F_0(v)
     94 	case OpCvt32to32F:
     95 		return rewriteValueMIPS_OpCvt32to32F_0(v)
     96 	case OpCvt32to64F:
     97 		return rewriteValueMIPS_OpCvt32to64F_0(v)
     98 	case OpCvt64Fto32:
     99 		return rewriteValueMIPS_OpCvt64Fto32_0(v)
    100 	case OpCvt64Fto32F:
    101 		return rewriteValueMIPS_OpCvt64Fto32F_0(v)
    102 	case OpDiv16:
    103 		return rewriteValueMIPS_OpDiv16_0(v)
    104 	case OpDiv16u:
    105 		return rewriteValueMIPS_OpDiv16u_0(v)
    106 	case OpDiv32:
    107 		return rewriteValueMIPS_OpDiv32_0(v)
    108 	case OpDiv32F:
    109 		return rewriteValueMIPS_OpDiv32F_0(v)
    110 	case OpDiv32u:
    111 		return rewriteValueMIPS_OpDiv32u_0(v)
    112 	case OpDiv64F:
    113 		return rewriteValueMIPS_OpDiv64F_0(v)
    114 	case OpDiv8:
    115 		return rewriteValueMIPS_OpDiv8_0(v)
    116 	case OpDiv8u:
    117 		return rewriteValueMIPS_OpDiv8u_0(v)
    118 	case OpEq16:
    119 		return rewriteValueMIPS_OpEq16_0(v)
    120 	case OpEq32:
    121 		return rewriteValueMIPS_OpEq32_0(v)
    122 	case OpEq32F:
    123 		return rewriteValueMIPS_OpEq32F_0(v)
    124 	case OpEq64F:
    125 		return rewriteValueMIPS_OpEq64F_0(v)
    126 	case OpEq8:
    127 		return rewriteValueMIPS_OpEq8_0(v)
    128 	case OpEqB:
    129 		return rewriteValueMIPS_OpEqB_0(v)
    130 	case OpEqPtr:
    131 		return rewriteValueMIPS_OpEqPtr_0(v)
    132 	case OpGeq16:
    133 		return rewriteValueMIPS_OpGeq16_0(v)
    134 	case OpGeq16U:
    135 		return rewriteValueMIPS_OpGeq16U_0(v)
    136 	case OpGeq32:
    137 		return rewriteValueMIPS_OpGeq32_0(v)
    138 	case OpGeq32F:
    139 		return rewriteValueMIPS_OpGeq32F_0(v)
    140 	case OpGeq32U:
    141 		return rewriteValueMIPS_OpGeq32U_0(v)
    142 	case OpGeq64F:
    143 		return rewriteValueMIPS_OpGeq64F_0(v)
    144 	case OpGeq8:
    145 		return rewriteValueMIPS_OpGeq8_0(v)
    146 	case OpGeq8U:
    147 		return rewriteValueMIPS_OpGeq8U_0(v)
    148 	case OpGetCallerSP:
    149 		return rewriteValueMIPS_OpGetCallerSP_0(v)
    150 	case OpGetClosurePtr:
    151 		return rewriteValueMIPS_OpGetClosurePtr_0(v)
    152 	case OpGreater16:
    153 		return rewriteValueMIPS_OpGreater16_0(v)
    154 	case OpGreater16U:
    155 		return rewriteValueMIPS_OpGreater16U_0(v)
    156 	case OpGreater32:
    157 		return rewriteValueMIPS_OpGreater32_0(v)
    158 	case OpGreater32F:
    159 		return rewriteValueMIPS_OpGreater32F_0(v)
    160 	case OpGreater32U:
    161 		return rewriteValueMIPS_OpGreater32U_0(v)
    162 	case OpGreater64F:
    163 		return rewriteValueMIPS_OpGreater64F_0(v)
    164 	case OpGreater8:
    165 		return rewriteValueMIPS_OpGreater8_0(v)
    166 	case OpGreater8U:
    167 		return rewriteValueMIPS_OpGreater8U_0(v)
    168 	case OpHmul32:
    169 		return rewriteValueMIPS_OpHmul32_0(v)
    170 	case OpHmul32u:
    171 		return rewriteValueMIPS_OpHmul32u_0(v)
    172 	case OpInterCall:
    173 		return rewriteValueMIPS_OpInterCall_0(v)
    174 	case OpIsInBounds:
    175 		return rewriteValueMIPS_OpIsInBounds_0(v)
    176 	case OpIsNonNil:
    177 		return rewriteValueMIPS_OpIsNonNil_0(v)
    178 	case OpIsSliceInBounds:
    179 		return rewriteValueMIPS_OpIsSliceInBounds_0(v)
    180 	case OpLeq16:
    181 		return rewriteValueMIPS_OpLeq16_0(v)
    182 	case OpLeq16U:
    183 		return rewriteValueMIPS_OpLeq16U_0(v)
    184 	case OpLeq32:
    185 		return rewriteValueMIPS_OpLeq32_0(v)
    186 	case OpLeq32F:
    187 		return rewriteValueMIPS_OpLeq32F_0(v)
    188 	case OpLeq32U:
    189 		return rewriteValueMIPS_OpLeq32U_0(v)
    190 	case OpLeq64F:
    191 		return rewriteValueMIPS_OpLeq64F_0(v)
    192 	case OpLeq8:
    193 		return rewriteValueMIPS_OpLeq8_0(v)
    194 	case OpLeq8U:
    195 		return rewriteValueMIPS_OpLeq8U_0(v)
    196 	case OpLess16:
    197 		return rewriteValueMIPS_OpLess16_0(v)
    198 	case OpLess16U:
    199 		return rewriteValueMIPS_OpLess16U_0(v)
    200 	case OpLess32:
    201 		return rewriteValueMIPS_OpLess32_0(v)
    202 	case OpLess32F:
    203 		return rewriteValueMIPS_OpLess32F_0(v)
    204 	case OpLess32U:
    205 		return rewriteValueMIPS_OpLess32U_0(v)
    206 	case OpLess64F:
    207 		return rewriteValueMIPS_OpLess64F_0(v)
    208 	case OpLess8:
    209 		return rewriteValueMIPS_OpLess8_0(v)
    210 	case OpLess8U:
    211 		return rewriteValueMIPS_OpLess8U_0(v)
    212 	case OpLoad:
    213 		return rewriteValueMIPS_OpLoad_0(v)
    214 	case OpLsh16x16:
    215 		return rewriteValueMIPS_OpLsh16x16_0(v)
    216 	case OpLsh16x32:
    217 		return rewriteValueMIPS_OpLsh16x32_0(v)
    218 	case OpLsh16x64:
    219 		return rewriteValueMIPS_OpLsh16x64_0(v)
    220 	case OpLsh16x8:
    221 		return rewriteValueMIPS_OpLsh16x8_0(v)
    222 	case OpLsh32x16:
    223 		return rewriteValueMIPS_OpLsh32x16_0(v)
    224 	case OpLsh32x32:
    225 		return rewriteValueMIPS_OpLsh32x32_0(v)
    226 	case OpLsh32x64:
    227 		return rewriteValueMIPS_OpLsh32x64_0(v)
    228 	case OpLsh32x8:
    229 		return rewriteValueMIPS_OpLsh32x8_0(v)
    230 	case OpLsh8x16:
    231 		return rewriteValueMIPS_OpLsh8x16_0(v)
    232 	case OpLsh8x32:
    233 		return rewriteValueMIPS_OpLsh8x32_0(v)
    234 	case OpLsh8x64:
    235 		return rewriteValueMIPS_OpLsh8x64_0(v)
    236 	case OpLsh8x8:
    237 		return rewriteValueMIPS_OpLsh8x8_0(v)
    238 	case OpMIPSADD:
    239 		return rewriteValueMIPS_OpMIPSADD_0(v)
    240 	case OpMIPSADDconst:
    241 		return rewriteValueMIPS_OpMIPSADDconst_0(v)
    242 	case OpMIPSAND:
    243 		return rewriteValueMIPS_OpMIPSAND_0(v)
    244 	case OpMIPSANDconst:
    245 		return rewriteValueMIPS_OpMIPSANDconst_0(v)
    246 	case OpMIPSCMOVZ:
    247 		return rewriteValueMIPS_OpMIPSCMOVZ_0(v)
    248 	case OpMIPSCMOVZzero:
    249 		return rewriteValueMIPS_OpMIPSCMOVZzero_0(v)
    250 	case OpMIPSLoweredAtomicAdd:
    251 		return rewriteValueMIPS_OpMIPSLoweredAtomicAdd_0(v)
    252 	case OpMIPSLoweredAtomicStore:
    253 		return rewriteValueMIPS_OpMIPSLoweredAtomicStore_0(v)
    254 	case OpMIPSMOVBUload:
    255 		return rewriteValueMIPS_OpMIPSMOVBUload_0(v)
    256 	case OpMIPSMOVBUreg:
    257 		return rewriteValueMIPS_OpMIPSMOVBUreg_0(v)
    258 	case OpMIPSMOVBload:
    259 		return rewriteValueMIPS_OpMIPSMOVBload_0(v)
    260 	case OpMIPSMOVBreg:
    261 		return rewriteValueMIPS_OpMIPSMOVBreg_0(v)
    262 	case OpMIPSMOVBstore:
    263 		return rewriteValueMIPS_OpMIPSMOVBstore_0(v)
    264 	case OpMIPSMOVBstorezero:
    265 		return rewriteValueMIPS_OpMIPSMOVBstorezero_0(v)
    266 	case OpMIPSMOVDload:
    267 		return rewriteValueMIPS_OpMIPSMOVDload_0(v)
    268 	case OpMIPSMOVDstore:
    269 		return rewriteValueMIPS_OpMIPSMOVDstore_0(v)
    270 	case OpMIPSMOVFload:
    271 		return rewriteValueMIPS_OpMIPSMOVFload_0(v)
    272 	case OpMIPSMOVFstore:
    273 		return rewriteValueMIPS_OpMIPSMOVFstore_0(v)
    274 	case OpMIPSMOVHUload:
    275 		return rewriteValueMIPS_OpMIPSMOVHUload_0(v)
    276 	case OpMIPSMOVHUreg:
    277 		return rewriteValueMIPS_OpMIPSMOVHUreg_0(v)
    278 	case OpMIPSMOVHload:
    279 		return rewriteValueMIPS_OpMIPSMOVHload_0(v)
    280 	case OpMIPSMOVHreg:
    281 		return rewriteValueMIPS_OpMIPSMOVHreg_0(v)
    282 	case OpMIPSMOVHstore:
    283 		return rewriteValueMIPS_OpMIPSMOVHstore_0(v)
    284 	case OpMIPSMOVHstorezero:
    285 		return rewriteValueMIPS_OpMIPSMOVHstorezero_0(v)
    286 	case OpMIPSMOVWload:
    287 		return rewriteValueMIPS_OpMIPSMOVWload_0(v)
    288 	case OpMIPSMOVWreg:
    289 		return rewriteValueMIPS_OpMIPSMOVWreg_0(v)
    290 	case OpMIPSMOVWstore:
    291 		return rewriteValueMIPS_OpMIPSMOVWstore_0(v)
    292 	case OpMIPSMOVWstorezero:
    293 		return rewriteValueMIPS_OpMIPSMOVWstorezero_0(v)
    294 	case OpMIPSMUL:
    295 		return rewriteValueMIPS_OpMIPSMUL_0(v)
    296 	case OpMIPSNEG:
    297 		return rewriteValueMIPS_OpMIPSNEG_0(v)
    298 	case OpMIPSNOR:
    299 		return rewriteValueMIPS_OpMIPSNOR_0(v)
    300 	case OpMIPSNORconst:
    301 		return rewriteValueMIPS_OpMIPSNORconst_0(v)
    302 	case OpMIPSOR:
    303 		return rewriteValueMIPS_OpMIPSOR_0(v)
    304 	case OpMIPSORconst:
    305 		return rewriteValueMIPS_OpMIPSORconst_0(v)
    306 	case OpMIPSSGT:
    307 		return rewriteValueMIPS_OpMIPSSGT_0(v)
    308 	case OpMIPSSGTU:
    309 		return rewriteValueMIPS_OpMIPSSGTU_0(v)
    310 	case OpMIPSSGTUconst:
    311 		return rewriteValueMIPS_OpMIPSSGTUconst_0(v)
    312 	case OpMIPSSGTUzero:
    313 		return rewriteValueMIPS_OpMIPSSGTUzero_0(v)
    314 	case OpMIPSSGTconst:
    315 		return rewriteValueMIPS_OpMIPSSGTconst_0(v) || rewriteValueMIPS_OpMIPSSGTconst_10(v)
    316 	case OpMIPSSGTzero:
    317 		return rewriteValueMIPS_OpMIPSSGTzero_0(v)
    318 	case OpMIPSSLL:
    319 		return rewriteValueMIPS_OpMIPSSLL_0(v)
    320 	case OpMIPSSLLconst:
    321 		return rewriteValueMIPS_OpMIPSSLLconst_0(v)
    322 	case OpMIPSSRA:
    323 		return rewriteValueMIPS_OpMIPSSRA_0(v)
    324 	case OpMIPSSRAconst:
    325 		return rewriteValueMIPS_OpMIPSSRAconst_0(v)
    326 	case OpMIPSSRL:
    327 		return rewriteValueMIPS_OpMIPSSRL_0(v)
    328 	case OpMIPSSRLconst:
    329 		return rewriteValueMIPS_OpMIPSSRLconst_0(v)
    330 	case OpMIPSSUB:
    331 		return rewriteValueMIPS_OpMIPSSUB_0(v)
    332 	case OpMIPSSUBconst:
    333 		return rewriteValueMIPS_OpMIPSSUBconst_0(v)
    334 	case OpMIPSXOR:
    335 		return rewriteValueMIPS_OpMIPSXOR_0(v)
    336 	case OpMIPSXORconst:
    337 		return rewriteValueMIPS_OpMIPSXORconst_0(v)
    338 	case OpMod16:
    339 		return rewriteValueMIPS_OpMod16_0(v)
    340 	case OpMod16u:
    341 		return rewriteValueMIPS_OpMod16u_0(v)
    342 	case OpMod32:
    343 		return rewriteValueMIPS_OpMod32_0(v)
    344 	case OpMod32u:
    345 		return rewriteValueMIPS_OpMod32u_0(v)
    346 	case OpMod8:
    347 		return rewriteValueMIPS_OpMod8_0(v)
    348 	case OpMod8u:
    349 		return rewriteValueMIPS_OpMod8u_0(v)
    350 	case OpMove:
    351 		return rewriteValueMIPS_OpMove_0(v) || rewriteValueMIPS_OpMove_10(v)
    352 	case OpMul16:
    353 		return rewriteValueMIPS_OpMul16_0(v)
    354 	case OpMul32:
    355 		return rewriteValueMIPS_OpMul32_0(v)
    356 	case OpMul32F:
    357 		return rewriteValueMIPS_OpMul32F_0(v)
    358 	case OpMul32uhilo:
    359 		return rewriteValueMIPS_OpMul32uhilo_0(v)
    360 	case OpMul64F:
    361 		return rewriteValueMIPS_OpMul64F_0(v)
    362 	case OpMul8:
    363 		return rewriteValueMIPS_OpMul8_0(v)
    364 	case OpNeg16:
    365 		return rewriteValueMIPS_OpNeg16_0(v)
    366 	case OpNeg32:
    367 		return rewriteValueMIPS_OpNeg32_0(v)
    368 	case OpNeg32F:
    369 		return rewriteValueMIPS_OpNeg32F_0(v)
    370 	case OpNeg64F:
    371 		return rewriteValueMIPS_OpNeg64F_0(v)
    372 	case OpNeg8:
    373 		return rewriteValueMIPS_OpNeg8_0(v)
    374 	case OpNeq16:
    375 		return rewriteValueMIPS_OpNeq16_0(v)
    376 	case OpNeq32:
    377 		return rewriteValueMIPS_OpNeq32_0(v)
    378 	case OpNeq32F:
    379 		return rewriteValueMIPS_OpNeq32F_0(v)
    380 	case OpNeq64F:
    381 		return rewriteValueMIPS_OpNeq64F_0(v)
    382 	case OpNeq8:
    383 		return rewriteValueMIPS_OpNeq8_0(v)
    384 	case OpNeqB:
    385 		return rewriteValueMIPS_OpNeqB_0(v)
    386 	case OpNeqPtr:
    387 		return rewriteValueMIPS_OpNeqPtr_0(v)
    388 	case OpNilCheck:
    389 		return rewriteValueMIPS_OpNilCheck_0(v)
    390 	case OpNot:
    391 		return rewriteValueMIPS_OpNot_0(v)
    392 	case OpOffPtr:
    393 		return rewriteValueMIPS_OpOffPtr_0(v)
    394 	case OpOr16:
    395 		return rewriteValueMIPS_OpOr16_0(v)
    396 	case OpOr32:
    397 		return rewriteValueMIPS_OpOr32_0(v)
    398 	case OpOr8:
    399 		return rewriteValueMIPS_OpOr8_0(v)
    400 	case OpOrB:
    401 		return rewriteValueMIPS_OpOrB_0(v)
    402 	case OpRound32F:
    403 		return rewriteValueMIPS_OpRound32F_0(v)
    404 	case OpRound64F:
    405 		return rewriteValueMIPS_OpRound64F_0(v)
    406 	case OpRsh16Ux16:
    407 		return rewriteValueMIPS_OpRsh16Ux16_0(v)
    408 	case OpRsh16Ux32:
    409 		return rewriteValueMIPS_OpRsh16Ux32_0(v)
    410 	case OpRsh16Ux64:
    411 		return rewriteValueMIPS_OpRsh16Ux64_0(v)
    412 	case OpRsh16Ux8:
    413 		return rewriteValueMIPS_OpRsh16Ux8_0(v)
    414 	case OpRsh16x16:
    415 		return rewriteValueMIPS_OpRsh16x16_0(v)
    416 	case OpRsh16x32:
    417 		return rewriteValueMIPS_OpRsh16x32_0(v)
    418 	case OpRsh16x64:
    419 		return rewriteValueMIPS_OpRsh16x64_0(v)
    420 	case OpRsh16x8:
    421 		return rewriteValueMIPS_OpRsh16x8_0(v)
    422 	case OpRsh32Ux16:
    423 		return rewriteValueMIPS_OpRsh32Ux16_0(v)
    424 	case OpRsh32Ux32:
    425 		return rewriteValueMIPS_OpRsh32Ux32_0(v)
    426 	case OpRsh32Ux64:
    427 		return rewriteValueMIPS_OpRsh32Ux64_0(v)
    428 	case OpRsh32Ux8:
    429 		return rewriteValueMIPS_OpRsh32Ux8_0(v)
    430 	case OpRsh32x16:
    431 		return rewriteValueMIPS_OpRsh32x16_0(v)
    432 	case OpRsh32x32:
    433 		return rewriteValueMIPS_OpRsh32x32_0(v)
    434 	case OpRsh32x64:
    435 		return rewriteValueMIPS_OpRsh32x64_0(v)
    436 	case OpRsh32x8:
    437 		return rewriteValueMIPS_OpRsh32x8_0(v)
    438 	case OpRsh8Ux16:
    439 		return rewriteValueMIPS_OpRsh8Ux16_0(v)
    440 	case OpRsh8Ux32:
    441 		return rewriteValueMIPS_OpRsh8Ux32_0(v)
    442 	case OpRsh8Ux64:
    443 		return rewriteValueMIPS_OpRsh8Ux64_0(v)
    444 	case OpRsh8Ux8:
    445 		return rewriteValueMIPS_OpRsh8Ux8_0(v)
    446 	case OpRsh8x16:
    447 		return rewriteValueMIPS_OpRsh8x16_0(v)
    448 	case OpRsh8x32:
    449 		return rewriteValueMIPS_OpRsh8x32_0(v)
    450 	case OpRsh8x64:
    451 		return rewriteValueMIPS_OpRsh8x64_0(v)
    452 	case OpRsh8x8:
    453 		return rewriteValueMIPS_OpRsh8x8_0(v)
    454 	case OpSelect0:
    455 		return rewriteValueMIPS_OpSelect0_0(v) || rewriteValueMIPS_OpSelect0_10(v)
    456 	case OpSelect1:
    457 		return rewriteValueMIPS_OpSelect1_0(v) || rewriteValueMIPS_OpSelect1_10(v)
    458 	case OpSignExt16to32:
    459 		return rewriteValueMIPS_OpSignExt16to32_0(v)
    460 	case OpSignExt8to16:
    461 		return rewriteValueMIPS_OpSignExt8to16_0(v)
    462 	case OpSignExt8to32:
    463 		return rewriteValueMIPS_OpSignExt8to32_0(v)
    464 	case OpSignmask:
    465 		return rewriteValueMIPS_OpSignmask_0(v)
    466 	case OpSlicemask:
    467 		return rewriteValueMIPS_OpSlicemask_0(v)
    468 	case OpSqrt:
    469 		return rewriteValueMIPS_OpSqrt_0(v)
    470 	case OpStaticCall:
    471 		return rewriteValueMIPS_OpStaticCall_0(v)
    472 	case OpStore:
    473 		return rewriteValueMIPS_OpStore_0(v)
    474 	case OpSub16:
    475 		return rewriteValueMIPS_OpSub16_0(v)
    476 	case OpSub32:
    477 		return rewriteValueMIPS_OpSub32_0(v)
    478 	case OpSub32F:
    479 		return rewriteValueMIPS_OpSub32F_0(v)
    480 	case OpSub32withcarry:
    481 		return rewriteValueMIPS_OpSub32withcarry_0(v)
    482 	case OpSub64F:
    483 		return rewriteValueMIPS_OpSub64F_0(v)
    484 	case OpSub8:
    485 		return rewriteValueMIPS_OpSub8_0(v)
    486 	case OpSubPtr:
    487 		return rewriteValueMIPS_OpSubPtr_0(v)
    488 	case OpTrunc16to8:
    489 		return rewriteValueMIPS_OpTrunc16to8_0(v)
    490 	case OpTrunc32to16:
    491 		return rewriteValueMIPS_OpTrunc32to16_0(v)
    492 	case OpTrunc32to8:
    493 		return rewriteValueMIPS_OpTrunc32to8_0(v)
    494 	case OpXor16:
    495 		return rewriteValueMIPS_OpXor16_0(v)
    496 	case OpXor32:
    497 		return rewriteValueMIPS_OpXor32_0(v)
    498 	case OpXor8:
    499 		return rewriteValueMIPS_OpXor8_0(v)
    500 	case OpZero:
    501 		return rewriteValueMIPS_OpZero_0(v) || rewriteValueMIPS_OpZero_10(v)
    502 	case OpZeroExt16to32:
    503 		return rewriteValueMIPS_OpZeroExt16to32_0(v)
    504 	case OpZeroExt8to16:
    505 		return rewriteValueMIPS_OpZeroExt8to16_0(v)
    506 	case OpZeroExt8to32:
    507 		return rewriteValueMIPS_OpZeroExt8to32_0(v)
    508 	case OpZeromask:
    509 		return rewriteValueMIPS_OpZeromask_0(v)
    510 	}
    511 	return false
    512 }
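// Each helper below encodes the gen/MIPS.rules entries for a single op. A rule
// is written as its match/cond/result comment followed by a one-shot for-block:
// when the pattern and cond hold, the block rewrites v in place (v.reset plus
// AddArg of the new operands) and returns true; a failed cond breaks out so the
// next rule, if any, can be tried, and helpers whose rules carry conds end with
// return false for the no-match case.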
    513 func rewriteValueMIPS_OpAdd16_0(v *Value) bool {
    514 	// match: (Add16 x y)
    515 	// cond:
    516 	// result: (ADD x y)
    517 	for {
    518 		_ = v.Args[1]
    519 		x := v.Args[0]
    520 		y := v.Args[1]
    521 		v.reset(OpMIPSADD)
    522 		v.AddArg(x)
    523 		v.AddArg(y)
    524 		return true
    525 	}
    526 }
    527 func rewriteValueMIPS_OpAdd32_0(v *Value) bool {
    528 	// match: (Add32 x y)
    529 	// cond:
    530 	// result: (ADD x y)
    531 	for {
    532 		_ = v.Args[1]
    533 		x := v.Args[0]
    534 		y := v.Args[1]
    535 		v.reset(OpMIPSADD)
    536 		v.AddArg(x)
    537 		v.AddArg(y)
    538 		return true
    539 	}
    540 }
    541 func rewriteValueMIPS_OpAdd32F_0(v *Value) bool {
    542 	// match: (Add32F x y)
    543 	// cond:
    544 	// result: (ADDF x y)
    545 	for {
    546 		_ = v.Args[1]
    547 		x := v.Args[0]
    548 		y := v.Args[1]
    549 		v.reset(OpMIPSADDF)
    550 		v.AddArg(x)
    551 		v.AddArg(y)
    552 		return true
    553 	}
    554 }
    555 func rewriteValueMIPS_OpAdd32withcarry_0(v *Value) bool {
    556 	b := v.Block
    557 	_ = b
    558 	// match: (Add32withcarry <t> x y c)
    559 	// cond:
    560 	// result: (ADD c (ADD <t> x y))
    561 	for {
    562 		t := v.Type
    563 		_ = v.Args[2]
    564 		x := v.Args[0]
    565 		y := v.Args[1]
    566 		c := v.Args[2]
    567 		v.reset(OpMIPSADD)
    568 		v.AddArg(c)
    569 		v0 := b.NewValue0(v.Pos, OpMIPSADD, t)
    570 		v0.AddArg(x)
    571 		v0.AddArg(y)
    572 		v.AddArg(v0)
    573 		return true
    574 	}
    575 }
    576 func rewriteValueMIPS_OpAdd64F_0(v *Value) bool {
    577 	// match: (Add64F x y)
    578 	// cond:
    579 	// result: (ADDD x y)
    580 	for {
    581 		_ = v.Args[1]
    582 		x := v.Args[0]
    583 		y := v.Args[1]
    584 		v.reset(OpMIPSADDD)
    585 		v.AddArg(x)
    586 		v.AddArg(y)
    587 		return true
    588 	}
    589 }
    590 func rewriteValueMIPS_OpAdd8_0(v *Value) bool {
    591 	// match: (Add8 x y)
    592 	// cond:
    593 	// result: (ADD x y)
    594 	for {
    595 		_ = v.Args[1]
    596 		x := v.Args[0]
    597 		y := v.Args[1]
    598 		v.reset(OpMIPSADD)
    599 		v.AddArg(x)
    600 		v.AddArg(y)
    601 		return true
    602 	}
    603 }
    604 func rewriteValueMIPS_OpAddPtr_0(v *Value) bool {
    605 	// match: (AddPtr x y)
    606 	// cond:
    607 	// result: (ADD x y)
    608 	for {
    609 		_ = v.Args[1]
    610 		x := v.Args[0]
    611 		y := v.Args[1]
    612 		v.reset(OpMIPSADD)
    613 		v.AddArg(x)
    614 		v.AddArg(y)
    615 		return true
    616 	}
    617 }
    618 func rewriteValueMIPS_OpAddr_0(v *Value) bool {
    619 	// match: (Addr {sym} base)
    620 	// cond:
    621 	// result: (MOVWaddr {sym} base)
    622 	for {
    623 		sym := v.Aux
    624 		base := v.Args[0]
    625 		v.reset(OpMIPSMOVWaddr)
    626 		v.Aux = sym
    627 		v.AddArg(base)
    628 		return true
    629 	}
    630 }
    631 func rewriteValueMIPS_OpAnd16_0(v *Value) bool {
    632 	// match: (And16 x y)
    633 	// cond:
    634 	// result: (AND x y)
    635 	for {
    636 		_ = v.Args[1]
    637 		x := v.Args[0]
    638 		y := v.Args[1]
    639 		v.reset(OpMIPSAND)
    640 		v.AddArg(x)
    641 		v.AddArg(y)
    642 		return true
    643 	}
    644 }
    645 func rewriteValueMIPS_OpAnd32_0(v *Value) bool {
    646 	// match: (And32 x y)
    647 	// cond:
    648 	// result: (AND x y)
    649 	for {
    650 		_ = v.Args[1]
    651 		x := v.Args[0]
    652 		y := v.Args[1]
    653 		v.reset(OpMIPSAND)
    654 		v.AddArg(x)
    655 		v.AddArg(y)
    656 		return true
    657 	}
    658 }
    659 func rewriteValueMIPS_OpAnd8_0(v *Value) bool {
    660 	// match: (And8 x y)
    661 	// cond:
    662 	// result: (AND x y)
    663 	for {
    664 		_ = v.Args[1]
    665 		x := v.Args[0]
    666 		y := v.Args[1]
    667 		v.reset(OpMIPSAND)
    668 		v.AddArg(x)
    669 		v.AddArg(y)
    670 		return true
    671 	}
    672 }
    673 func rewriteValueMIPS_OpAndB_0(v *Value) bool {
    674 	// match: (AndB x y)
    675 	// cond:
    676 	// result: (AND x y)
    677 	for {
    678 		_ = v.Args[1]
    679 		x := v.Args[0]
    680 		y := v.Args[1]
    681 		v.reset(OpMIPSAND)
    682 		v.AddArg(x)
    683 		v.AddArg(y)
    684 		return true
    685 	}
    686 }
    687 func rewriteValueMIPS_OpAtomicAdd32_0(v *Value) bool {
    688 	// match: (AtomicAdd32 ptr val mem)
    689 	// cond:
    690 	// result: (LoweredAtomicAdd ptr val mem)
    691 	for {
    692 		_ = v.Args[2]
    693 		ptr := v.Args[0]
    694 		val := v.Args[1]
    695 		mem := v.Args[2]
    696 		v.reset(OpMIPSLoweredAtomicAdd)
    697 		v.AddArg(ptr)
    698 		v.AddArg(val)
    699 		v.AddArg(mem)
    700 		return true
    701 	}
    702 }
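// MIPS32 has only word-sized LL/SC atomics, so AtomicAnd8 is lowered to a
// 32-bit LoweredAtomicAnd on the word containing the byte: the pointer is
// aligned down (ptr &^ 3), the byte value is shifted into its lane within the
// word, and the other three bytes are kept by OR-ing in ^(0xff << shift).
// Little- and big-endian targets place the byte in different lanes, hence the
// two rules guarded on config.BigEndian; the big-endian rule flips the low
// pointer bits with XORconst [3] to select the lane.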
    703 func rewriteValueMIPS_OpAtomicAnd8_0(v *Value) bool {
    704 	b := v.Block
    705 	_ = b
    706 	config := b.Func.Config
    707 	_ = config
    708 	typ := &b.Func.Config.Types
    709 	_ = typ
    710 	// match: (AtomicAnd8 ptr val mem)
    711 	// cond: !config.BigEndian
    712 	// result: (LoweredAtomicAnd (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) 		(OR <typ.UInt32> (SLL <typ.UInt32> (ZeroExt8to32 val) 			(SLLconst <typ.UInt32> [3] 				(ANDconst  <typ.UInt32> [3] ptr))) 		(NORconst [0] <typ.UInt32> (SLL <typ.UInt32> 			(MOVWconst [0xff]) (SLLconst <typ.UInt32> [3] 				(ANDconst <typ.UInt32> [3] ptr))))) mem)
    713 	for {
    714 		_ = v.Args[2]
    715 		ptr := v.Args[0]
    716 		val := v.Args[1]
    717 		mem := v.Args[2]
    718 		if !(!config.BigEndian) {
    719 			break
    720 		}
    721 		v.reset(OpMIPSLoweredAtomicAnd)
    722 		v0 := b.NewValue0(v.Pos, OpMIPSAND, typ.UInt32Ptr)
    723 		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
    724 		v1.AuxInt = ^3
    725 		v0.AddArg(v1)
    726 		v0.AddArg(ptr)
    727 		v.AddArg(v0)
    728 		v2 := b.NewValue0(v.Pos, OpMIPSOR, typ.UInt32)
    729 		v3 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
    730 		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
    731 		v4.AddArg(val)
    732 		v3.AddArg(v4)
    733 		v5 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
    734 		v5.AuxInt = 3
    735 		v6 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
    736 		v6.AuxInt = 3
    737 		v6.AddArg(ptr)
    738 		v5.AddArg(v6)
    739 		v3.AddArg(v5)
    740 		v2.AddArg(v3)
    741 		v7 := b.NewValue0(v.Pos, OpMIPSNORconst, typ.UInt32)
    742 		v7.AuxInt = 0
    743 		v8 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
    744 		v9 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
    745 		v9.AuxInt = 0xff
    746 		v8.AddArg(v9)
    747 		v10 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
    748 		v10.AuxInt = 3
    749 		v11 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
    750 		v11.AuxInt = 3
    751 		v11.AddArg(ptr)
    752 		v10.AddArg(v11)
    753 		v8.AddArg(v10)
    754 		v7.AddArg(v8)
    755 		v2.AddArg(v7)
    756 		v.AddArg(v2)
    757 		v.AddArg(mem)
    758 		return true
    759 	}
    760 	// match: (AtomicAnd8 ptr val mem)
    761 	// cond: config.BigEndian
    762 	// result: (LoweredAtomicAnd (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) 		(OR <typ.UInt32> (SLL <typ.UInt32> (ZeroExt8to32 val) 			(SLLconst <typ.UInt32> [3] 				(ANDconst  <typ.UInt32> [3] 					(XORconst <typ.UInt32> [3] ptr)))) 		(NORconst [0] <typ.UInt32> (SLL <typ.UInt32> 			(MOVWconst [0xff]) (SLLconst <typ.UInt32> [3] 				(ANDconst <typ.UInt32> [3] 					(XORconst <typ.UInt32> [3] ptr)))))) mem)
    763 	for {
    764 		_ = v.Args[2]
    765 		ptr := v.Args[0]
    766 		val := v.Args[1]
    767 		mem := v.Args[2]
    768 		if !(config.BigEndian) {
    769 			break
    770 		}
    771 		v.reset(OpMIPSLoweredAtomicAnd)
    772 		v0 := b.NewValue0(v.Pos, OpMIPSAND, typ.UInt32Ptr)
    773 		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
    774 		v1.AuxInt = ^3
    775 		v0.AddArg(v1)
    776 		v0.AddArg(ptr)
    777 		v.AddArg(v0)
    778 		v2 := b.NewValue0(v.Pos, OpMIPSOR, typ.UInt32)
    779 		v3 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
    780 		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
    781 		v4.AddArg(val)
    782 		v3.AddArg(v4)
    783 		v5 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
    784 		v5.AuxInt = 3
    785 		v6 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
    786 		v6.AuxInt = 3
    787 		v7 := b.NewValue0(v.Pos, OpMIPSXORconst, typ.UInt32)
    788 		v7.AuxInt = 3
    789 		v7.AddArg(ptr)
    790 		v6.AddArg(v7)
    791 		v5.AddArg(v6)
    792 		v3.AddArg(v5)
    793 		v2.AddArg(v3)
    794 		v8 := b.NewValue0(v.Pos, OpMIPSNORconst, typ.UInt32)
    795 		v8.AuxInt = 0
    796 		v9 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
    797 		v10 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
    798 		v10.AuxInt = 0xff
    799 		v9.AddArg(v10)
    800 		v11 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
    801 		v11.AuxInt = 3
    802 		v12 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
    803 		v12.AuxInt = 3
    804 		v13 := b.NewValue0(v.Pos, OpMIPSXORconst, typ.UInt32)
    805 		v13.AuxInt = 3
    806 		v13.AddArg(ptr)
    807 		v12.AddArg(v13)
    808 		v11.AddArg(v12)
    809 		v9.AddArg(v11)
    810 		v8.AddArg(v9)
    811 		v2.AddArg(v8)
    812 		v.AddArg(v2)
    813 		v.AddArg(mem)
    814 		return true
    815 	}
    816 	return false
    817 }
    818 func rewriteValueMIPS_OpAtomicCompareAndSwap32_0(v *Value) bool {
    819 	// match: (AtomicCompareAndSwap32 ptr old new_ mem)
    820 	// cond:
    821 	// result: (LoweredAtomicCas ptr old new_ mem)
    822 	for {
    823 		_ = v.Args[3]
    824 		ptr := v.Args[0]
    825 		old := v.Args[1]
    826 		new_ := v.Args[2]
    827 		mem := v.Args[3]
    828 		v.reset(OpMIPSLoweredAtomicCas)
    829 		v.AddArg(ptr)
    830 		v.AddArg(old)
    831 		v.AddArg(new_)
    832 		v.AddArg(mem)
    833 		return true
    834 	}
    835 }
    836 func rewriteValueMIPS_OpAtomicExchange32_0(v *Value) bool {
    837 	// match: (AtomicExchange32 ptr val mem)
    838 	// cond:
    839 	// result: (LoweredAtomicExchange ptr val mem)
    840 	for {
    841 		_ = v.Args[2]
    842 		ptr := v.Args[0]
    843 		val := v.Args[1]
    844 		mem := v.Args[2]
    845 		v.reset(OpMIPSLoweredAtomicExchange)
    846 		v.AddArg(ptr)
    847 		v.AddArg(val)
    848 		v.AddArg(mem)
    849 		return true
    850 	}
    851 }
    852 func rewriteValueMIPS_OpAtomicLoad32_0(v *Value) bool {
    853 	// match: (AtomicLoad32 ptr mem)
    854 	// cond:
    855 	// result: (LoweredAtomicLoad ptr mem)
    856 	for {
    857 		_ = v.Args[1]
    858 		ptr := v.Args[0]
    859 		mem := v.Args[1]
    860 		v.reset(OpMIPSLoweredAtomicLoad)
    861 		v.AddArg(ptr)
    862 		v.AddArg(mem)
    863 		return true
    864 	}
    865 }
    866 func rewriteValueMIPS_OpAtomicLoadPtr_0(v *Value) bool {
    867 	// match: (AtomicLoadPtr ptr mem)
    868 	// cond:
    869 	// result: (LoweredAtomicLoad  ptr mem)
    870 	for {
    871 		_ = v.Args[1]
    872 		ptr := v.Args[0]
    873 		mem := v.Args[1]
    874 		v.reset(OpMIPSLoweredAtomicLoad)
    875 		v.AddArg(ptr)
    876 		v.AddArg(mem)
    877 		return true
    878 	}
    879 }
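// AtomicOr8 gets the same word-aligned lowering as AtomicAnd8 above, minus the
// preservation mask: OR-ing zero bits leaves the other bytes of the word
// untouched.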
    880 func rewriteValueMIPS_OpAtomicOr8_0(v *Value) bool {
    881 	b := v.Block
    882 	_ = b
    883 	config := b.Func.Config
    884 	_ = config
    885 	typ := &b.Func.Config.Types
    886 	_ = typ
    887 	// match: (AtomicOr8 ptr val mem)
    888 	// cond: !config.BigEndian
    889 	// result: (LoweredAtomicOr (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) 		(SLL <typ.UInt32> (ZeroExt8to32 val) 			(SLLconst <typ.UInt32> [3] 				(ANDconst <typ.UInt32> [3] ptr))) mem)
    890 	for {
    891 		_ = v.Args[2]
    892 		ptr := v.Args[0]
    893 		val := v.Args[1]
    894 		mem := v.Args[2]
    895 		if !(!config.BigEndian) {
    896 			break
    897 		}
    898 		v.reset(OpMIPSLoweredAtomicOr)
    899 		v0 := b.NewValue0(v.Pos, OpMIPSAND, typ.UInt32Ptr)
    900 		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
    901 		v1.AuxInt = ^3
    902 		v0.AddArg(v1)
    903 		v0.AddArg(ptr)
    904 		v.AddArg(v0)
    905 		v2 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
    906 		v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
    907 		v3.AddArg(val)
    908 		v2.AddArg(v3)
    909 		v4 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
    910 		v4.AuxInt = 3
    911 		v5 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
    912 		v5.AuxInt = 3
    913 		v5.AddArg(ptr)
    914 		v4.AddArg(v5)
    915 		v2.AddArg(v4)
    916 		v.AddArg(v2)
    917 		v.AddArg(mem)
    918 		return true
    919 	}
    920 	// match: (AtomicOr8 ptr val mem)
    921 	// cond: config.BigEndian
    922 	// result: (LoweredAtomicOr (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) 		(SLL <typ.UInt32> (ZeroExt8to32 val) 			(SLLconst <typ.UInt32> [3] 				(ANDconst <typ.UInt32> [3] 					(XORconst <typ.UInt32> [3] ptr)))) mem)
    923 	for {
    924 		_ = v.Args[2]
    925 		ptr := v.Args[0]
    926 		val := v.Args[1]
    927 		mem := v.Args[2]
    928 		if !(config.BigEndian) {
    929 			break
    930 		}
    931 		v.reset(OpMIPSLoweredAtomicOr)
    932 		v0 := b.NewValue0(v.Pos, OpMIPSAND, typ.UInt32Ptr)
    933 		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
    934 		v1.AuxInt = ^3
    935 		v0.AddArg(v1)
    936 		v0.AddArg(ptr)
    937 		v.AddArg(v0)
    938 		v2 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
    939 		v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
    940 		v3.AddArg(val)
    941 		v2.AddArg(v3)
    942 		v4 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
    943 		v4.AuxInt = 3
    944 		v5 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
    945 		v5.AuxInt = 3
    946 		v6 := b.NewValue0(v.Pos, OpMIPSXORconst, typ.UInt32)
    947 		v6.AuxInt = 3
    948 		v6.AddArg(ptr)
    949 		v5.AddArg(v6)
    950 		v4.AddArg(v5)
    951 		v2.AddArg(v4)
    952 		v.AddArg(v2)
    953 		v.AddArg(mem)
    954 		return true
    955 	}
    956 	return false
    957 }
    958 func rewriteValueMIPS_OpAtomicStore32_0(v *Value) bool {
    959 	// match: (AtomicStore32 ptr val mem)
    960 	// cond:
    961 	// result: (LoweredAtomicStore ptr val mem)
    962 	for {
    963 		_ = v.Args[2]
    964 		ptr := v.Args[0]
    965 		val := v.Args[1]
    966 		mem := v.Args[2]
    967 		v.reset(OpMIPSLoweredAtomicStore)
    968 		v.AddArg(ptr)
    969 		v.AddArg(val)
    970 		v.AddArg(mem)
    971 		return true
    972 	}
    973 }
    974 func rewriteValueMIPS_OpAtomicStorePtrNoWB_0(v *Value) bool {
    975 	// match: (AtomicStorePtrNoWB ptr val mem)
    976 	// cond:
    977 	// result: (LoweredAtomicStore  ptr val mem)
    978 	for {
    979 		_ = v.Args[2]
    980 		ptr := v.Args[0]
    981 		val := v.Args[1]
    982 		mem := v.Args[2]
    983 		v.reset(OpMIPSLoweredAtomicStore)
    984 		v.AddArg(ptr)
    985 		v.AddArg(val)
    986 		v.AddArg(mem)
    987 		return true
    988 	}
    989 }
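// Avg32u is lowered to y + (x-y)>>1 rather than (x+y)>>1, so no intermediate
// wider than 32 bits is needed; the unsigned-division-by-constant lowering
// that creates this op supplies operands with x >= y (the second operand is a
// high-multiply of the first), which keeps the subtraction from wrapping.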
    990 func rewriteValueMIPS_OpAvg32u_0(v *Value) bool {
    991 	b := v.Block
    992 	_ = b
    993 	// match: (Avg32u <t> x y)
    994 	// cond:
    995 	// result: (ADD (SRLconst <t> (SUB <t> x y) [1]) y)
    996 	for {
    997 		t := v.Type
    998 		_ = v.Args[1]
    999 		x := v.Args[0]
   1000 		y := v.Args[1]
   1001 		v.reset(OpMIPSADD)
   1002 		v0 := b.NewValue0(v.Pos, OpMIPSSRLconst, t)
   1003 		v0.AuxInt = 1
   1004 		v1 := b.NewValue0(v.Pos, OpMIPSSUB, t)
   1005 		v1.AddArg(x)
   1006 		v1.AddArg(y)
   1007 		v0.AddArg(v1)
   1008 		v.AddArg(v0)
   1009 		v.AddArg(y)
   1010 		return true
   1011 	}
   1012 }
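// BitLen32 is 32 - CLZ(x), so BitLen32(0) yields 0.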
   1013 func rewriteValueMIPS_OpBitLen32_0(v *Value) bool {
   1014 	b := v.Block
   1015 	_ = b
   1016 	typ := &b.Func.Config.Types
   1017 	_ = typ
   1018 	// match: (BitLen32 <t> x)
   1019 	// cond:
   1020 	// result: (SUB (MOVWconst [32]) (CLZ <t> x))
   1021 	for {
   1022 		t := v.Type
   1023 		x := v.Args[0]
   1024 		v.reset(OpMIPSSUB)
   1025 		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   1026 		v0.AuxInt = 32
   1027 		v.AddArg(v0)
   1028 		v1 := b.NewValue0(v.Pos, OpMIPSCLZ, t)
   1029 		v1.AddArg(x)
   1030 		v.AddArg(v1)
   1031 		return true
   1032 	}
   1033 }
   1034 func rewriteValueMIPS_OpClosureCall_0(v *Value) bool {
   1035 	// match: (ClosureCall [argwid] entry closure mem)
   1036 	// cond:
   1037 	// result: (CALLclosure [argwid] entry closure mem)
   1038 	for {
   1039 		argwid := v.AuxInt
   1040 		_ = v.Args[2]
   1041 		entry := v.Args[0]
   1042 		closure := v.Args[1]
   1043 		mem := v.Args[2]
   1044 		v.reset(OpMIPSCALLclosure)
   1045 		v.AuxInt = argwid
   1046 		v.AddArg(entry)
   1047 		v.AddArg(closure)
   1048 		v.AddArg(mem)
   1049 		return true
   1050 	}
   1051 }
   1052 func rewriteValueMIPS_OpCom16_0(v *Value) bool {
   1053 	// match: (Com16 x)
   1054 	// cond:
   1055 	// result: (NORconst [0] x)
   1056 	for {
   1057 		x := v.Args[0]
   1058 		v.reset(OpMIPSNORconst)
   1059 		v.AuxInt = 0
   1060 		v.AddArg(x)
   1061 		return true
   1062 	}
   1063 }
   1064 func rewriteValueMIPS_OpCom32_0(v *Value) bool {
   1065 	// match: (Com32 x)
   1066 	// cond:
   1067 	// result: (NORconst [0] x)
   1068 	for {
   1069 		x := v.Args[0]
   1070 		v.reset(OpMIPSNORconst)
   1071 		v.AuxInt = 0
   1072 		v.AddArg(x)
   1073 		return true
   1074 	}
   1075 }
   1076 func rewriteValueMIPS_OpCom8_0(v *Value) bool {
   1077 	// match: (Com8 x)
   1078 	// cond:
   1079 	// result: (NORconst [0] x)
   1080 	for {
   1081 		x := v.Args[0]
   1082 		v.reset(OpMIPSNORconst)
   1083 		v.AuxInt = 0
   1084 		v.AddArg(x)
   1085 		return true
   1086 	}
   1087 }
   1088 func rewriteValueMIPS_OpConst16_0(v *Value) bool {
   1089 	// match: (Const16 [val])
   1090 	// cond:
   1091 	// result: (MOVWconst [val])
   1092 	for {
   1093 		val := v.AuxInt
   1094 		v.reset(OpMIPSMOVWconst)
   1095 		v.AuxInt = val
   1096 		return true
   1097 	}
   1098 }
   1099 func rewriteValueMIPS_OpConst32_0(v *Value) bool {
   1100 	// match: (Const32 [val])
   1101 	// cond:
   1102 	// result: (MOVWconst [val])
   1103 	for {
   1104 		val := v.AuxInt
   1105 		v.reset(OpMIPSMOVWconst)
   1106 		v.AuxInt = val
   1107 		return true
   1108 	}
   1109 }
   1110 func rewriteValueMIPS_OpConst32F_0(v *Value) bool {
   1111 	// match: (Const32F [val])
   1112 	// cond:
   1113 	// result: (MOVFconst [val])
   1114 	for {
   1115 		val := v.AuxInt
   1116 		v.reset(OpMIPSMOVFconst)
   1117 		v.AuxInt = val
   1118 		return true
   1119 	}
   1120 }
   1121 func rewriteValueMIPS_OpConst64F_0(v *Value) bool {
   1122 	// match: (Const64F [val])
   1123 	// cond:
   1124 	// result: (MOVDconst [val])
   1125 	for {
   1126 		val := v.AuxInt
   1127 		v.reset(OpMIPSMOVDconst)
   1128 		v.AuxInt = val
   1129 		return true
   1130 	}
   1131 }
   1132 func rewriteValueMIPS_OpConst8_0(v *Value) bool {
   1133 	// match: (Const8 [val])
   1134 	// cond:
   1135 	// result: (MOVWconst [val])
   1136 	for {
   1137 		val := v.AuxInt
   1138 		v.reset(OpMIPSMOVWconst)
   1139 		v.AuxInt = val
   1140 		return true
   1141 	}
   1142 }
   1143 func rewriteValueMIPS_OpConstBool_0(v *Value) bool {
   1144 	// match: (ConstBool [b])
   1145 	// cond:
   1146 	// result: (MOVWconst [b])
   1147 	for {
   1148 		b := v.AuxInt
   1149 		v.reset(OpMIPSMOVWconst)
   1150 		v.AuxInt = b
   1151 		return true
   1152 	}
   1153 }
   1154 func rewriteValueMIPS_OpConstNil_0(v *Value) bool {
   1155 	// match: (ConstNil)
   1156 	// cond:
   1157 	// result: (MOVWconst [0])
   1158 	for {
   1159 		v.reset(OpMIPSMOVWconst)
   1160 		v.AuxInt = 0
   1161 		return true
   1162 	}
   1163 }
   1164 func rewriteValueMIPS_OpConvert_0(v *Value) bool {
   1165 	// match: (Convert x mem)
   1166 	// cond:
   1167 	// result: (MOVWconvert x mem)
   1168 	for {
   1169 		_ = v.Args[1]
   1170 		x := v.Args[0]
   1171 		mem := v.Args[1]
   1172 		v.reset(OpMIPSMOVWconvert)
   1173 		v.AddArg(x)
   1174 		v.AddArg(mem)
   1175 		return true
   1176 	}
   1177 }
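// Ctz32 is built from CLZ: x & -x isolates the lowest set bit, subtracting 1
// turns it into a mask of the trailing zeros, and 32 - CLZ(mask) is the
// trailing-zero count (32 when x == 0, since the mask is then all ones).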
   1178 func rewriteValueMIPS_OpCtz32_0(v *Value) bool {
   1179 	b := v.Block
   1180 	_ = b
   1181 	typ := &b.Func.Config.Types
   1182 	_ = typ
   1183 	// match: (Ctz32 <t> x)
   1184 	// cond:
   1185 	// result: (SUB (MOVWconst [32]) (CLZ <t> (SUBconst <t> [1] (AND <t> x (NEG <t> x)))))
   1186 	for {
   1187 		t := v.Type
   1188 		x := v.Args[0]
   1189 		v.reset(OpMIPSSUB)
   1190 		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   1191 		v0.AuxInt = 32
   1192 		v.AddArg(v0)
   1193 		v1 := b.NewValue0(v.Pos, OpMIPSCLZ, t)
   1194 		v2 := b.NewValue0(v.Pos, OpMIPSSUBconst, t)
   1195 		v2.AuxInt = 1
   1196 		v3 := b.NewValue0(v.Pos, OpMIPSAND, t)
   1197 		v3.AddArg(x)
   1198 		v4 := b.NewValue0(v.Pos, OpMIPSNEG, t)
   1199 		v4.AddArg(x)
   1200 		v3.AddArg(v4)
   1201 		v2.AddArg(v3)
   1202 		v1.AddArg(v2)
   1203 		v.AddArg(v1)
   1204 		return true
   1205 	}
   1206 }
   1207 func rewriteValueMIPS_OpCvt32Fto32_0(v *Value) bool {
   1208 	// match: (Cvt32Fto32 x)
   1209 	// cond:
   1210 	// result: (TRUNCFW x)
   1211 	for {
   1212 		x := v.Args[0]
   1213 		v.reset(OpMIPSTRUNCFW)
   1214 		v.AddArg(x)
   1215 		return true
   1216 	}
   1217 }
   1218 func rewriteValueMIPS_OpCvt32Fto64F_0(v *Value) bool {
   1219 	// match: (Cvt32Fto64F x)
   1220 	// cond:
   1221 	// result: (MOVFD x)
   1222 	for {
   1223 		x := v.Args[0]
   1224 		v.reset(OpMIPSMOVFD)
   1225 		v.AddArg(x)
   1226 		return true
   1227 	}
   1228 }
   1229 func rewriteValueMIPS_OpCvt32to32F_0(v *Value) bool {
   1230 	// match: (Cvt32to32F x)
   1231 	// cond:
   1232 	// result: (MOVWF x)
   1233 	for {
   1234 		x := v.Args[0]
   1235 		v.reset(OpMIPSMOVWF)
   1236 		v.AddArg(x)
   1237 		return true
   1238 	}
   1239 }
   1240 func rewriteValueMIPS_OpCvt32to64F_0(v *Value) bool {
   1241 	// match: (Cvt32to64F x)
   1242 	// cond:
   1243 	// result: (MOVWD x)
   1244 	for {
   1245 		x := v.Args[0]
   1246 		v.reset(OpMIPSMOVWD)
   1247 		v.AddArg(x)
   1248 		return true
   1249 	}
   1250 }
   1251 func rewriteValueMIPS_OpCvt64Fto32_0(v *Value) bool {
   1252 	// match: (Cvt64Fto32 x)
   1253 	// cond:
   1254 	// result: (TRUNCDW x)
   1255 	for {
   1256 		x := v.Args[0]
   1257 		v.reset(OpMIPSTRUNCDW)
   1258 		v.AddArg(x)
   1259 		return true
   1260 	}
   1261 }
   1262 func rewriteValueMIPS_OpCvt64Fto32F_0(v *Value) bool {
   1263 	// match: (Cvt64Fto32F x)
   1264 	// cond:
   1265 	// result: (MOVDF x)
   1266 	for {
   1267 		x := v.Args[0]
   1268 		v.reset(OpMIPSMOVDF)
   1269 		v.AddArg(x)
   1270 		return true
   1271 	}
   1272 }
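// Sub-word divisions sign- or zero-extend their operands to 32 bits first.
// DIV and DIVU produce the HI/LO register pair as a tuple; Select1 takes the
// quotient (LO), and the Mod lowerings use Select0, the remainder (HI).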
   1273 func rewriteValueMIPS_OpDiv16_0(v *Value) bool {
   1274 	b := v.Block
   1275 	_ = b
   1276 	typ := &b.Func.Config.Types
   1277 	_ = typ
   1278 	// match: (Div16 x y)
   1279 	// cond:
   1280 	// result: (Select1 (DIV (SignExt16to32 x) (SignExt16to32 y)))
   1281 	for {
   1282 		_ = v.Args[1]
   1283 		x := v.Args[0]
   1284 		y := v.Args[1]
   1285 		v.reset(OpSelect1)
   1286 		v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32))
   1287 		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
   1288 		v1.AddArg(x)
   1289 		v0.AddArg(v1)
   1290 		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
   1291 		v2.AddArg(y)
   1292 		v0.AddArg(v2)
   1293 		v.AddArg(v0)
   1294 		return true
   1295 	}
   1296 }
   1297 func rewriteValueMIPS_OpDiv16u_0(v *Value) bool {
   1298 	b := v.Block
   1299 	_ = b
   1300 	typ := &b.Func.Config.Types
   1301 	_ = typ
   1302 	// match: (Div16u x y)
   1303 	// cond:
   1304 	// result: (Select1 (DIVU (ZeroExt16to32 x) (ZeroExt16to32 y)))
   1305 	for {
   1306 		_ = v.Args[1]
   1307 		x := v.Args[0]
   1308 		y := v.Args[1]
   1309 		v.reset(OpSelect1)
   1310 		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32))
   1311 		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   1312 		v1.AddArg(x)
   1313 		v0.AddArg(v1)
   1314 		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   1315 		v2.AddArg(y)
   1316 		v0.AddArg(v2)
   1317 		v.AddArg(v0)
   1318 		return true
   1319 	}
   1320 }
   1321 func rewriteValueMIPS_OpDiv32_0(v *Value) bool {
   1322 	b := v.Block
   1323 	_ = b
   1324 	typ := &b.Func.Config.Types
   1325 	_ = typ
   1326 	// match: (Div32 x y)
   1327 	// cond:
   1328 	// result: (Select1 (DIV x y))
   1329 	for {
   1330 		_ = v.Args[1]
   1331 		x := v.Args[0]
   1332 		y := v.Args[1]
   1333 		v.reset(OpSelect1)
   1334 		v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32))
   1335 		v0.AddArg(x)
   1336 		v0.AddArg(y)
   1337 		v.AddArg(v0)
   1338 		return true
   1339 	}
   1340 }
   1341 func rewriteValueMIPS_OpDiv32F_0(v *Value) bool {
   1342 	// match: (Div32F x y)
   1343 	// cond:
   1344 	// result: (DIVF x y)
   1345 	for {
   1346 		_ = v.Args[1]
   1347 		x := v.Args[0]
   1348 		y := v.Args[1]
   1349 		v.reset(OpMIPSDIVF)
   1350 		v.AddArg(x)
   1351 		v.AddArg(y)
   1352 		return true
   1353 	}
   1354 }
   1355 func rewriteValueMIPS_OpDiv32u_0(v *Value) bool {
   1356 	b := v.Block
   1357 	_ = b
   1358 	typ := &b.Func.Config.Types
   1359 	_ = typ
   1360 	// match: (Div32u x y)
   1361 	// cond:
   1362 	// result: (Select1 (DIVU x y))
   1363 	for {
   1364 		_ = v.Args[1]
   1365 		x := v.Args[0]
   1366 		y := v.Args[1]
   1367 		v.reset(OpSelect1)
   1368 		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32))
   1369 		v0.AddArg(x)
   1370 		v0.AddArg(y)
   1371 		v.AddArg(v0)
   1372 		return true
   1373 	}
   1374 }
   1375 func rewriteValueMIPS_OpDiv64F_0(v *Value) bool {
   1376 	// match: (Div64F x y)
   1377 	// cond:
   1378 	// result: (DIVD x y)
   1379 	for {
   1380 		_ = v.Args[1]
   1381 		x := v.Args[0]
   1382 		y := v.Args[1]
   1383 		v.reset(OpMIPSDIVD)
   1384 		v.AddArg(x)
   1385 		v.AddArg(y)
   1386 		return true
   1387 	}
   1388 }
   1389 func rewriteValueMIPS_OpDiv8_0(v *Value) bool {
   1390 	b := v.Block
   1391 	_ = b
   1392 	typ := &b.Func.Config.Types
   1393 	_ = typ
   1394 	// match: (Div8 x y)
   1395 	// cond:
   1396 	// result: (Select1 (DIV (SignExt8to32 x) (SignExt8to32 y)))
   1397 	for {
   1398 		_ = v.Args[1]
   1399 		x := v.Args[0]
   1400 		y := v.Args[1]
   1401 		v.reset(OpSelect1)
   1402 		v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32))
   1403 		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
   1404 		v1.AddArg(x)
   1405 		v0.AddArg(v1)
   1406 		v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
   1407 		v2.AddArg(y)
   1408 		v0.AddArg(v2)
   1409 		v.AddArg(v0)
   1410 		return true
   1411 	}
   1412 }
   1413 func rewriteValueMIPS_OpDiv8u_0(v *Value) bool {
   1414 	b := v.Block
   1415 	_ = b
   1416 	typ := &b.Func.Config.Types
   1417 	_ = typ
   1418 	// match: (Div8u x y)
   1419 	// cond:
   1420 	// result: (Select1 (DIVU (ZeroExt8to32 x) (ZeroExt8to32 y)))
   1421 	for {
   1422 		_ = v.Args[1]
   1423 		x := v.Args[0]
   1424 		y := v.Args[1]
   1425 		v.reset(OpSelect1)
   1426 		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32))
   1427 		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   1428 		v1.AddArg(x)
   1429 		v0.AddArg(v1)
   1430 		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   1431 		v2.AddArg(y)
   1432 		v0.AddArg(v2)
   1433 		v.AddArg(v0)
   1434 		return true
   1435 	}
   1436 }
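// There is no integer equality instruction, so x == y becomes
// SGTUconst [1] (XOR x y), i.e. 1 >u (x ^ y), which holds exactly when the
// XOR is zero. The floating-point forms instead read the FP condition flag
// set by CMPEQF/CMPEQD.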
   1437 func rewriteValueMIPS_OpEq16_0(v *Value) bool {
   1438 	b := v.Block
   1439 	_ = b
   1440 	typ := &b.Func.Config.Types
   1441 	_ = typ
   1442 	// match: (Eq16 x y)
   1443 	// cond:
   1444 	// result: (SGTUconst [1] (XOR (ZeroExt16to32 x) (ZeroExt16to32 y)))
   1445 	for {
   1446 		_ = v.Args[1]
   1447 		x := v.Args[0]
   1448 		y := v.Args[1]
   1449 		v.reset(OpMIPSSGTUconst)
   1450 		v.AuxInt = 1
   1451 		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
   1452 		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   1453 		v1.AddArg(x)
   1454 		v0.AddArg(v1)
   1455 		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   1456 		v2.AddArg(y)
   1457 		v0.AddArg(v2)
   1458 		v.AddArg(v0)
   1459 		return true
   1460 	}
   1461 }
   1462 func rewriteValueMIPS_OpEq32_0(v *Value) bool {
   1463 	b := v.Block
   1464 	_ = b
   1465 	typ := &b.Func.Config.Types
   1466 	_ = typ
   1467 	// match: (Eq32 x y)
   1468 	// cond:
   1469 	// result: (SGTUconst [1] (XOR x y))
   1470 	for {
   1471 		_ = v.Args[1]
   1472 		x := v.Args[0]
   1473 		y := v.Args[1]
   1474 		v.reset(OpMIPSSGTUconst)
   1475 		v.AuxInt = 1
   1476 		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
   1477 		v0.AddArg(x)
   1478 		v0.AddArg(y)
   1479 		v.AddArg(v0)
   1480 		return true
   1481 	}
   1482 }
   1483 func rewriteValueMIPS_OpEq32F_0(v *Value) bool {
   1484 	b := v.Block
   1485 	_ = b
   1486 	// match: (Eq32F x y)
   1487 	// cond:
   1488 	// result: (FPFlagTrue (CMPEQF x y))
   1489 	for {
   1490 		_ = v.Args[1]
   1491 		x := v.Args[0]
   1492 		y := v.Args[1]
   1493 		v.reset(OpMIPSFPFlagTrue)
   1494 		v0 := b.NewValue0(v.Pos, OpMIPSCMPEQF, types.TypeFlags)
   1495 		v0.AddArg(x)
   1496 		v0.AddArg(y)
   1497 		v.AddArg(v0)
   1498 		return true
   1499 	}
   1500 }
   1501 func rewriteValueMIPS_OpEq64F_0(v *Value) bool {
   1502 	b := v.Block
   1503 	_ = b
   1504 	// match: (Eq64F x y)
   1505 	// cond:
   1506 	// result: (FPFlagTrue (CMPEQD x y))
   1507 	for {
   1508 		_ = v.Args[1]
   1509 		x := v.Args[0]
   1510 		y := v.Args[1]
   1511 		v.reset(OpMIPSFPFlagTrue)
   1512 		v0 := b.NewValue0(v.Pos, OpMIPSCMPEQD, types.TypeFlags)
   1513 		v0.AddArg(x)
   1514 		v0.AddArg(y)
   1515 		v.AddArg(v0)
   1516 		return true
   1517 	}
   1518 }
   1519 func rewriteValueMIPS_OpEq8_0(v *Value) bool {
   1520 	b := v.Block
   1521 	_ = b
   1522 	typ := &b.Func.Config.Types
   1523 	_ = typ
   1524 	// match: (Eq8 x y)
   1525 	// cond:
   1526 	// result: (SGTUconst [1] (XOR (ZeroExt8to32 x) (ZeroExt8to32 y)))
   1527 	for {
   1528 		_ = v.Args[1]
   1529 		x := v.Args[0]
   1530 		y := v.Args[1]
   1531 		v.reset(OpMIPSSGTUconst)
   1532 		v.AuxInt = 1
   1533 		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
   1534 		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   1535 		v1.AddArg(x)
   1536 		v0.AddArg(v1)
   1537 		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   1538 		v2.AddArg(y)
   1539 		v0.AddArg(v2)
   1540 		v.AddArg(v0)
   1541 		return true
   1542 	}
   1543 }
   1544 func rewriteValueMIPS_OpEqB_0(v *Value) bool {
   1545 	b := v.Block
   1546 	_ = b
   1547 	typ := &b.Func.Config.Types
   1548 	_ = typ
   1549 	// match: (EqB x y)
   1550 	// cond:
   1551 	// result: (XORconst [1] (XOR <typ.Bool> x y))
   1552 	for {
   1553 		_ = v.Args[1]
   1554 		x := v.Args[0]
   1555 		y := v.Args[1]
   1556 		v.reset(OpMIPSXORconst)
   1557 		v.AuxInt = 1
   1558 		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.Bool)
   1559 		v0.AddArg(x)
   1560 		v0.AddArg(y)
   1561 		v.AddArg(v0)
   1562 		return true
   1563 	}
   1564 }
   1565 func rewriteValueMIPS_OpEqPtr_0(v *Value) bool {
   1566 	b := v.Block
   1567 	_ = b
   1568 	typ := &b.Func.Config.Types
   1569 	_ = typ
   1570 	// match: (EqPtr x y)
   1571 	// cond:
   1572 	// result: (SGTUconst [1] (XOR x y))
   1573 	for {
   1574 		_ = v.Args[1]
   1575 		x := v.Args[0]
   1576 		y := v.Args[1]
   1577 		v.reset(OpMIPSSGTUconst)
   1578 		v.AuxInt = 1
   1579 		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
   1580 		v0.AddArg(x)
   1581 		v0.AddArg(y)
   1582 		v.AddArg(v0)
   1583 		return true
   1584 	}
   1585 }
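// x >= y is lowered as the complement of the swapped comparison, !(y > x):
// XORconst [1] around SGT/SGTU with the operands reversed. The floating-point
// forms use the dedicated CMPGEF/CMPGED comparisons.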
   1586 func rewriteValueMIPS_OpGeq16_0(v *Value) bool {
   1587 	b := v.Block
   1588 	_ = b
   1589 	typ := &b.Func.Config.Types
   1590 	_ = typ
   1591 	// match: (Geq16 x y)
   1592 	// cond:
   1593 	// result: (XORconst [1] (SGT (SignExt16to32 y) (SignExt16to32 x)))
   1594 	for {
   1595 		_ = v.Args[1]
   1596 		x := v.Args[0]
   1597 		y := v.Args[1]
   1598 		v.reset(OpMIPSXORconst)
   1599 		v.AuxInt = 1
   1600 		v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool)
   1601 		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
   1602 		v1.AddArg(y)
   1603 		v0.AddArg(v1)
   1604 		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
   1605 		v2.AddArg(x)
   1606 		v0.AddArg(v2)
   1607 		v.AddArg(v0)
   1608 		return true
   1609 	}
   1610 }
   1611 func rewriteValueMIPS_OpGeq16U_0(v *Value) bool {
   1612 	b := v.Block
   1613 	_ = b
   1614 	typ := &b.Func.Config.Types
   1615 	_ = typ
   1616 	// match: (Geq16U x y)
   1617 	// cond:
   1618 	// result: (XORconst [1] (SGTU (ZeroExt16to32 y) (ZeroExt16to32 x)))
   1619 	for {
   1620 		_ = v.Args[1]
   1621 		x := v.Args[0]
   1622 		y := v.Args[1]
   1623 		v.reset(OpMIPSXORconst)
   1624 		v.AuxInt = 1
   1625 		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
   1626 		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   1627 		v1.AddArg(y)
   1628 		v0.AddArg(v1)
   1629 		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   1630 		v2.AddArg(x)
   1631 		v0.AddArg(v2)
   1632 		v.AddArg(v0)
   1633 		return true
   1634 	}
   1635 }
   1636 func rewriteValueMIPS_OpGeq32_0(v *Value) bool {
   1637 	b := v.Block
   1638 	_ = b
   1639 	typ := &b.Func.Config.Types
   1640 	_ = typ
   1641 	// match: (Geq32 x y)
   1642 	// cond:
   1643 	// result: (XORconst [1] (SGT y x))
   1644 	for {
   1645 		_ = v.Args[1]
   1646 		x := v.Args[0]
   1647 		y := v.Args[1]
   1648 		v.reset(OpMIPSXORconst)
   1649 		v.AuxInt = 1
   1650 		v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool)
   1651 		v0.AddArg(y)
   1652 		v0.AddArg(x)
   1653 		v.AddArg(v0)
   1654 		return true
   1655 	}
   1656 }
   1657 func rewriteValueMIPS_OpGeq32F_0(v *Value) bool {
   1658 	b := v.Block
   1659 	_ = b
   1660 	// match: (Geq32F x y)
   1661 	// cond:
   1662 	// result: (FPFlagTrue (CMPGEF x y))
   1663 	for {
   1664 		_ = v.Args[1]
   1665 		x := v.Args[0]
   1666 		y := v.Args[1]
   1667 		v.reset(OpMIPSFPFlagTrue)
   1668 		v0 := b.NewValue0(v.Pos, OpMIPSCMPGEF, types.TypeFlags)
   1669 		v0.AddArg(x)
   1670 		v0.AddArg(y)
   1671 		v.AddArg(v0)
   1672 		return true
   1673 	}
   1674 }
   1675 func rewriteValueMIPS_OpGeq32U_0(v *Value) bool {
   1676 	b := v.Block
   1677 	_ = b
   1678 	typ := &b.Func.Config.Types
   1679 	_ = typ
   1680 	// match: (Geq32U x y)
   1681 	// cond:
   1682 	// result: (XORconst [1] (SGTU y x))
   1683 	for {
   1684 		_ = v.Args[1]
   1685 		x := v.Args[0]
   1686 		y := v.Args[1]
   1687 		v.reset(OpMIPSXORconst)
   1688 		v.AuxInt = 1
   1689 		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
   1690 		v0.AddArg(y)
   1691 		v0.AddArg(x)
   1692 		v.AddArg(v0)
   1693 		return true
   1694 	}
   1695 }
   1696 func rewriteValueMIPS_OpGeq64F_0(v *Value) bool {
   1697 	b := v.Block
   1698 	_ = b
   1699 	// match: (Geq64F x y)
   1700 	// cond:
   1701 	// result: (FPFlagTrue (CMPGED x y))
   1702 	for {
   1703 		_ = v.Args[1]
   1704 		x := v.Args[0]
   1705 		y := v.Args[1]
   1706 		v.reset(OpMIPSFPFlagTrue)
   1707 		v0 := b.NewValue0(v.Pos, OpMIPSCMPGED, types.TypeFlags)
   1708 		v0.AddArg(x)
   1709 		v0.AddArg(y)
   1710 		v.AddArg(v0)
   1711 		return true
   1712 	}
   1713 }
   1714 func rewriteValueMIPS_OpGeq8_0(v *Value) bool {
   1715 	b := v.Block
   1716 	_ = b
   1717 	typ := &b.Func.Config.Types
   1718 	_ = typ
   1719 	// match: (Geq8 x y)
   1720 	// cond:
   1721 	// result: (XORconst [1] (SGT (SignExt8to32 y) (SignExt8to32 x)))
   1722 	for {
   1723 		_ = v.Args[1]
   1724 		x := v.Args[0]
   1725 		y := v.Args[1]
   1726 		v.reset(OpMIPSXORconst)
   1727 		v.AuxInt = 1
   1728 		v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool)
   1729 		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
   1730 		v1.AddArg(y)
   1731 		v0.AddArg(v1)
   1732 		v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
   1733 		v2.AddArg(x)
   1734 		v0.AddArg(v2)
   1735 		v.AddArg(v0)
   1736 		return true
   1737 	}
   1738 }
   1739 func rewriteValueMIPS_OpGeq8U_0(v *Value) bool {
   1740 	b := v.Block
   1741 	_ = b
   1742 	typ := &b.Func.Config.Types
   1743 	_ = typ
   1744 	// match: (Geq8U x y)
   1745 	// cond:
   1746 	// result: (XORconst [1] (SGTU (ZeroExt8to32 y) (ZeroExt8to32 x)))
   1747 	for {
   1748 		_ = v.Args[1]
   1749 		x := v.Args[0]
   1750 		y := v.Args[1]
   1751 		v.reset(OpMIPSXORconst)
   1752 		v.AuxInt = 1
   1753 		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
   1754 		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   1755 		v1.AddArg(y)
   1756 		v0.AddArg(v1)
   1757 		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   1758 		v2.AddArg(x)
   1759 		v0.AddArg(v2)
   1760 		v.AddArg(v0)
   1761 		return true
   1762 	}
   1763 }
   1764 func rewriteValueMIPS_OpGetCallerSP_0(v *Value) bool {
   1765 	// match: (GetCallerSP)
   1766 	// cond:
   1767 	// result: (LoweredGetCallerSP)
   1768 	for {
   1769 		v.reset(OpMIPSLoweredGetCallerSP)
   1770 		return true
   1771 	}
   1772 }
   1773 func rewriteValueMIPS_OpGetClosurePtr_0(v *Value) bool {
   1774 	// match: (GetClosurePtr)
   1775 	// cond:
   1776 	// result: (LoweredGetClosurePtr)
   1777 	for {
   1778 		v.reset(OpMIPSLoweredGetClosurePtr)
   1779 		return true
   1780 	}
   1781 }
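// x > y maps directly onto SGT/SGTU once sub-word operands are sign- or
// zero-extended; the floating-point forms test CMPGTF/CMPGTD.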
   1782 func rewriteValueMIPS_OpGreater16_0(v *Value) bool {
   1783 	b := v.Block
   1784 	_ = b
   1785 	typ := &b.Func.Config.Types
   1786 	_ = typ
   1787 	// match: (Greater16 x y)
   1788 	// cond:
   1789 	// result: (SGT (SignExt16to32 x) (SignExt16to32 y))
   1790 	for {
   1791 		_ = v.Args[1]
   1792 		x := v.Args[0]
   1793 		y := v.Args[1]
   1794 		v.reset(OpMIPSSGT)
   1795 		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
   1796 		v0.AddArg(x)
   1797 		v.AddArg(v0)
   1798 		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
   1799 		v1.AddArg(y)
   1800 		v.AddArg(v1)
   1801 		return true
   1802 	}
   1803 }
   1804 func rewriteValueMIPS_OpGreater16U_0(v *Value) bool {
   1805 	b := v.Block
   1806 	_ = b
   1807 	typ := &b.Func.Config.Types
   1808 	_ = typ
   1809 	// match: (Greater16U x y)
   1810 	// cond:
   1811 	// result: (SGTU (ZeroExt16to32 x) (ZeroExt16to32 y))
   1812 	for {
   1813 		_ = v.Args[1]
   1814 		x := v.Args[0]
   1815 		y := v.Args[1]
   1816 		v.reset(OpMIPSSGTU)
   1817 		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   1818 		v0.AddArg(x)
   1819 		v.AddArg(v0)
   1820 		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   1821 		v1.AddArg(y)
   1822 		v.AddArg(v1)
   1823 		return true
   1824 	}
   1825 }
   1826 func rewriteValueMIPS_OpGreater32_0(v *Value) bool {
   1827 	// match: (Greater32 x y)
   1828 	// cond:
   1829 	// result: (SGT x y)
   1830 	for {
   1831 		_ = v.Args[1]
   1832 		x := v.Args[0]
   1833 		y := v.Args[1]
   1834 		v.reset(OpMIPSSGT)
   1835 		v.AddArg(x)
   1836 		v.AddArg(y)
   1837 		return true
   1838 	}
   1839 }
   1840 func rewriteValueMIPS_OpGreater32F_0(v *Value) bool {
   1841 	b := v.Block
   1842 	_ = b
   1843 	// match: (Greater32F x y)
   1844 	// cond:
   1845 	// result: (FPFlagTrue (CMPGTF x y))
   1846 	for {
   1847 		_ = v.Args[1]
   1848 		x := v.Args[0]
   1849 		y := v.Args[1]
   1850 		v.reset(OpMIPSFPFlagTrue)
   1851 		v0 := b.NewValue0(v.Pos, OpMIPSCMPGTF, types.TypeFlags)
   1852 		v0.AddArg(x)
   1853 		v0.AddArg(y)
   1854 		v.AddArg(v0)
   1855 		return true
   1856 	}
   1857 }
   1858 func rewriteValueMIPS_OpGreater32U_0(v *Value) bool {
   1859 	// match: (Greater32U x y)
   1860 	// cond:
   1861 	// result: (SGTU x y)
   1862 	for {
   1863 		_ = v.Args[1]
   1864 		x := v.Args[0]
   1865 		y := v.Args[1]
   1866 		v.reset(OpMIPSSGTU)
   1867 		v.AddArg(x)
   1868 		v.AddArg(y)
   1869 		return true
   1870 	}
   1871 }
   1872 func rewriteValueMIPS_OpGreater64F_0(v *Value) bool {
   1873 	b := v.Block
   1874 	_ = b
   1875 	// match: (Greater64F x y)
   1876 	// cond:
   1877 	// result: (FPFlagTrue (CMPGTD x y))
   1878 	for {
   1879 		_ = v.Args[1]
   1880 		x := v.Args[0]
   1881 		y := v.Args[1]
   1882 		v.reset(OpMIPSFPFlagTrue)
   1883 		v0 := b.NewValue0(v.Pos, OpMIPSCMPGTD, types.TypeFlags)
   1884 		v0.AddArg(x)
   1885 		v0.AddArg(y)
   1886 		v.AddArg(v0)
   1887 		return true
   1888 	}
   1889 }
   1890 func rewriteValueMIPS_OpGreater8_0(v *Value) bool {
   1891 	b := v.Block
   1892 	_ = b
   1893 	typ := &b.Func.Config.Types
   1894 	_ = typ
   1895 	// match: (Greater8 x y)
   1896 	// cond:
   1897 	// result: (SGT (SignExt8to32 x) (SignExt8to32 y))
   1898 	for {
   1899 		_ = v.Args[1]
   1900 		x := v.Args[0]
   1901 		y := v.Args[1]
   1902 		v.reset(OpMIPSSGT)
   1903 		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
   1904 		v0.AddArg(x)
   1905 		v.AddArg(v0)
   1906 		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
   1907 		v1.AddArg(y)
   1908 		v.AddArg(v1)
   1909 		return true
   1910 	}
   1911 }
   1912 func rewriteValueMIPS_OpGreater8U_0(v *Value) bool {
   1913 	b := v.Block
   1914 	_ = b
   1915 	typ := &b.Func.Config.Types
   1916 	_ = typ
   1917 	// match: (Greater8U x y)
   1918 	// cond:
   1919 	// result: (SGTU (ZeroExt8to32 x) (ZeroExt8to32 y))
   1920 	for {
   1921 		_ = v.Args[1]
   1922 		x := v.Args[0]
   1923 		y := v.Args[1]
   1924 		v.reset(OpMIPSSGTU)
   1925 		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   1926 		v0.AddArg(x)
   1927 		v.AddArg(v0)
   1928 		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   1929 		v1.AddArg(y)
   1930 		v.AddArg(v1)
   1931 		return true
   1932 	}
   1933 }
   1934 func rewriteValueMIPS_OpHmul32_0(v *Value) bool {
   1935 	b := v.Block
   1936 	_ = b
   1937 	typ := &b.Func.Config.Types
   1938 	_ = typ
   1939 	// match: (Hmul32 x y)
   1940 	// cond:
   1941 	// result: (Select0 (MULT x y))
   1942 	for {
   1943 		_ = v.Args[1]
   1944 		x := v.Args[0]
   1945 		y := v.Args[1]
   1946 		v.reset(OpSelect0)
   1947 		v0 := b.NewValue0(v.Pos, OpMIPSMULT, types.NewTuple(typ.Int32, typ.Int32))
   1948 		v0.AddArg(x)
   1949 		v0.AddArg(y)
   1950 		v.AddArg(v0)
   1951 		return true
   1952 	}
   1953 }
   1954 func rewriteValueMIPS_OpHmul32u_0(v *Value) bool {
   1955 	b := v.Block
   1956 	_ = b
   1957 	typ := &b.Func.Config.Types
   1958 	_ = typ
   1959 	// match: (Hmul32u x y)
   1960 	// cond:
   1961 	// result: (Select0 (MULTU x y))
   1962 	for {
   1963 		_ = v.Args[1]
   1964 		x := v.Args[0]
   1965 		y := v.Args[1]
   1966 		v.reset(OpSelect0)
   1967 		v0 := b.NewValue0(v.Pos, OpMIPSMULTU, types.NewTuple(typ.UInt32, typ.UInt32))
   1968 		v0.AddArg(x)
   1969 		v0.AddArg(y)
   1970 		v.AddArg(v0)
   1971 		return true
   1972 	}
   1973 }
   1974 func rewriteValueMIPS_OpInterCall_0(v *Value) bool {
   1975 	// match: (InterCall [argwid] entry mem)
   1976 	// cond:
   1977 	// result: (CALLinter [argwid] entry mem)
   1978 	for {
   1979 		argwid := v.AuxInt
   1980 		_ = v.Args[1]
   1981 		entry := v.Args[0]
   1982 		mem := v.Args[1]
   1983 		v.reset(OpMIPSCALLinter)
   1984 		v.AuxInt = argwid
   1985 		v.AddArg(entry)
   1986 		v.AddArg(mem)
   1987 		return true
   1988 	}
   1989 }
   1990 func rewriteValueMIPS_OpIsInBounds_0(v *Value) bool {
   1991 	// match: (IsInBounds idx len)
   1992 	// cond:
   1993 	// result: (SGTU len idx)
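	// One unsigned compare covers both bounds: a negative index becomes a
	// huge unsigned value, so len >u idx implies 0 <= idx < len.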
   1994 	for {
   1995 		_ = v.Args[1]
   1996 		idx := v.Args[0]
   1997 		len := v.Args[1]
   1998 		v.reset(OpMIPSSGTU)
   1999 		v.AddArg(len)
   2000 		v.AddArg(idx)
   2001 		return true
   2002 	}
   2003 }
   2004 func rewriteValueMIPS_OpIsNonNil_0(v *Value) bool {
   2005 	b := v.Block
   2006 	_ = b
   2007 	typ := &b.Func.Config.Types
   2008 	_ = typ
   2009 	// match: (IsNonNil ptr)
   2010 	// cond:
   2011 	// result: (SGTU ptr (MOVWconst [0]))
   2012 	for {
   2013 		ptr := v.Args[0]
   2014 		v.reset(OpMIPSSGTU)
   2015 		v.AddArg(ptr)
   2016 		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   2017 		v0.AuxInt = 0
   2018 		v.AddArg(v0)
   2019 		return true
   2020 	}
   2021 }
   2022 func rewriteValueMIPS_OpIsSliceInBounds_0(v *Value) bool {
   2023 	b := v.Block
   2024 	_ = b
   2025 	typ := &b.Func.Config.Types
   2026 	_ = typ
   2027 	// match: (IsSliceInBounds idx len)
   2028 	// cond:
   2029 	// result: (XORconst [1] (SGTU idx len))
   2030 	for {
   2031 		_ = v.Args[1]
   2032 		idx := v.Args[0]
   2033 		len := v.Args[1]
   2034 		v.reset(OpMIPSXORconst)
   2035 		v.AuxInt = 1
   2036 		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
   2037 		v0.AddArg(idx)
   2038 		v0.AddArg(len)
   2039 		v.AddArg(v0)
   2040 		return true
   2041 	}
   2042 }
   2043 func rewriteValueMIPS_OpLeq16_0(v *Value) bool {
   2044 	b := v.Block
   2045 	_ = b
   2046 	typ := &b.Func.Config.Types
   2047 	_ = typ
   2048 	// match: (Leq16 x y)
   2049 	// cond:
   2050 	// result: (XORconst [1] (SGT (SignExt16to32 x) (SignExt16to32 y)))
   2051 	for {
   2052 		_ = v.Args[1]
   2053 		x := v.Args[0]
   2054 		y := v.Args[1]
   2055 		v.reset(OpMIPSXORconst)
   2056 		v.AuxInt = 1
   2057 		v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool)
   2058 		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
   2059 		v1.AddArg(x)
   2060 		v0.AddArg(v1)
   2061 		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
   2062 		v2.AddArg(y)
   2063 		v0.AddArg(v2)
   2064 		v.AddArg(v0)
   2065 		return true
   2066 	}
   2067 }
   2068 func rewriteValueMIPS_OpLeq16U_0(v *Value) bool {
   2069 	b := v.Block
   2070 	_ = b
   2071 	typ := &b.Func.Config.Types
   2072 	_ = typ
   2073 	// match: (Leq16U x y)
   2074 	// cond:
   2075 	// result: (XORconst [1] (SGTU (ZeroExt16to32 x) (ZeroExt16to32 y)))
   2076 	for {
   2077 		_ = v.Args[1]
   2078 		x := v.Args[0]
   2079 		y := v.Args[1]
   2080 		v.reset(OpMIPSXORconst)
   2081 		v.AuxInt = 1
   2082 		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
   2083 		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   2084 		v1.AddArg(x)
   2085 		v0.AddArg(v1)
   2086 		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   2087 		v2.AddArg(y)
   2088 		v0.AddArg(v2)
   2089 		v.AddArg(v0)
   2090 		return true
   2091 	}
   2092 }
   2093 func rewriteValueMIPS_OpLeq32_0(v *Value) bool {
   2094 	b := v.Block
   2095 	_ = b
   2096 	typ := &b.Func.Config.Types
   2097 	_ = typ
   2098 	// match: (Leq32 x y)
   2099 	// cond:
   2100 	// result: (XORconst [1] (SGT x y))
   2101 	for {
   2102 		_ = v.Args[1]
   2103 		x := v.Args[0]
   2104 		y := v.Args[1]
   2105 		v.reset(OpMIPSXORconst)
   2106 		v.AuxInt = 1
   2107 		v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool)
   2108 		v0.AddArg(x)
   2109 		v0.AddArg(y)
   2110 		v.AddArg(v0)
   2111 		return true
   2112 	}
   2113 }
   2114 func rewriteValueMIPS_OpLeq32F_0(v *Value) bool {
   2115 	b := v.Block
   2116 	_ = b
   2117 	// match: (Leq32F x y)
   2118 	// cond:
   2119 	// result: (FPFlagTrue (CMPGEF y x))
   2120 	for {
   2121 		_ = v.Args[1]
   2122 		x := v.Args[0]
   2123 		y := v.Args[1]
   2124 		v.reset(OpMIPSFPFlagTrue)
   2125 		v0 := b.NewValue0(v.Pos, OpMIPSCMPGEF, types.TypeFlags)
   2126 		v0.AddArg(y)
   2127 		v0.AddArg(x)
   2128 		v.AddArg(v0)
   2129 		return true
   2130 	}
   2131 }
   2132 func rewriteValueMIPS_OpLeq32U_0(v *Value) bool {
   2133 	b := v.Block
   2134 	_ = b
   2135 	typ := &b.Func.Config.Types
   2136 	_ = typ
   2137 	// match: (Leq32U x y)
   2138 	// cond:
   2139 	// result: (XORconst [1] (SGTU x y))
   2140 	for {
   2141 		_ = v.Args[1]
   2142 		x := v.Args[0]
   2143 		y := v.Args[1]
   2144 		v.reset(OpMIPSXORconst)
   2145 		v.AuxInt = 1
   2146 		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
   2147 		v0.AddArg(x)
   2148 		v0.AddArg(y)
   2149 		v.AddArg(v0)
   2150 		return true
   2151 	}
   2152 }
   2153 func rewriteValueMIPS_OpLeq64F_0(v *Value) bool {
   2154 	b := v.Block
   2155 	_ = b
   2156 	// match: (Leq64F x y)
   2157 	// cond:
   2158 	// result: (FPFlagTrue (CMPGED y x))
   2159 	for {
   2160 		_ = v.Args[1]
   2161 		x := v.Args[0]
   2162 		y := v.Args[1]
   2163 		v.reset(OpMIPSFPFlagTrue)
   2164 		v0 := b.NewValue0(v.Pos, OpMIPSCMPGED, types.TypeFlags)
   2165 		v0.AddArg(y)
   2166 		v0.AddArg(x)
   2167 		v.AddArg(v0)
   2168 		return true
   2169 	}
   2170 }
   2171 func rewriteValueMIPS_OpLeq8_0(v *Value) bool {
   2172 	b := v.Block
   2173 	_ = b
   2174 	typ := &b.Func.Config.Types
   2175 	_ = typ
   2176 	// match: (Leq8 x y)
   2177 	// cond:
   2178 	// result: (XORconst [1] (SGT (SignExt8to32 x) (SignExt8to32 y)))
   2179 	for {
   2180 		_ = v.Args[1]
   2181 		x := v.Args[0]
   2182 		y := v.Args[1]
   2183 		v.reset(OpMIPSXORconst)
   2184 		v.AuxInt = 1
   2185 		v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool)
   2186 		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
   2187 		v1.AddArg(x)
   2188 		v0.AddArg(v1)
   2189 		v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
   2190 		v2.AddArg(y)
   2191 		v0.AddArg(v2)
   2192 		v.AddArg(v0)
   2193 		return true
   2194 	}
   2195 }
   2196 func rewriteValueMIPS_OpLeq8U_0(v *Value) bool {
   2197 	b := v.Block
   2198 	_ = b
   2199 	typ := &b.Func.Config.Types
   2200 	_ = typ
   2201 	// match: (Leq8U x y)
   2202 	// cond:
   2203 	// result: (XORconst [1] (SGTU (ZeroExt8to32 x) (ZeroExt8to32 y)))
   2204 	for {
   2205 		_ = v.Args[1]
   2206 		x := v.Args[0]
   2207 		y := v.Args[1]
   2208 		v.reset(OpMIPSXORconst)
   2209 		v.AuxInt = 1
   2210 		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
   2211 		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   2212 		v1.AddArg(x)
   2213 		v0.AddArg(v1)
   2214 		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   2215 		v2.AddArg(y)
   2216 		v0.AddArg(v2)
   2217 		v.AddArg(v0)
   2218 		return true
   2219 	}
   2220 }
   2221 func rewriteValueMIPS_OpLess16_0(v *Value) bool {
   2222 	b := v.Block
   2223 	_ = b
   2224 	typ := &b.Func.Config.Types
   2225 	_ = typ
   2226 	// match: (Less16 x y)
   2227 	// cond:
   2228 	// result: (SGT (SignExt16to32 y) (SignExt16to32 x))
   2229 	for {
   2230 		_ = v.Args[1]
   2231 		x := v.Args[0]
   2232 		y := v.Args[1]
   2233 		v.reset(OpMIPSSGT)
   2234 		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
   2235 		v0.AddArg(y)
   2236 		v.AddArg(v0)
   2237 		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
   2238 		v1.AddArg(x)
   2239 		v.AddArg(v1)
   2240 		return true
   2241 	}
   2242 }
   2243 func rewriteValueMIPS_OpLess16U_0(v *Value) bool {
   2244 	b := v.Block
   2245 	_ = b
   2246 	typ := &b.Func.Config.Types
   2247 	_ = typ
   2248 	// match: (Less16U x y)
   2249 	// cond:
   2250 	// result: (SGTU (ZeroExt16to32 y) (ZeroExt16to32 x))
   2251 	for {
   2252 		_ = v.Args[1]
   2253 		x := v.Args[0]
   2254 		y := v.Args[1]
   2255 		v.reset(OpMIPSSGTU)
   2256 		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   2257 		v0.AddArg(y)
   2258 		v.AddArg(v0)
   2259 		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   2260 		v1.AddArg(x)
   2261 		v.AddArg(v1)
   2262 		return true
   2263 	}
   2264 }
   2265 func rewriteValueMIPS_OpLess32_0(v *Value) bool {
   2266 	// match: (Less32 x y)
   2267 	// cond:
   2268 	// result: (SGT y x)
   2269 	for {
   2270 		_ = v.Args[1]
   2271 		x := v.Args[0]
   2272 		y := v.Args[1]
   2273 		v.reset(OpMIPSSGT)
   2274 		v.AddArg(y)
   2275 		v.AddArg(x)
   2276 		return true
   2277 	}
   2278 }
   2279 func rewriteValueMIPS_OpLess32F_0(v *Value) bool {
   2280 	b := v.Block
   2281 	_ = b
   2282 	// match: (Less32F x y)
   2283 	// cond:
   2284 	// result: (FPFlagTrue (CMPGTF y x))
   2285 	for {
   2286 		_ = v.Args[1]
   2287 		x := v.Args[0]
   2288 		y := v.Args[1]
   2289 		v.reset(OpMIPSFPFlagTrue)
   2290 		v0 := b.NewValue0(v.Pos, OpMIPSCMPGTF, types.TypeFlags)
   2291 		v0.AddArg(y)
   2292 		v0.AddArg(x)
   2293 		v.AddArg(v0)
   2294 		return true
   2295 	}
   2296 }
   2297 func rewriteValueMIPS_OpLess32U_0(v *Value) bool {
   2298 	// match: (Less32U x y)
   2299 	// cond:
   2300 	// result: (SGTU y x)
   2301 	for {
   2302 		_ = v.Args[1]
   2303 		x := v.Args[0]
   2304 		y := v.Args[1]
   2305 		v.reset(OpMIPSSGTU)
   2306 		v.AddArg(y)
   2307 		v.AddArg(x)
   2308 		return true
   2309 	}
   2310 }
   2311 func rewriteValueMIPS_OpLess64F_0(v *Value) bool {
   2312 	b := v.Block
   2313 	_ = b
   2314 	// match: (Less64F x y)
   2315 	// cond:
   2316 	// result: (FPFlagTrue (CMPGTD y x))
   2317 	for {
   2318 		_ = v.Args[1]
   2319 		x := v.Args[0]
   2320 		y := v.Args[1]
   2321 		v.reset(OpMIPSFPFlagTrue)
   2322 		v0 := b.NewValue0(v.Pos, OpMIPSCMPGTD, types.TypeFlags)
   2323 		v0.AddArg(y)
   2324 		v0.AddArg(x)
   2325 		v.AddArg(v0)
   2326 		return true
   2327 	}
   2328 }
   2329 func rewriteValueMIPS_OpLess8_0(v *Value) bool {
   2330 	b := v.Block
   2331 	_ = b
   2332 	typ := &b.Func.Config.Types
   2333 	_ = typ
   2334 	// match: (Less8 x y)
   2335 	// cond:
   2336 	// result: (SGT (SignExt8to32 y) (SignExt8to32 x))
   2337 	for {
   2338 		_ = v.Args[1]
   2339 		x := v.Args[0]
   2340 		y := v.Args[1]
   2341 		v.reset(OpMIPSSGT)
   2342 		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
   2343 		v0.AddArg(y)
   2344 		v.AddArg(v0)
   2345 		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
   2346 		v1.AddArg(x)
   2347 		v.AddArg(v1)
   2348 		return true
   2349 	}
   2350 }
   2351 func rewriteValueMIPS_OpLess8U_0(v *Value) bool {
   2352 	b := v.Block
   2353 	_ = b
   2354 	typ := &b.Func.Config.Types
   2355 	_ = typ
   2356 	// match: (Less8U x y)
   2357 	// cond:
   2358 	// result: (SGTU (ZeroExt8to32 y) (ZeroExt8to32 x))
   2359 	for {
   2360 		_ = v.Args[1]
   2361 		x := v.Args[0]
   2362 		y := v.Args[1]
   2363 		v.reset(OpMIPSSGTU)
   2364 		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   2365 		v0.AddArg(y)
   2366 		v.AddArg(v0)
   2367 		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   2368 		v1.AddArg(x)
   2369 		v.AddArg(v1)
   2370 		return true
   2371 	}
   2372 }
   2373 func rewriteValueMIPS_OpLoad_0(v *Value) bool {
   2374 	// match: (Load <t> ptr mem)
   2375 	// cond: t.IsBoolean()
   2376 	// result: (MOVBUload ptr mem)
   2377 	for {
   2378 		t := v.Type
   2379 		_ = v.Args[1]
   2380 		ptr := v.Args[0]
   2381 		mem := v.Args[1]
   2382 		if !(t.IsBoolean()) {
   2383 			break
   2384 		}
   2385 		v.reset(OpMIPSMOVBUload)
   2386 		v.AddArg(ptr)
   2387 		v.AddArg(mem)
   2388 		return true
   2389 	}
   2390 	// match: (Load <t> ptr mem)
   2391 	// cond: (is8BitInt(t) && isSigned(t))
   2392 	// result: (MOVBload ptr mem)
   2393 	for {
   2394 		t := v.Type
   2395 		_ = v.Args[1]
   2396 		ptr := v.Args[0]
   2397 		mem := v.Args[1]
   2398 		if !(is8BitInt(t) && isSigned(t)) {
   2399 			break
   2400 		}
   2401 		v.reset(OpMIPSMOVBload)
   2402 		v.AddArg(ptr)
   2403 		v.AddArg(mem)
   2404 		return true
   2405 	}
   2406 	// match: (Load <t> ptr mem)
   2407 	// cond: (is8BitInt(t) && !isSigned(t))
   2408 	// result: (MOVBUload ptr mem)
   2409 	for {
   2410 		t := v.Type
   2411 		_ = v.Args[1]
   2412 		ptr := v.Args[0]
   2413 		mem := v.Args[1]
   2414 		if !(is8BitInt(t) && !isSigned(t)) {
   2415 			break
   2416 		}
   2417 		v.reset(OpMIPSMOVBUload)
   2418 		v.AddArg(ptr)
   2419 		v.AddArg(mem)
   2420 		return true
   2421 	}
   2422 	// match: (Load <t> ptr mem)
   2423 	// cond: (is16BitInt(t) && isSigned(t))
   2424 	// result: (MOVHload ptr mem)
   2425 	for {
   2426 		t := v.Type
   2427 		_ = v.Args[1]
   2428 		ptr := v.Args[0]
   2429 		mem := v.Args[1]
   2430 		if !(is16BitInt(t) && isSigned(t)) {
   2431 			break
   2432 		}
   2433 		v.reset(OpMIPSMOVHload)
   2434 		v.AddArg(ptr)
   2435 		v.AddArg(mem)
   2436 		return true
   2437 	}
   2438 	// match: (Load <t> ptr mem)
   2439 	// cond: (is16BitInt(t) && !isSigned(t))
   2440 	// result: (MOVHUload ptr mem)
   2441 	for {
   2442 		t := v.Type
   2443 		_ = v.Args[1]
   2444 		ptr := v.Args[0]
   2445 		mem := v.Args[1]
   2446 		if !(is16BitInt(t) && !isSigned(t)) {
   2447 			break
   2448 		}
   2449 		v.reset(OpMIPSMOVHUload)
   2450 		v.AddArg(ptr)
   2451 		v.AddArg(mem)
   2452 		return true
   2453 	}
   2454 	// match: (Load <t> ptr mem)
   2455 	// cond: (is32BitInt(t) || isPtr(t))
   2456 	// result: (MOVWload ptr mem)
   2457 	for {
   2458 		t := v.Type
   2459 		_ = v.Args[1]
   2460 		ptr := v.Args[0]
   2461 		mem := v.Args[1]
   2462 		if !(is32BitInt(t) || isPtr(t)) {
   2463 			break
   2464 		}
   2465 		v.reset(OpMIPSMOVWload)
   2466 		v.AddArg(ptr)
   2467 		v.AddArg(mem)
   2468 		return true
   2469 	}
   2470 	// match: (Load <t> ptr mem)
   2471 	// cond: is32BitFloat(t)
   2472 	// result: (MOVFload ptr mem)
   2473 	for {
   2474 		t := v.Type
   2475 		_ = v.Args[1]
   2476 		ptr := v.Args[0]
   2477 		mem := v.Args[1]
   2478 		if !(is32BitFloat(t)) {
   2479 			break
   2480 		}
   2481 		v.reset(OpMIPSMOVFload)
   2482 		v.AddArg(ptr)
   2483 		v.AddArg(mem)
   2484 		return true
   2485 	}
   2486 	// match: (Load <t> ptr mem)
   2487 	// cond: is64BitFloat(t)
   2488 	// result: (MOVDload ptr mem)
   2489 	for {
   2490 		t := v.Type
   2491 		_ = v.Args[1]
   2492 		ptr := v.Args[0]
   2493 		mem := v.Args[1]
   2494 		if !(is64BitFloat(t)) {
   2495 			break
   2496 		}
   2497 		v.reset(OpMIPSMOVDload)
   2498 		v.AddArg(ptr)
   2499 		v.AddArg(mem)
   2500 		return true
   2501 	}
   2502 	return false
   2503 }
   2504 func rewriteValueMIPS_OpLsh16x16_0(v *Value) bool {
   2505 	b := v.Block
   2506 	_ = b
   2507 	typ := &b.Func.Config.Types
   2508 	_ = typ
   2509 	// match: (Lsh16x16 <t> x y)
   2510 	// cond:
   2511 	// result: (CMOVZ (SLL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
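	// CMOVZ yields its second operand when the third is zero. The
	// SGTUconst [32] guard is nonzero only while the zero-extended shift
	// count is below 32, so larger counts produce 0 as Go requires; the
	// hardware shift alone would not, since it uses only the low five
	// bits of the count.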
   2512 	for {
   2513 		t := v.Type
   2514 		_ = v.Args[1]
   2515 		x := v.Args[0]
   2516 		y := v.Args[1]
   2517 		v.reset(OpMIPSCMOVZ)
   2518 		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
   2519 		v0.AddArg(x)
   2520 		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   2521 		v1.AddArg(y)
   2522 		v0.AddArg(v1)
   2523 		v.AddArg(v0)
   2524 		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   2525 		v2.AuxInt = 0
   2526 		v.AddArg(v2)
   2527 		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
   2528 		v3.AuxInt = 32
   2529 		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   2530 		v4.AddArg(y)
   2531 		v3.AddArg(v4)
   2532 		v.AddArg(v3)
   2533 		return true
   2534 	}
   2535 }
   2536 func rewriteValueMIPS_OpLsh16x32_0(v *Value) bool {
   2537 	b := v.Block
   2538 	_ = b
   2539 	typ := &b.Func.Config.Types
   2540 	_ = typ
   2541 	// match: (Lsh16x32 <t> x y)
   2542 	// cond:
   2543 	// result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
   2544 	for {
   2545 		t := v.Type
   2546 		_ = v.Args[1]
   2547 		x := v.Args[0]
   2548 		y := v.Args[1]
   2549 		v.reset(OpMIPSCMOVZ)
   2550 		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
   2551 		v0.AddArg(x)
   2552 		v0.AddArg(y)
   2553 		v.AddArg(v0)
   2554 		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   2555 		v1.AuxInt = 0
   2556 		v.AddArg(v1)
   2557 		v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
   2558 		v2.AuxInt = 32
   2559 		v2.AddArg(y)
   2560 		v.AddArg(v2)
   2561 		return true
   2562 	}
   2563 }
   2564 func rewriteValueMIPS_OpLsh16x64_0(v *Value) bool {
   2565 	// match: (Lsh16x64 x (Const64 [c]))
   2566 	// cond: uint32(c) < 16
   2567 	// result: (SLLconst x [c])
   2568 	for {
   2569 		_ = v.Args[1]
   2570 		x := v.Args[0]
   2571 		v_1 := v.Args[1]
   2572 		if v_1.Op != OpConst64 {
   2573 			break
   2574 		}
   2575 		c := v_1.AuxInt
   2576 		if !(uint32(c) < 16) {
   2577 			break
   2578 		}
   2579 		v.reset(OpMIPSSLLconst)
   2580 		v.AuxInt = c
   2581 		v.AddArg(x)
   2582 		return true
   2583 	}
   2584 	// match: (Lsh16x64 _ (Const64 [c]))
   2585 	// cond: uint32(c) >= 16
   2586 	// result: (MOVWconst [0])
   2587 	for {
   2588 		_ = v.Args[1]
   2589 		v_1 := v.Args[1]
   2590 		if v_1.Op != OpConst64 {
   2591 			break
   2592 		}
   2593 		c := v_1.AuxInt
   2594 		if !(uint32(c) >= 16) {
   2595 			break
   2596 		}
   2597 		v.reset(OpMIPSMOVWconst)
   2598 		v.AuxInt = 0
   2599 		return true
   2600 	}
   2601 	return false
   2602 }
   2603 func rewriteValueMIPS_OpLsh16x8_0(v *Value) bool {
   2604 	b := v.Block
   2605 	_ = b
   2606 	typ := &b.Func.Config.Types
   2607 	_ = typ
   2608 	// match: (Lsh16x8 <t> x y)
   2609 	// cond:
   2610 	// result: (CMOVZ (SLL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
   2611 	for {
   2612 		t := v.Type
   2613 		_ = v.Args[1]
   2614 		x := v.Args[0]
   2615 		y := v.Args[1]
   2616 		v.reset(OpMIPSCMOVZ)
   2617 		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
   2618 		v0.AddArg(x)
   2619 		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   2620 		v1.AddArg(y)
   2621 		v0.AddArg(v1)
   2622 		v.AddArg(v0)
   2623 		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   2624 		v2.AuxInt = 0
   2625 		v.AddArg(v2)
   2626 		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
   2627 		v3.AuxInt = 32
   2628 		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   2629 		v4.AddArg(y)
   2630 		v3.AddArg(v4)
   2631 		v.AddArg(v3)
   2632 		return true
   2633 	}
   2634 }
   2635 func rewriteValueMIPS_OpLsh32x16_0(v *Value) bool {
   2636 	b := v.Block
   2637 	_ = b
   2638 	typ := &b.Func.Config.Types
   2639 	_ = typ
   2640 	// match: (Lsh32x16 <t> x y)
   2641 	// cond:
   2642 	// result: (CMOVZ (SLL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
   2643 	for {
   2644 		t := v.Type
   2645 		_ = v.Args[1]
   2646 		x := v.Args[0]
   2647 		y := v.Args[1]
   2648 		v.reset(OpMIPSCMOVZ)
   2649 		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
   2650 		v0.AddArg(x)
   2651 		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   2652 		v1.AddArg(y)
   2653 		v0.AddArg(v1)
   2654 		v.AddArg(v0)
   2655 		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   2656 		v2.AuxInt = 0
   2657 		v.AddArg(v2)
   2658 		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
   2659 		v3.AuxInt = 32
   2660 		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   2661 		v4.AddArg(y)
   2662 		v3.AddArg(v4)
   2663 		v.AddArg(v3)
   2664 		return true
   2665 	}
   2666 }
   2667 func rewriteValueMIPS_OpLsh32x32_0(v *Value) bool {
   2668 	b := v.Block
   2669 	_ = b
   2670 	typ := &b.Func.Config.Types
   2671 	_ = typ
   2672 	// match: (Lsh32x32 <t> x y)
   2673 	// cond:
   2674 	// result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
   2675 	for {
   2676 		t := v.Type
   2677 		_ = v.Args[1]
   2678 		x := v.Args[0]
   2679 		y := v.Args[1]
   2680 		v.reset(OpMIPSCMOVZ)
   2681 		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
   2682 		v0.AddArg(x)
   2683 		v0.AddArg(y)
   2684 		v.AddArg(v0)
   2685 		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   2686 		v1.AuxInt = 0
   2687 		v.AddArg(v1)
   2688 		v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
   2689 		v2.AuxInt = 32
   2690 		v2.AddArg(y)
   2691 		v.AddArg(v2)
   2692 		return true
   2693 	}
   2694 }
   2695 func rewriteValueMIPS_OpLsh32x64_0(v *Value) bool {
   2696 	// match: (Lsh32x64 x (Const64 [c]))
   2697 	// cond: uint32(c) < 32
   2698 	// result: (SLLconst x [c])
   2699 	for {
   2700 		_ = v.Args[1]
   2701 		x := v.Args[0]
   2702 		v_1 := v.Args[1]
   2703 		if v_1.Op != OpConst64 {
   2704 			break
   2705 		}
   2706 		c := v_1.AuxInt
   2707 		if !(uint32(c) < 32) {
   2708 			break
   2709 		}
   2710 		v.reset(OpMIPSSLLconst)
   2711 		v.AuxInt = c
   2712 		v.AddArg(x)
   2713 		return true
   2714 	}
   2715 	// match: (Lsh32x64 _ (Const64 [c]))
   2716 	// cond: uint32(c) >= 32
   2717 	// result: (MOVWconst [0])
   2718 	for {
   2719 		_ = v.Args[1]
   2720 		v_1 := v.Args[1]
   2721 		if v_1.Op != OpConst64 {
   2722 			break
   2723 		}
   2724 		c := v_1.AuxInt
   2725 		if !(uint32(c) >= 32) {
   2726 			break
   2727 		}
   2728 		v.reset(OpMIPSMOVWconst)
   2729 		v.AuxInt = 0
   2730 		return true
   2731 	}
   2732 	return false
   2733 }
   2734 func rewriteValueMIPS_OpLsh32x8_0(v *Value) bool {
   2735 	b := v.Block
   2736 	_ = b
   2737 	typ := &b.Func.Config.Types
   2738 	_ = typ
   2739 	// match: (Lsh32x8 <t> x y)
   2740 	// cond:
   2741 	// result: (CMOVZ (SLL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
   2742 	for {
   2743 		t := v.Type
   2744 		_ = v.Args[1]
   2745 		x := v.Args[0]
   2746 		y := v.Args[1]
   2747 		v.reset(OpMIPSCMOVZ)
   2748 		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
   2749 		v0.AddArg(x)
   2750 		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   2751 		v1.AddArg(y)
   2752 		v0.AddArg(v1)
   2753 		v.AddArg(v0)
   2754 		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   2755 		v2.AuxInt = 0
   2756 		v.AddArg(v2)
   2757 		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
   2758 		v3.AuxInt = 32
   2759 		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   2760 		v4.AddArg(y)
   2761 		v3.AddArg(v4)
   2762 		v.AddArg(v3)
   2763 		return true
   2764 	}
   2765 }
   2766 func rewriteValueMIPS_OpLsh8x16_0(v *Value) bool {
   2767 	b := v.Block
   2768 	_ = b
   2769 	typ := &b.Func.Config.Types
   2770 	_ = typ
   2771 	// match: (Lsh8x16 <t> x y)
   2772 	// cond:
   2773 	// result: (CMOVZ (SLL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
   2774 	for {
   2775 		t := v.Type
   2776 		_ = v.Args[1]
   2777 		x := v.Args[0]
   2778 		y := v.Args[1]
   2779 		v.reset(OpMIPSCMOVZ)
   2780 		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
   2781 		v0.AddArg(x)
   2782 		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   2783 		v1.AddArg(y)
   2784 		v0.AddArg(v1)
   2785 		v.AddArg(v0)
   2786 		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   2787 		v2.AuxInt = 0
   2788 		v.AddArg(v2)
   2789 		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
   2790 		v3.AuxInt = 32
   2791 		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   2792 		v4.AddArg(y)
   2793 		v3.AddArg(v4)
   2794 		v.AddArg(v3)
   2795 		return true
   2796 	}
   2797 }
   2798 func rewriteValueMIPS_OpLsh8x32_0(v *Value) bool {
   2799 	b := v.Block
   2800 	_ = b
   2801 	typ := &b.Func.Config.Types
   2802 	_ = typ
   2803 	// match: (Lsh8x32 <t> x y)
   2804 	// cond:
   2805 	// result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
   2806 	for {
   2807 		t := v.Type
   2808 		_ = v.Args[1]
   2809 		x := v.Args[0]
   2810 		y := v.Args[1]
   2811 		v.reset(OpMIPSCMOVZ)
   2812 		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
   2813 		v0.AddArg(x)
   2814 		v0.AddArg(y)
   2815 		v.AddArg(v0)
   2816 		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   2817 		v1.AuxInt = 0
   2818 		v.AddArg(v1)
   2819 		v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
   2820 		v2.AuxInt = 32
   2821 		v2.AddArg(y)
   2822 		v.AddArg(v2)
   2823 		return true
   2824 	}
   2825 }
   2826 func rewriteValueMIPS_OpLsh8x64_0(v *Value) bool {
   2827 	// match: (Lsh8x64 x (Const64 [c]))
   2828 	// cond: uint32(c) < 8
   2829 	// result: (SLLconst x [c])
   2830 	for {
   2831 		_ = v.Args[1]
   2832 		x := v.Args[0]
   2833 		v_1 := v.Args[1]
   2834 		if v_1.Op != OpConst64 {
   2835 			break
   2836 		}
   2837 		c := v_1.AuxInt
   2838 		if !(uint32(c) < 8) {
   2839 			break
   2840 		}
   2841 		v.reset(OpMIPSSLLconst)
   2842 		v.AuxInt = c
   2843 		v.AddArg(x)
   2844 		return true
   2845 	}
   2846 	// match: (Lsh8x64 _ (Const64 [c]))
   2847 	// cond: uint32(c) >= 8
   2848 	// result: (MOVWconst [0])
   2849 	for {
   2850 		_ = v.Args[1]
   2851 		v_1 := v.Args[1]
   2852 		if v_1.Op != OpConst64 {
   2853 			break
   2854 		}
   2855 		c := v_1.AuxInt
   2856 		if !(uint32(c) >= 8) {
   2857 			break
   2858 		}
   2859 		v.reset(OpMIPSMOVWconst)
   2860 		v.AuxInt = 0
   2861 		return true
   2862 	}
   2863 	return false
   2864 }
   2865 func rewriteValueMIPS_OpLsh8x8_0(v *Value) bool {
   2866 	b := v.Block
   2867 	_ = b
   2868 	typ := &b.Func.Config.Types
   2869 	_ = typ
   2870 	// match: (Lsh8x8 <t> x y)
   2871 	// cond:
   2872 	// result: (CMOVZ (SLL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
   2873 	for {
   2874 		t := v.Type
   2875 		_ = v.Args[1]
   2876 		x := v.Args[0]
   2877 		y := v.Args[1]
   2878 		v.reset(OpMIPSCMOVZ)
   2879 		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
   2880 		v0.AddArg(x)
   2881 		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   2882 		v1.AddArg(y)
   2883 		v0.AddArg(v1)
   2884 		v.AddArg(v0)
   2885 		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   2886 		v2.AuxInt = 0
   2887 		v.AddArg(v2)
   2888 		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
   2889 		v3.AuxInt = 32
   2890 		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   2891 		v4.AddArg(y)
   2892 		v3.AddArg(v4)
   2893 		v.AddArg(v3)
   2894 		return true
   2895 	}
   2896 }
   2897 func rewriteValueMIPS_OpMIPSADD_0(v *Value) bool {
   2898 	// match: (ADD x (MOVWconst [c]))
   2899 	// cond:
   2900 	// result: (ADDconst [c] x)
   2901 	for {
   2902 		_ = v.Args[1]
   2903 		x := v.Args[0]
   2904 		v_1 := v.Args[1]
   2905 		if v_1.Op != OpMIPSMOVWconst {
   2906 			break
   2907 		}
   2908 		c := v_1.AuxInt
   2909 		v.reset(OpMIPSADDconst)
   2910 		v.AuxInt = c
   2911 		v.AddArg(x)
   2912 		return true
   2913 	}
   2914 	// match: (ADD (MOVWconst [c]) x)
   2915 	// cond:
   2916 	// result: (ADDconst [c] x)
   2917 	for {
   2918 		_ = v.Args[1]
   2919 		v_0 := v.Args[0]
   2920 		if v_0.Op != OpMIPSMOVWconst {
   2921 			break
   2922 		}
   2923 		c := v_0.AuxInt
   2924 		x := v.Args[1]
   2925 		v.reset(OpMIPSADDconst)
   2926 		v.AuxInt = c
   2927 		v.AddArg(x)
   2928 		return true
   2929 	}
   2930 	// match: (ADD x (NEG y))
   2931 	// cond:
   2932 	// result: (SUB x y)
   2933 	for {
   2934 		_ = v.Args[1]
   2935 		x := v.Args[0]
   2936 		v_1 := v.Args[1]
   2937 		if v_1.Op != OpMIPSNEG {
   2938 			break
   2939 		}
   2940 		y := v_1.Args[0]
   2941 		v.reset(OpMIPSSUB)
   2942 		v.AddArg(x)
   2943 		v.AddArg(y)
   2944 		return true
   2945 	}
   2946 	// match: (ADD (NEG y) x)
   2947 	// cond:
   2948 	// result: (SUB x y)
   2949 	for {
   2950 		_ = v.Args[1]
   2951 		v_0 := v.Args[0]
   2952 		if v_0.Op != OpMIPSNEG {
   2953 			break
   2954 		}
   2955 		y := v_0.Args[0]
   2956 		x := v.Args[1]
   2957 		v.reset(OpMIPSSUB)
   2958 		v.AddArg(x)
   2959 		v.AddArg(y)
   2960 		return true
   2961 	}
   2962 	return false
   2963 }
   2964 func rewriteValueMIPS_OpMIPSADDconst_0(v *Value) bool {
   2965 	// match: (ADDconst [off1] (MOVWaddr [off2] {sym} ptr))
   2966 	// cond:
   2967 	// result: (MOVWaddr [off1+off2] {sym} ptr)
   2968 	for {
   2969 		off1 := v.AuxInt
   2970 		v_0 := v.Args[0]
   2971 		if v_0.Op != OpMIPSMOVWaddr {
   2972 			break
   2973 		}
   2974 		off2 := v_0.AuxInt
   2975 		sym := v_0.Aux
   2976 		ptr := v_0.Args[0]
   2977 		v.reset(OpMIPSMOVWaddr)
   2978 		v.AuxInt = off1 + off2
   2979 		v.Aux = sym
   2980 		v.AddArg(ptr)
   2981 		return true
   2982 	}
   2983 	// match: (ADDconst [0] x)
   2984 	// cond:
   2985 	// result: x
   2986 	for {
   2987 		if v.AuxInt != 0 {
   2988 			break
   2989 		}
   2990 		x := v.Args[0]
   2991 		v.reset(OpCopy)
   2992 		v.Type = x.Type
   2993 		v.AddArg(x)
   2994 		return true
   2995 	}
   2996 	// match: (ADDconst [c] (MOVWconst [d]))
   2997 	// cond:
   2998 	// result: (MOVWconst [int64(int32(c+d))])
   2999 	for {
   3000 		c := v.AuxInt
   3001 		v_0 := v.Args[0]
   3002 		if v_0.Op != OpMIPSMOVWconst {
   3003 			break
   3004 		}
   3005 		d := v_0.AuxInt
   3006 		v.reset(OpMIPSMOVWconst)
   3007 		v.AuxInt = int64(int32(c + d))
   3008 		return true
   3009 	}
   3010 	// match: (ADDconst [c] (ADDconst [d] x))
   3011 	// cond:
   3012 	// result: (ADDconst [int64(int32(c+d))] x)
   3013 	for {
   3014 		c := v.AuxInt
   3015 		v_0 := v.Args[0]
   3016 		if v_0.Op != OpMIPSADDconst {
   3017 			break
   3018 		}
   3019 		d := v_0.AuxInt
   3020 		x := v_0.Args[0]
   3021 		v.reset(OpMIPSADDconst)
   3022 		v.AuxInt = int64(int32(c + d))
   3023 		v.AddArg(x)
   3024 		return true
   3025 	}
   3026 	// match: (ADDconst [c] (SUBconst [d] x))
   3027 	// cond:
   3028 	// result: (ADDconst [int64(int32(c-d))] x)
   3029 	for {
   3030 		c := v.AuxInt
   3031 		v_0 := v.Args[0]
   3032 		if v_0.Op != OpMIPSSUBconst {
   3033 			break
   3034 		}
   3035 		d := v_0.AuxInt
   3036 		x := v_0.Args[0]
   3037 		v.reset(OpMIPSADDconst)
   3038 		v.AuxInt = int64(int32(c - d))
   3039 		v.AddArg(x)
   3040 		return true
   3041 	}
   3042 	return false
   3043 }
   3044 func rewriteValueMIPS_OpMIPSAND_0(v *Value) bool {
   3045 	b := v.Block
   3046 	_ = b
   3047 	// match: (AND x (MOVWconst [c]))
   3048 	// cond:
   3049 	// result: (ANDconst [c] x)
   3050 	for {
   3051 		_ = v.Args[1]
   3052 		x := v.Args[0]
   3053 		v_1 := v.Args[1]
   3054 		if v_1.Op != OpMIPSMOVWconst {
   3055 			break
   3056 		}
   3057 		c := v_1.AuxInt
   3058 		v.reset(OpMIPSANDconst)
   3059 		v.AuxInt = c
   3060 		v.AddArg(x)
   3061 		return true
   3062 	}
   3063 	// match: (AND (MOVWconst [c]) x)
   3064 	// cond:
   3065 	// result: (ANDconst [c] x)
   3066 	for {
   3067 		_ = v.Args[1]
   3068 		v_0 := v.Args[0]
   3069 		if v_0.Op != OpMIPSMOVWconst {
   3070 			break
   3071 		}
   3072 		c := v_0.AuxInt
   3073 		x := v.Args[1]
   3074 		v.reset(OpMIPSANDconst)
   3075 		v.AuxInt = c
   3076 		v.AddArg(x)
   3077 		return true
   3078 	}
   3079 	// match: (AND x x)
   3080 	// cond:
   3081 	// result: x
   3082 	for {
   3083 		_ = v.Args[1]
   3084 		x := v.Args[0]
   3085 		if x != v.Args[1] {
   3086 			break
   3087 		}
   3088 		v.reset(OpCopy)
   3089 		v.Type = x.Type
   3090 		v.AddArg(x)
   3091 		return true
   3092 	}
   3093 	// match: (AND (SGTUconst [1] x) (SGTUconst [1] y))
   3094 	// cond:
   3095 	// result: (SGTUconst [1] (OR <x.Type> x y))
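	// SGTUconst [1] x is 1 exactly when x == 0, so the conjunction
	// (x == 0) && (y == 0) folds to a single test (x|y) == 0.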
   3096 	for {
   3097 		_ = v.Args[1]
   3098 		v_0 := v.Args[0]
   3099 		if v_0.Op != OpMIPSSGTUconst {
   3100 			break
   3101 		}
   3102 		if v_0.AuxInt != 1 {
   3103 			break
   3104 		}
   3105 		x := v_0.Args[0]
   3106 		v_1 := v.Args[1]
   3107 		if v_1.Op != OpMIPSSGTUconst {
   3108 			break
   3109 		}
   3110 		if v_1.AuxInt != 1 {
   3111 			break
   3112 		}
   3113 		y := v_1.Args[0]
   3114 		v.reset(OpMIPSSGTUconst)
   3115 		v.AuxInt = 1
   3116 		v0 := b.NewValue0(v.Pos, OpMIPSOR, x.Type)
   3117 		v0.AddArg(x)
   3118 		v0.AddArg(y)
   3119 		v.AddArg(v0)
   3120 		return true
   3121 	}
   3122 	// match: (AND (SGTUconst [1] y) (SGTUconst [1] x))
   3123 	// cond:
   3124 	// result: (SGTUconst [1] (OR <x.Type> x y))
   3125 	for {
   3126 		_ = v.Args[1]
   3127 		v_0 := v.Args[0]
   3128 		if v_0.Op != OpMIPSSGTUconst {
   3129 			break
   3130 		}
   3131 		if v_0.AuxInt != 1 {
   3132 			break
   3133 		}
   3134 		y := v_0.Args[0]
   3135 		v_1 := v.Args[1]
   3136 		if v_1.Op != OpMIPSSGTUconst {
   3137 			break
   3138 		}
   3139 		if v_1.AuxInt != 1 {
   3140 			break
   3141 		}
   3142 		x := v_1.Args[0]
   3143 		v.reset(OpMIPSSGTUconst)
   3144 		v.AuxInt = 1
   3145 		v0 := b.NewValue0(v.Pos, OpMIPSOR, x.Type)
   3146 		v0.AddArg(x)
   3147 		v0.AddArg(y)
   3148 		v.AddArg(v0)
   3149 		return true
   3150 	}
   3151 	return false
   3152 }
   3153 func rewriteValueMIPS_OpMIPSANDconst_0(v *Value) bool {
   3154 	// match: (ANDconst [0] _)
   3155 	// cond:
   3156 	// result: (MOVWconst [0])
   3157 	for {
   3158 		if v.AuxInt != 0 {
   3159 			break
   3160 		}
   3161 		v.reset(OpMIPSMOVWconst)
   3162 		v.AuxInt = 0
   3163 		return true
   3164 	}
   3165 	// match: (ANDconst [-1] x)
   3166 	// cond:
   3167 	// result: x
   3168 	for {
   3169 		if v.AuxInt != -1 {
   3170 			break
   3171 		}
   3172 		x := v.Args[0]
   3173 		v.reset(OpCopy)
   3174 		v.Type = x.Type
   3175 		v.AddArg(x)
   3176 		return true
   3177 	}
   3178 	// match: (ANDconst [c] (MOVWconst [d]))
   3179 	// cond:
   3180 	// result: (MOVWconst [c&d])
   3181 	for {
   3182 		c := v.AuxInt
   3183 		v_0 := v.Args[0]
   3184 		if v_0.Op != OpMIPSMOVWconst {
   3185 			break
   3186 		}
   3187 		d := v_0.AuxInt
   3188 		v.reset(OpMIPSMOVWconst)
   3189 		v.AuxInt = c & d
   3190 		return true
   3191 	}
   3192 	// match: (ANDconst [c] (ANDconst [d] x))
   3193 	// cond:
   3194 	// result: (ANDconst [c&d] x)
   3195 	for {
   3196 		c := v.AuxInt
   3197 		v_0 := v.Args[0]
   3198 		if v_0.Op != OpMIPSANDconst {
   3199 			break
   3200 		}
   3201 		d := v_0.AuxInt
   3202 		x := v_0.Args[0]
   3203 		v.reset(OpMIPSANDconst)
   3204 		v.AuxInt = c & d
   3205 		v.AddArg(x)
   3206 		return true
   3207 	}
   3208 	return false
   3209 }
   3210 func rewriteValueMIPS_OpMIPSCMOVZ_0(v *Value) bool {
   3211 	b := v.Block
   3212 	_ = b
   3213 	// match: (CMOVZ _ b (MOVWconst [0]))
   3214 	// cond:
   3215 	// result: b
   3216 	for {
   3217 		_ = v.Args[2]
   3218 		b := v.Args[1]
   3219 		v_2 := v.Args[2]
   3220 		if v_2.Op != OpMIPSMOVWconst {
   3221 			break
   3222 		}
   3223 		if v_2.AuxInt != 0 {
   3224 			break
   3225 		}
   3226 		v.reset(OpCopy)
   3227 		v.Type = b.Type
   3228 		v.AddArg(b)
   3229 		return true
   3230 	}
   3231 	// match: (CMOVZ a _ (MOVWconst [c]))
   3232 	// cond: c!=0
   3233 	// result: a
   3234 	for {
   3235 		_ = v.Args[2]
   3236 		a := v.Args[0]
   3237 		v_2 := v.Args[2]
   3238 		if v_2.Op != OpMIPSMOVWconst {
   3239 			break
   3240 		}
   3241 		c := v_2.AuxInt
   3242 		if !(c != 0) {
   3243 			break
   3244 		}
   3245 		v.reset(OpCopy)
   3246 		v.Type = a.Type
   3247 		v.AddArg(a)
   3248 		return true
   3249 	}
   3250 	// match: (CMOVZ a (MOVWconst [0]) c)
   3251 	// cond:
   3252 	// result: (CMOVZzero a c)
   3253 	for {
   3254 		_ = v.Args[2]
   3255 		a := v.Args[0]
   3256 		v_1 := v.Args[1]
   3257 		if v_1.Op != OpMIPSMOVWconst {
   3258 			break
   3259 		}
   3260 		if v_1.AuxInt != 0 {
   3261 			break
   3262 		}
   3263 		c := v.Args[2]
   3264 		v.reset(OpMIPSCMOVZzero)
   3265 		v.AddArg(a)
   3266 		v.AddArg(c)
   3267 		return true
   3268 	}
   3269 	return false
   3270 }
   3271 func rewriteValueMIPS_OpMIPSCMOVZzero_0(v *Value) bool {
   3272 	// match: (CMOVZzero _ (MOVWconst [0]))
   3273 	// cond:
   3274 	// result: (MOVWconst [0])
   3275 	for {
   3276 		_ = v.Args[1]
   3277 		v_1 := v.Args[1]
   3278 		if v_1.Op != OpMIPSMOVWconst {
   3279 			break
   3280 		}
   3281 		if v_1.AuxInt != 0 {
   3282 			break
   3283 		}
   3284 		v.reset(OpMIPSMOVWconst)
   3285 		v.AuxInt = 0
   3286 		return true
   3287 	}
   3288 	// match: (CMOVZzero a (MOVWconst [c]))
   3289 	// cond: c!=0
   3290 	// result: a
   3291 	for {
   3292 		_ = v.Args[1]
   3293 		a := v.Args[0]
   3294 		v_1 := v.Args[1]
   3295 		if v_1.Op != OpMIPSMOVWconst {
   3296 			break
   3297 		}
   3298 		c := v_1.AuxInt
   3299 		if !(c != 0) {
   3300 			break
   3301 		}
   3302 		v.reset(OpCopy)
   3303 		v.Type = a.Type
   3304 		v.AddArg(a)
   3305 		return true
   3306 	}
   3307 	return false
   3308 }
   3309 func rewriteValueMIPS_OpMIPSLoweredAtomicAdd_0(v *Value) bool {
   3310 	// match: (LoweredAtomicAdd ptr (MOVWconst [c]) mem)
   3311 	// cond: is16Bit(c)
   3312 	// result: (LoweredAtomicAddconst [c] ptr mem)
   3313 	for {
   3314 		_ = v.Args[2]
   3315 		ptr := v.Args[0]
   3316 		v_1 := v.Args[1]
   3317 		if v_1.Op != OpMIPSMOVWconst {
   3318 			break
   3319 		}
   3320 		c := v_1.AuxInt
   3321 		mem := v.Args[2]
   3322 		if !(is16Bit(c)) {
   3323 			break
   3324 		}
   3325 		v.reset(OpMIPSLoweredAtomicAddconst)
   3326 		v.AuxInt = c
   3327 		v.AddArg(ptr)
   3328 		v.AddArg(mem)
   3329 		return true
   3330 	}
   3331 	return false
   3332 }
   3333 func rewriteValueMIPS_OpMIPSLoweredAtomicStore_0(v *Value) bool {
   3334 	// match: (LoweredAtomicStore ptr (MOVWconst [0]) mem)
   3335 	// cond:
   3336 	// result: (LoweredAtomicStorezero ptr mem)
   3337 	for {
   3338 		_ = v.Args[2]
   3339 		ptr := v.Args[0]
   3340 		v_1 := v.Args[1]
   3341 		if v_1.Op != OpMIPSMOVWconst {
   3342 			break
   3343 		}
   3344 		if v_1.AuxInt != 0 {
   3345 			break
   3346 		}
   3347 		mem := v.Args[2]
   3348 		v.reset(OpMIPSLoweredAtomicStorezero)
   3349 		v.AddArg(ptr)
   3350 		v.AddArg(mem)
   3351 		return true
   3352 	}
   3353 	return false
   3354 }
   3355 func rewriteValueMIPS_OpMIPSMOVBUload_0(v *Value) bool {
   3356 	// match: (MOVBUload [off1] {sym} x:(ADDconst [off2] ptr) mem)
   3357 	// cond: (is16Bit(off1+off2) || x.Uses == 1)
   3358 	// result: (MOVBUload [off1+off2] {sym} ptr mem)
   3359 	for {
   3360 		off1 := v.AuxInt
   3361 		sym := v.Aux
   3362 		_ = v.Args[1]
   3363 		x := v.Args[0]
   3364 		if x.Op != OpMIPSADDconst {
   3365 			break
   3366 		}
   3367 		off2 := x.AuxInt
   3368 		ptr := x.Args[0]
   3369 		mem := v.Args[1]
   3370 		if !(is16Bit(off1+off2) || x.Uses == 1) {
   3371 			break
   3372 		}
   3373 		v.reset(OpMIPSMOVBUload)
   3374 		v.AuxInt = off1 + off2
   3375 		v.Aux = sym
   3376 		v.AddArg(ptr)
   3377 		v.AddArg(mem)
   3378 		return true
   3379 	}
   3380 	// match: (MOVBUload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
   3381 	// cond: canMergeSym(sym1,sym2)
   3382 	// result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
   3383 	for {
   3384 		off1 := v.AuxInt
   3385 		sym1 := v.Aux
   3386 		_ = v.Args[1]
   3387 		v_0 := v.Args[0]
   3388 		if v_0.Op != OpMIPSMOVWaddr {
   3389 			break
   3390 		}
   3391 		off2 := v_0.AuxInt
   3392 		sym2 := v_0.Aux
   3393 		ptr := v_0.Args[0]
   3394 		mem := v.Args[1]
   3395 		if !(canMergeSym(sym1, sym2)) {
   3396 			break
   3397 		}
   3398 		v.reset(OpMIPSMOVBUload)
   3399 		v.AuxInt = off1 + off2
   3400 		v.Aux = mergeSym(sym1, sym2)
   3401 		v.AddArg(ptr)
   3402 		v.AddArg(mem)
   3403 		return true
   3404 	}
   3405 	// match: (MOVBUload [off] {sym} ptr (MOVBstore [off2] {sym2} ptr2 x _))
   3406 	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
   3407 	// result: (MOVBUreg x)
   3408 	for {
   3409 		off := v.AuxInt
   3410 		sym := v.Aux
   3411 		_ = v.Args[1]
   3412 		ptr := v.Args[0]
   3413 		v_1 := v.Args[1]
   3414 		if v_1.Op != OpMIPSMOVBstore {
   3415 			break
   3416 		}
   3417 		off2 := v_1.AuxInt
   3418 		sym2 := v_1.Aux
   3419 		_ = v_1.Args[2]
   3420 		ptr2 := v_1.Args[0]
   3421 		x := v_1.Args[1]
   3422 		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
   3423 			break
   3424 		}
   3425 		v.reset(OpMIPSMOVBUreg)
   3426 		v.AddArg(x)
   3427 		return true
   3428 	}
   3429 	return false
   3430 }
   3431 func rewriteValueMIPS_OpMIPSMOVBUreg_0(v *Value) bool {
   3432 	b := v.Block
   3433 	_ = b
   3434 	// match: (MOVBUreg x:(MOVBUload _ _))
   3435 	// cond:
   3436 	// result: (MOVWreg x)
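	// The byte load already zero-extends into the full 32-bit register,
	// so the explicit zero-extension reduces to a plain word move.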
   3437 	for {
   3438 		x := v.Args[0]
   3439 		if x.Op != OpMIPSMOVBUload {
   3440 			break
   3441 		}
   3442 		_ = x.Args[1]
   3443 		v.reset(OpMIPSMOVWreg)
   3444 		v.AddArg(x)
   3445 		return true
   3446 	}
   3447 	// match: (MOVBUreg x:(MOVBUreg _))
   3448 	// cond:
   3449 	// result: (MOVWreg x)
   3450 	for {
   3451 		x := v.Args[0]
   3452 		if x.Op != OpMIPSMOVBUreg {
   3453 			break
   3454 		}
   3455 		v.reset(OpMIPSMOVWreg)
   3456 		v.AddArg(x)
   3457 		return true
   3458 	}
   3459 	// match: (MOVBUreg <t> x:(MOVBload [off] {sym} ptr mem))
   3460 	// cond: x.Uses == 1 && clobber(x)
   3461 	// result: @x.Block (MOVBUload <t> [off] {sym} ptr mem)
   3462 	for {
   3463 		t := v.Type
   3464 		x := v.Args[0]
   3465 		if x.Op != OpMIPSMOVBload {
   3466 			break
   3467 		}
   3468 		off := x.AuxInt
   3469 		sym := x.Aux
   3470 		_ = x.Args[1]
   3471 		ptr := x.Args[0]
   3472 		mem := x.Args[1]
   3473 		if !(x.Uses == 1 && clobber(x)) {
   3474 			break
   3475 		}
   3476 		b = x.Block
   3477 		v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, t)
   3478 		v.reset(OpCopy)
   3479 		v.AddArg(v0)
   3480 		v0.AuxInt = off
   3481 		v0.Aux = sym
   3482 		v0.AddArg(ptr)
   3483 		v0.AddArg(mem)
   3484 		return true
   3485 	}
   3486 	// match: (MOVBUreg (ANDconst [c] x))
   3487 	// cond:
   3488 	// result: (ANDconst [c&0xff] x)
   3489 	for {
   3490 		v_0 := v.Args[0]
   3491 		if v_0.Op != OpMIPSANDconst {
   3492 			break
   3493 		}
   3494 		c := v_0.AuxInt
   3495 		x := v_0.Args[0]
   3496 		v.reset(OpMIPSANDconst)
   3497 		v.AuxInt = c & 0xff
   3498 		v.AddArg(x)
   3499 		return true
   3500 	}
   3501 	// match: (MOVBUreg (MOVWconst [c]))
   3502 	// cond:
   3503 	// result: (MOVWconst [int64(uint8(c))])
   3504 	for {
   3505 		v_0 := v.Args[0]
   3506 		if v_0.Op != OpMIPSMOVWconst {
   3507 			break
   3508 		}
   3509 		c := v_0.AuxInt
   3510 		v.reset(OpMIPSMOVWconst)
   3511 		v.AuxInt = int64(uint8(c))
   3512 		return true
   3513 	}
   3514 	return false
   3515 }
   3516 func rewriteValueMIPS_OpMIPSMOVBload_0(v *Value) bool {
   3517 	// match: (MOVBload [off1] {sym} x:(ADDconst [off2] ptr) mem)
   3518 	// cond: (is16Bit(off1+off2) || x.Uses == 1)
   3519 	// result: (MOVBload  [off1+off2] {sym} ptr mem)
   3520 	for {
   3521 		off1 := v.AuxInt
   3522 		sym := v.Aux
   3523 		_ = v.Args[1]
   3524 		x := v.Args[0]
   3525 		if x.Op != OpMIPSADDconst {
   3526 			break
   3527 		}
   3528 		off2 := x.AuxInt
   3529 		ptr := x.Args[0]
   3530 		mem := v.Args[1]
   3531 		if !(is16Bit(off1+off2) || x.Uses == 1) {
   3532 			break
   3533 		}
   3534 		v.reset(OpMIPSMOVBload)
   3535 		v.AuxInt = off1 + off2
   3536 		v.Aux = sym
   3537 		v.AddArg(ptr)
   3538 		v.AddArg(mem)
   3539 		return true
   3540 	}
   3541 	// match: (MOVBload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
   3542 	// cond: canMergeSym(sym1,sym2)
   3543 	// result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
   3544 	for {
   3545 		off1 := v.AuxInt
   3546 		sym1 := v.Aux
   3547 		_ = v.Args[1]
   3548 		v_0 := v.Args[0]
   3549 		if v_0.Op != OpMIPSMOVWaddr {
   3550 			break
   3551 		}
   3552 		off2 := v_0.AuxInt
   3553 		sym2 := v_0.Aux
   3554 		ptr := v_0.Args[0]
   3555 		mem := v.Args[1]
   3556 		if !(canMergeSym(sym1, sym2)) {
   3557 			break
   3558 		}
   3559 		v.reset(OpMIPSMOVBload)
   3560 		v.AuxInt = off1 + off2
   3561 		v.Aux = mergeSym(sym1, sym2)
   3562 		v.AddArg(ptr)
   3563 		v.AddArg(mem)
   3564 		return true
   3565 	}
   3566 	// match: (MOVBload [off] {sym} ptr (MOVBstore [off2] {sym2} ptr2 x _))
   3567 	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
   3568 	// result: (MOVBreg x)
   3569 	for {
   3570 		off := v.AuxInt
   3571 		sym := v.Aux
   3572 		_ = v.Args[1]
   3573 		ptr := v.Args[0]
   3574 		v_1 := v.Args[1]
   3575 		if v_1.Op != OpMIPSMOVBstore {
   3576 			break
   3577 		}
   3578 		off2 := v_1.AuxInt
   3579 		sym2 := v_1.Aux
   3580 		_ = v_1.Args[2]
   3581 		ptr2 := v_1.Args[0]
   3582 		x := v_1.Args[1]
   3583 		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
   3584 			break
   3585 		}
   3586 		v.reset(OpMIPSMOVBreg)
   3587 		v.AddArg(x)
   3588 		return true
   3589 	}
   3590 	return false
   3591 }
   3592 func rewriteValueMIPS_OpMIPSMOVBreg_0(v *Value) bool {
   3593 	b := v.Block
   3594 	_ = b
   3595 	// match: (MOVBreg x:(MOVBload _ _))
   3596 	// cond:
   3597 	// result: (MOVWreg x)
   3598 	for {
   3599 		x := v.Args[0]
   3600 		if x.Op != OpMIPSMOVBload {
   3601 			break
   3602 		}
   3603 		_ = x.Args[1]
   3604 		v.reset(OpMIPSMOVWreg)
   3605 		v.AddArg(x)
   3606 		return true
   3607 	}
   3608 	// match: (MOVBreg x:(MOVBreg _))
   3609 	// cond:
   3610 	// result: (MOVWreg x)
   3611 	for {
   3612 		x := v.Args[0]
   3613 		if x.Op != OpMIPSMOVBreg {
   3614 			break
   3615 		}
   3616 		v.reset(OpMIPSMOVWreg)
   3617 		v.AddArg(x)
   3618 		return true
   3619 	}
   3620 	// match: (MOVBreg <t> x:(MOVBUload [off] {sym} ptr mem))
   3621 	// cond: x.Uses == 1 && clobber(x)
   3622 	// result: @x.Block (MOVBload <t> [off] {sym} ptr mem)
   3623 	for {
   3624 		t := v.Type
   3625 		x := v.Args[0]
   3626 		if x.Op != OpMIPSMOVBUload {
   3627 			break
   3628 		}
   3629 		off := x.AuxInt
   3630 		sym := x.Aux
   3631 		_ = x.Args[1]
   3632 		ptr := x.Args[0]
   3633 		mem := x.Args[1]
   3634 		if !(x.Uses == 1 && clobber(x)) {
   3635 			break
   3636 		}
   3637 		b = x.Block
   3638 		v0 := b.NewValue0(v.Pos, OpMIPSMOVBload, t)
   3639 		v.reset(OpCopy)
   3640 		v.AddArg(v0)
   3641 		v0.AuxInt = off
   3642 		v0.Aux = sym
   3643 		v0.AddArg(ptr)
   3644 		v0.AddArg(mem)
   3645 		return true
   3646 	}
   3647 	// match: (MOVBreg (ANDconst [c] x))
   3648 	// cond: c & 0x80 == 0
   3649 	// result: (ANDconst [c&0x7f] x)
   3650 	for {
   3651 		v_0 := v.Args[0]
   3652 		if v_0.Op != OpMIPSANDconst {
   3653 			break
   3654 		}
   3655 		c := v_0.AuxInt
   3656 		x := v_0.Args[0]
   3657 		if !(c&0x80 == 0) {
   3658 			break
   3659 		}
   3660 		v.reset(OpMIPSANDconst)
   3661 		v.AuxInt = c & 0x7f
   3662 		v.AddArg(x)
   3663 		return true
   3664 	}
   3665 	// match: (MOVBreg (MOVWconst [c]))
   3666 	// cond:
   3667 	// result: (MOVWconst [int64(int8(c))])
   3668 	for {
   3669 		v_0 := v.Args[0]
   3670 		if v_0.Op != OpMIPSMOVWconst {
   3671 			break
   3672 		}
   3673 		c := v_0.AuxInt
   3674 		v.reset(OpMIPSMOVWconst)
   3675 		v.AuxInt = int64(int8(c))
   3676 		return true
   3677 	}
   3678 	return false
   3679 }
   3680 func rewriteValueMIPS_OpMIPSMOVBstore_0(v *Value) bool {
   3681 	// match: (MOVBstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
   3682 	// cond: (is16Bit(off1+off2) || x.Uses == 1)
   3683 	// result: (MOVBstore [off1+off2] {sym} ptr val mem)
   3684 	for {
   3685 		off1 := v.AuxInt
   3686 		sym := v.Aux
   3687 		_ = v.Args[2]
   3688 		x := v.Args[0]
   3689 		if x.Op != OpMIPSADDconst {
   3690 			break
   3691 		}
   3692 		off2 := x.AuxInt
   3693 		ptr := x.Args[0]
   3694 		val := v.Args[1]
   3695 		mem := v.Args[2]
   3696 		if !(is16Bit(off1+off2) || x.Uses == 1) {
   3697 			break
   3698 		}
   3699 		v.reset(OpMIPSMOVBstore)
   3700 		v.AuxInt = off1 + off2
   3701 		v.Aux = sym
   3702 		v.AddArg(ptr)
   3703 		v.AddArg(val)
   3704 		v.AddArg(mem)
   3705 		return true
   3706 	}
   3707 	// match: (MOVBstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
   3708 	// cond: canMergeSym(sym1,sym2)
   3709 	// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
   3710 	for {
   3711 		off1 := v.AuxInt
   3712 		sym1 := v.Aux
   3713 		_ = v.Args[2]
   3714 		v_0 := v.Args[0]
   3715 		if v_0.Op != OpMIPSMOVWaddr {
   3716 			break
   3717 		}
   3718 		off2 := v_0.AuxInt
   3719 		sym2 := v_0.Aux
   3720 		ptr := v_0.Args[0]
   3721 		val := v.Args[1]
   3722 		mem := v.Args[2]
   3723 		if !(canMergeSym(sym1, sym2)) {
   3724 			break
   3725 		}
   3726 		v.reset(OpMIPSMOVBstore)
   3727 		v.AuxInt = off1 + off2
   3728 		v.Aux = mergeSym(sym1, sym2)
   3729 		v.AddArg(ptr)
   3730 		v.AddArg(val)
   3731 		v.AddArg(mem)
   3732 		return true
   3733 	}
   3734 	// match: (MOVBstore [off] {sym} ptr (MOVWconst [0]) mem)
   3735 	// cond:
   3736 	// result: (MOVBstorezero [off] {sym} ptr mem)
   3737 	for {
   3738 		off := v.AuxInt
   3739 		sym := v.Aux
   3740 		_ = v.Args[2]
   3741 		ptr := v.Args[0]
   3742 		v_1 := v.Args[1]
   3743 		if v_1.Op != OpMIPSMOVWconst {
   3744 			break
   3745 		}
   3746 		if v_1.AuxInt != 0 {
   3747 			break
   3748 		}
   3749 		mem := v.Args[2]
   3750 		v.reset(OpMIPSMOVBstorezero)
   3751 		v.AuxInt = off
   3752 		v.Aux = sym
   3753 		v.AddArg(ptr)
   3754 		v.AddArg(mem)
   3755 		return true
   3756 	}
   3757 	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
   3758 	// cond:
   3759 	// result: (MOVBstore [off] {sym} ptr x mem)
   3760 	for {
   3761 		off := v.AuxInt
   3762 		sym := v.Aux
   3763 		_ = v.Args[2]
   3764 		ptr := v.Args[0]
   3765 		v_1 := v.Args[1]
   3766 		if v_1.Op != OpMIPSMOVBreg {
   3767 			break
   3768 		}
   3769 		x := v_1.Args[0]
   3770 		mem := v.Args[2]
   3771 		v.reset(OpMIPSMOVBstore)
   3772 		v.AuxInt = off
   3773 		v.Aux = sym
   3774 		v.AddArg(ptr)
   3775 		v.AddArg(x)
   3776 		v.AddArg(mem)
   3777 		return true
   3778 	}
   3779 	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
   3780 	// cond:
   3781 	// result: (MOVBstore [off] {sym} ptr x mem)
   3782 	for {
   3783 		off := v.AuxInt
   3784 		sym := v.Aux
   3785 		_ = v.Args[2]
   3786 		ptr := v.Args[0]
   3787 		v_1 := v.Args[1]
   3788 		if v_1.Op != OpMIPSMOVBUreg {
   3789 			break
   3790 		}
   3791 		x := v_1.Args[0]
   3792 		mem := v.Args[2]
   3793 		v.reset(OpMIPSMOVBstore)
   3794 		v.AuxInt = off
   3795 		v.Aux = sym
   3796 		v.AddArg(ptr)
   3797 		v.AddArg(x)
   3798 		v.AddArg(mem)
   3799 		return true
   3800 	}
   3801 	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
   3802 	// cond:
   3803 	// result: (MOVBstore [off] {sym} ptr x mem)
   3804 	for {
   3805 		off := v.AuxInt
   3806 		sym := v.Aux
   3807 		_ = v.Args[2]
   3808 		ptr := v.Args[0]
   3809 		v_1 := v.Args[1]
   3810 		if v_1.Op != OpMIPSMOVHreg {
   3811 			break
   3812 		}
   3813 		x := v_1.Args[0]
   3814 		mem := v.Args[2]
   3815 		v.reset(OpMIPSMOVBstore)
   3816 		v.AuxInt = off
   3817 		v.Aux = sym
   3818 		v.AddArg(ptr)
   3819 		v.AddArg(x)
   3820 		v.AddArg(mem)
   3821 		return true
   3822 	}
   3823 	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
   3824 	// cond:
   3825 	// result: (MOVBstore [off] {sym} ptr x mem)
   3826 	for {
   3827 		off := v.AuxInt
   3828 		sym := v.Aux
   3829 		_ = v.Args[2]
   3830 		ptr := v.Args[0]
   3831 		v_1 := v.Args[1]
   3832 		if v_1.Op != OpMIPSMOVHUreg {
   3833 			break
   3834 		}
   3835 		x := v_1.Args[0]
   3836 		mem := v.Args[2]
   3837 		v.reset(OpMIPSMOVBstore)
   3838 		v.AuxInt = off
   3839 		v.Aux = sym
   3840 		v.AddArg(ptr)
   3841 		v.AddArg(x)
   3842 		v.AddArg(mem)
   3843 		return true
   3844 	}
   3845 	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
   3846 	// cond:
   3847 	// result: (MOVBstore [off] {sym} ptr x mem)
   3848 	for {
   3849 		off := v.AuxInt
   3850 		sym := v.Aux
   3851 		_ = v.Args[2]
   3852 		ptr := v.Args[0]
   3853 		v_1 := v.Args[1]
   3854 		if v_1.Op != OpMIPSMOVWreg {
   3855 			break
   3856 		}
   3857 		x := v_1.Args[0]
   3858 		mem := v.Args[2]
   3859 		v.reset(OpMIPSMOVBstore)
   3860 		v.AuxInt = off
   3861 		v.Aux = sym
   3862 		v.AddArg(ptr)
   3863 		v.AddArg(x)
   3864 		v.AddArg(mem)
   3865 		return true
   3866 	}
   3867 	return false
   3868 }
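// Summary of the rules below (derived from the generated cases): a zeroing
// byte store absorbs an ADDconst offset when the combined offset still fits
// in 16 bits (or the ADDconst has no other uses), and folds a MOVWaddr base's
// offset and symbol into the store when the symbols can be merged.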
   3869 func rewriteValueMIPS_OpMIPSMOVBstorezero_0(v *Value) bool {
   3870 	// match: (MOVBstorezero [off1] {sym} x:(ADDconst [off2] ptr) mem)
   3871 	// cond: (is16Bit(off1+off2) || x.Uses == 1)
   3872 	// result: (MOVBstorezero [off1+off2] {sym} ptr mem)
   3873 	for {
   3874 		off1 := v.AuxInt
   3875 		sym := v.Aux
   3876 		_ = v.Args[1]
   3877 		x := v.Args[0]
   3878 		if x.Op != OpMIPSADDconst {
   3879 			break
   3880 		}
   3881 		off2 := x.AuxInt
   3882 		ptr := x.Args[0]
   3883 		mem := v.Args[1]
   3884 		if !(is16Bit(off1+off2) || x.Uses == 1) {
   3885 			break
   3886 		}
   3887 		v.reset(OpMIPSMOVBstorezero)
   3888 		v.AuxInt = off1 + off2
   3889 		v.Aux = sym
   3890 		v.AddArg(ptr)
   3891 		v.AddArg(mem)
   3892 		return true
   3893 	}
   3894 	// match: (MOVBstorezero [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
   3895 	// cond: canMergeSym(sym1,sym2)
   3896 	// result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
   3897 	for {
   3898 		off1 := v.AuxInt
   3899 		sym1 := v.Aux
   3900 		_ = v.Args[1]
   3901 		v_0 := v.Args[0]
   3902 		if v_0.Op != OpMIPSMOVWaddr {
   3903 			break
   3904 		}
   3905 		off2 := v_0.AuxInt
   3906 		sym2 := v_0.Aux
   3907 		ptr := v_0.Args[0]
   3908 		mem := v.Args[1]
   3909 		if !(canMergeSym(sym1, sym2)) {
   3910 			break
   3911 		}
   3912 		v.reset(OpMIPSMOVBstorezero)
   3913 		v.AuxInt = off1 + off2
   3914 		v.Aux = mergeSym(sym1, sym2)
   3915 		v.AddArg(ptr)
   3916 		v.AddArg(mem)
   3917 		return true
   3918 	}
   3919 	return false
   3920 }
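// Summary of the rules below: a 64-bit float load folds constant offsets and
// MOVWaddr symbols into its addressing, and a load that reads back the value
// just stored to the same symbol, offset, and pointer is replaced by that
// stored value directly (store-to-load forwarding).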
   3921 func rewriteValueMIPS_OpMIPSMOVDload_0(v *Value) bool {
   3922 	// match: (MOVDload [off1] {sym} x:(ADDconst [off2] ptr) mem)
   3923 	// cond: (is16Bit(off1+off2) || x.Uses == 1)
   3924 	// result: (MOVDload  [off1+off2] {sym} ptr mem)
   3925 	for {
   3926 		off1 := v.AuxInt
   3927 		sym := v.Aux
   3928 		_ = v.Args[1]
   3929 		x := v.Args[0]
   3930 		if x.Op != OpMIPSADDconst {
   3931 			break
   3932 		}
   3933 		off2 := x.AuxInt
   3934 		ptr := x.Args[0]
   3935 		mem := v.Args[1]
   3936 		if !(is16Bit(off1+off2) || x.Uses == 1) {
   3937 			break
   3938 		}
   3939 		v.reset(OpMIPSMOVDload)
   3940 		v.AuxInt = off1 + off2
   3941 		v.Aux = sym
   3942 		v.AddArg(ptr)
   3943 		v.AddArg(mem)
   3944 		return true
   3945 	}
   3946 	// match: (MOVDload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
   3947 	// cond: canMergeSym(sym1,sym2)
   3948 	// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
   3949 	for {
   3950 		off1 := v.AuxInt
   3951 		sym1 := v.Aux
   3952 		_ = v.Args[1]
   3953 		v_0 := v.Args[0]
   3954 		if v_0.Op != OpMIPSMOVWaddr {
   3955 			break
   3956 		}
   3957 		off2 := v_0.AuxInt
   3958 		sym2 := v_0.Aux
   3959 		ptr := v_0.Args[0]
   3960 		mem := v.Args[1]
   3961 		if !(canMergeSym(sym1, sym2)) {
   3962 			break
   3963 		}
   3964 		v.reset(OpMIPSMOVDload)
   3965 		v.AuxInt = off1 + off2
   3966 		v.Aux = mergeSym(sym1, sym2)
   3967 		v.AddArg(ptr)
   3968 		v.AddArg(mem)
   3969 		return true
   3970 	}
   3971 	// match: (MOVDload [off] {sym} ptr (MOVDstore [off2] {sym2} ptr2 x _))
   3972 	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
   3973 	// result: x
   3974 	for {
   3975 		off := v.AuxInt
   3976 		sym := v.Aux
   3977 		_ = v.Args[1]
   3978 		ptr := v.Args[0]
   3979 		v_1 := v.Args[1]
   3980 		if v_1.Op != OpMIPSMOVDstore {
   3981 			break
   3982 		}
   3983 		off2 := v_1.AuxInt
   3984 		sym2 := v_1.Aux
   3985 		_ = v_1.Args[2]
   3986 		ptr2 := v_1.Args[0]
   3987 		x := v_1.Args[1]
   3988 		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
   3989 			break
   3990 		}
   3991 		v.reset(OpCopy)
   3992 		v.Type = x.Type
   3993 		v.AddArg(x)
   3994 		return true
   3995 	}
   3996 	return false
   3997 }
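// Summary of the rules below: a 64-bit float store folds ADDconst offsets and
// MOVWaddr bases into its addressing, mirroring the load rules above.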
   3998 func rewriteValueMIPS_OpMIPSMOVDstore_0(v *Value) bool {
   3999 	// match: (MOVDstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
   4000 	// cond: (is16Bit(off1+off2) || x.Uses == 1)
   4001 	// result: (MOVDstore [off1+off2] {sym} ptr val mem)
   4002 	for {
   4003 		off1 := v.AuxInt
   4004 		sym := v.Aux
   4005 		_ = v.Args[2]
   4006 		x := v.Args[0]
   4007 		if x.Op != OpMIPSADDconst {
   4008 			break
   4009 		}
   4010 		off2 := x.AuxInt
   4011 		ptr := x.Args[0]
   4012 		val := v.Args[1]
   4013 		mem := v.Args[2]
   4014 		if !(is16Bit(off1+off2) || x.Uses == 1) {
   4015 			break
   4016 		}
   4017 		v.reset(OpMIPSMOVDstore)
   4018 		v.AuxInt = off1 + off2
   4019 		v.Aux = sym
   4020 		v.AddArg(ptr)
   4021 		v.AddArg(val)
   4022 		v.AddArg(mem)
   4023 		return true
   4024 	}
   4025 	// match: (MOVDstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
   4026 	// cond: canMergeSym(sym1,sym2)
   4027 	// result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
   4028 	for {
   4029 		off1 := v.AuxInt
   4030 		sym1 := v.Aux
   4031 		_ = v.Args[2]
   4032 		v_0 := v.Args[0]
   4033 		if v_0.Op != OpMIPSMOVWaddr {
   4034 			break
   4035 		}
   4036 		off2 := v_0.AuxInt
   4037 		sym2 := v_0.Aux
   4038 		ptr := v_0.Args[0]
   4039 		val := v.Args[1]
   4040 		mem := v.Args[2]
   4041 		if !(canMergeSym(sym1, sym2)) {
   4042 			break
   4043 		}
   4044 		v.reset(OpMIPSMOVDstore)
   4045 		v.AuxInt = off1 + off2
   4046 		v.Aux = mergeSym(sym1, sym2)
   4047 		v.AddArg(ptr)
   4048 		v.AddArg(val)
   4049 		v.AddArg(mem)
   4050 		return true
   4051 	}
   4052 	return false
   4053 }
   4054 func rewriteValueMIPS_OpMIPSMOVFload_0(v *Value) bool {
   4055 	// match: (MOVFload [off1] {sym} x:(ADDconst [off2] ptr) mem)
   4056 	// cond: (is16Bit(off1+off2) || x.Uses == 1)
   4057 	// result: (MOVFload  [off1+off2] {sym} ptr mem)
   4058 	for {
   4059 		off1 := v.AuxInt
   4060 		sym := v.Aux
   4061 		_ = v.Args[1]
   4062 		x := v.Args[0]
   4063 		if x.Op != OpMIPSADDconst {
   4064 			break
   4065 		}
   4066 		off2 := x.AuxInt
   4067 		ptr := x.Args[0]
   4068 		mem := v.Args[1]
   4069 		if !(is16Bit(off1+off2) || x.Uses == 1) {
   4070 			break
   4071 		}
   4072 		v.reset(OpMIPSMOVFload)
   4073 		v.AuxInt = off1 + off2
   4074 		v.Aux = sym
   4075 		v.AddArg(ptr)
   4076 		v.AddArg(mem)
   4077 		return true
   4078 	}
   4079 	// match: (MOVFload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
   4080 	// cond: canMergeSym(sym1,sym2)
   4081 	// result: (MOVFload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
   4082 	for {
   4083 		off1 := v.AuxInt
   4084 		sym1 := v.Aux
   4085 		_ = v.Args[1]
   4086 		v_0 := v.Args[0]
   4087 		if v_0.Op != OpMIPSMOVWaddr {
   4088 			break
   4089 		}
   4090 		off2 := v_0.AuxInt
   4091 		sym2 := v_0.Aux
   4092 		ptr := v_0.Args[0]
   4093 		mem := v.Args[1]
   4094 		if !(canMergeSym(sym1, sym2)) {
   4095 			break
   4096 		}
   4097 		v.reset(OpMIPSMOVFload)
   4098 		v.AuxInt = off1 + off2
   4099 		v.Aux = mergeSym(sym1, sym2)
   4100 		v.AddArg(ptr)
   4101 		v.AddArg(mem)
   4102 		return true
   4103 	}
   4104 	// match: (MOVFload [off] {sym} ptr (MOVFstore [off2] {sym2} ptr2 x _))
   4105 	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
   4106 	// result: x
   4107 	for {
   4108 		off := v.AuxInt
   4109 		sym := v.Aux
   4110 		_ = v.Args[1]
   4111 		ptr := v.Args[0]
   4112 		v_1 := v.Args[1]
   4113 		if v_1.Op != OpMIPSMOVFstore {
   4114 			break
   4115 		}
   4116 		off2 := v_1.AuxInt
   4117 		sym2 := v_1.Aux
   4118 		_ = v_1.Args[2]
   4119 		ptr2 := v_1.Args[0]
   4120 		x := v_1.Args[1]
   4121 		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
   4122 			break
   4123 		}
   4124 		v.reset(OpCopy)
   4125 		v.Type = x.Type
   4126 		v.AddArg(x)
   4127 		return true
   4128 	}
   4129 	return false
   4130 }
   4131 func rewriteValueMIPS_OpMIPSMOVFstore_0(v *Value) bool {
   4132 	// match: (MOVFstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
   4133 	// cond: (is16Bit(off1+off2) || x.Uses == 1)
   4134 	// result: (MOVFstore [off1+off2] {sym} ptr val mem)
   4135 	for {
   4136 		off1 := v.AuxInt
   4137 		sym := v.Aux
   4138 		_ = v.Args[2]
   4139 		x := v.Args[0]
   4140 		if x.Op != OpMIPSADDconst {
   4141 			break
   4142 		}
   4143 		off2 := x.AuxInt
   4144 		ptr := x.Args[0]
   4145 		val := v.Args[1]
   4146 		mem := v.Args[2]
   4147 		if !(is16Bit(off1+off2) || x.Uses == 1) {
   4148 			break
   4149 		}
   4150 		v.reset(OpMIPSMOVFstore)
   4151 		v.AuxInt = off1 + off2
   4152 		v.Aux = sym
   4153 		v.AddArg(ptr)
   4154 		v.AddArg(val)
   4155 		v.AddArg(mem)
   4156 		return true
   4157 	}
   4158 	// match: (MOVFstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
   4159 	// cond: canMergeSym(sym1,sym2)
   4160 	// result: (MOVFstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
   4161 	for {
   4162 		off1 := v.AuxInt
   4163 		sym1 := v.Aux
   4164 		_ = v.Args[2]
   4165 		v_0 := v.Args[0]
   4166 		if v_0.Op != OpMIPSMOVWaddr {
   4167 			break
   4168 		}
   4169 		off2 := v_0.AuxInt
   4170 		sym2 := v_0.Aux
   4171 		ptr := v_0.Args[0]
   4172 		val := v.Args[1]
   4173 		mem := v.Args[2]
   4174 		if !(canMergeSym(sym1, sym2)) {
   4175 			break
   4176 		}
   4177 		v.reset(OpMIPSMOVFstore)
   4178 		v.AuxInt = off1 + off2
   4179 		v.Aux = mergeSym(sym1, sym2)
   4180 		v.AddArg(ptr)
   4181 		v.AddArg(val)
   4182 		v.AddArg(mem)
   4183 		return true
   4184 	}
   4185 	return false
   4186 }
   4187 func rewriteValueMIPS_OpMIPSMOVHUload_0(v *Value) bool {
   4188 	// match: (MOVHUload [off1] {sym} x:(ADDconst [off2] ptr) mem)
   4189 	// cond: (is16Bit(off1+off2) || x.Uses == 1)
   4190 	// result: (MOVHUload [off1+off2] {sym} ptr mem)
   4191 	for {
   4192 		off1 := v.AuxInt
   4193 		sym := v.Aux
   4194 		_ = v.Args[1]
   4195 		x := v.Args[0]
   4196 		if x.Op != OpMIPSADDconst {
   4197 			break
   4198 		}
   4199 		off2 := x.AuxInt
   4200 		ptr := x.Args[0]
   4201 		mem := v.Args[1]
   4202 		if !(is16Bit(off1+off2) || x.Uses == 1) {
   4203 			break
   4204 		}
   4205 		v.reset(OpMIPSMOVHUload)
   4206 		v.AuxInt = off1 + off2
   4207 		v.Aux = sym
   4208 		v.AddArg(ptr)
   4209 		v.AddArg(mem)
   4210 		return true
   4211 	}
   4212 	// match: (MOVHUload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
   4213 	// cond: canMergeSym(sym1,sym2)
   4214 	// result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
   4215 	for {
   4216 		off1 := v.AuxInt
   4217 		sym1 := v.Aux
   4218 		_ = v.Args[1]
   4219 		v_0 := v.Args[0]
   4220 		if v_0.Op != OpMIPSMOVWaddr {
   4221 			break
   4222 		}
   4223 		off2 := v_0.AuxInt
   4224 		sym2 := v_0.Aux
   4225 		ptr := v_0.Args[0]
   4226 		mem := v.Args[1]
   4227 		if !(canMergeSym(sym1, sym2)) {
   4228 			break
   4229 		}
   4230 		v.reset(OpMIPSMOVHUload)
   4231 		v.AuxInt = off1 + off2
   4232 		v.Aux = mergeSym(sym1, sym2)
   4233 		v.AddArg(ptr)
   4234 		v.AddArg(mem)
   4235 		return true
   4236 	}
   4237 	// match: (MOVHUload [off] {sym} ptr (MOVHstore [off2] {sym2} ptr2 x _))
   4238 	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
   4239 	// result: (MOVHUreg x)
   4240 	for {
   4241 		off := v.AuxInt
   4242 		sym := v.Aux
   4243 		_ = v.Args[1]
   4244 		ptr := v.Args[0]
   4245 		v_1 := v.Args[1]
   4246 		if v_1.Op != OpMIPSMOVHstore {
   4247 			break
   4248 		}
   4249 		off2 := v_1.AuxInt
   4250 		sym2 := v_1.Aux
   4251 		_ = v_1.Args[2]
   4252 		ptr2 := v_1.Args[0]
   4253 		x := v_1.Args[1]
   4254 		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
   4255 			break
   4256 		}
   4257 		v.reset(OpMIPSMOVHUreg)
   4258 		v.AddArg(x)
   4259 		return true
   4260 	}
   4261 	return false
   4262 }
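// Summary of the rules below: redundant zero-extensions of halfwords are
// removed. An operand that is already zero-extended (an unsigned byte or
// halfword load, or a previous MOVBUreg/MOVHUreg) only needs a MOVWreg; a
// single-use signed halfword load is replaced by an unsigned load emitted in
// the load's block; an ANDconst mask is narrowed to the low 16 bits; and a
// constant operand is zero-extended at compile time.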
   4263 func rewriteValueMIPS_OpMIPSMOVHUreg_0(v *Value) bool {
   4264 	b := v.Block
   4265 	_ = b
   4266 	// match: (MOVHUreg x:(MOVBUload _ _))
   4267 	// cond:
   4268 	// result: (MOVWreg x)
   4269 	for {
   4270 		x := v.Args[0]
   4271 		if x.Op != OpMIPSMOVBUload {
   4272 			break
   4273 		}
   4274 		_ = x.Args[1]
   4275 		v.reset(OpMIPSMOVWreg)
   4276 		v.AddArg(x)
   4277 		return true
   4278 	}
   4279 	// match: (MOVHUreg x:(MOVHUload _ _))
   4280 	// cond:
   4281 	// result: (MOVWreg x)
   4282 	for {
   4283 		x := v.Args[0]
   4284 		if x.Op != OpMIPSMOVHUload {
   4285 			break
   4286 		}
   4287 		_ = x.Args[1]
   4288 		v.reset(OpMIPSMOVWreg)
   4289 		v.AddArg(x)
   4290 		return true
   4291 	}
   4292 	// match: (MOVHUreg x:(MOVBUreg _))
   4293 	// cond:
   4294 	// result: (MOVWreg x)
   4295 	for {
   4296 		x := v.Args[0]
   4297 		if x.Op != OpMIPSMOVBUreg {
   4298 			break
   4299 		}
   4300 		v.reset(OpMIPSMOVWreg)
   4301 		v.AddArg(x)
   4302 		return true
   4303 	}
   4304 	// match: (MOVHUreg x:(MOVHUreg _))
   4305 	// cond:
   4306 	// result: (MOVWreg x)
   4307 	for {
   4308 		x := v.Args[0]
   4309 		if x.Op != OpMIPSMOVHUreg {
   4310 			break
   4311 		}
   4312 		v.reset(OpMIPSMOVWreg)
   4313 		v.AddArg(x)
   4314 		return true
   4315 	}
   4316 	// match: (MOVHUreg <t> x:(MOVHload [off] {sym} ptr mem))
   4317 	// cond: x.Uses == 1 && clobber(x)
   4318 	// result: @x.Block (MOVHUload <t> [off] {sym} ptr mem)
   4319 	for {
   4320 		t := v.Type
   4321 		x := v.Args[0]
   4322 		if x.Op != OpMIPSMOVHload {
   4323 			break
   4324 		}
   4325 		off := x.AuxInt
   4326 		sym := x.Aux
   4327 		_ = x.Args[1]
   4328 		ptr := x.Args[0]
   4329 		mem := x.Args[1]
   4330 		if !(x.Uses == 1 && clobber(x)) {
   4331 			break
   4332 		}
   4333 		b = x.Block
   4334 		v0 := b.NewValue0(v.Pos, OpMIPSMOVHUload, t)
   4335 		v.reset(OpCopy)
   4336 		v.AddArg(v0)
   4337 		v0.AuxInt = off
   4338 		v0.Aux = sym
   4339 		v0.AddArg(ptr)
   4340 		v0.AddArg(mem)
   4341 		return true
   4342 	}
   4343 	// match: (MOVHUreg (ANDconst [c] x))
   4344 	// cond:
   4345 	// result: (ANDconst [c&0xffff] x)
   4346 	for {
   4347 		v_0 := v.Args[0]
   4348 		if v_0.Op != OpMIPSANDconst {
   4349 			break
   4350 		}
   4351 		c := v_0.AuxInt
   4352 		x := v_0.Args[0]
   4353 		v.reset(OpMIPSANDconst)
   4354 		v.AuxInt = c & 0xffff
   4355 		v.AddArg(x)
   4356 		return true
   4357 	}
   4358 	// match: (MOVHUreg (MOVWconst [c]))
   4359 	// cond:
   4360 	// result: (MOVWconst [int64(uint16(c))])
   4361 	for {
   4362 		v_0 := v.Args[0]
   4363 		if v_0.Op != OpMIPSMOVWconst {
   4364 			break
   4365 		}
   4366 		c := v_0.AuxInt
   4367 		v.reset(OpMIPSMOVWconst)
   4368 		v.AuxInt = int64(uint16(c))
   4369 		return true
   4370 	}
   4371 	return false
   4372 }
   4373 func rewriteValueMIPS_OpMIPSMOVHload_0(v *Value) bool {
   4374 	// match: (MOVHload [off1] {sym} x:(ADDconst [off2] ptr) mem)
   4375 	// cond: (is16Bit(off1+off2) || x.Uses == 1)
   4376 	// result: (MOVHload  [off1+off2] {sym} ptr mem)
   4377 	for {
   4378 		off1 := v.AuxInt
   4379 		sym := v.Aux
   4380 		_ = v.Args[1]
   4381 		x := v.Args[0]
   4382 		if x.Op != OpMIPSADDconst {
   4383 			break
   4384 		}
   4385 		off2 := x.AuxInt
   4386 		ptr := x.Args[0]
   4387 		mem := v.Args[1]
   4388 		if !(is16Bit(off1+off2) || x.Uses == 1) {
   4389 			break
   4390 		}
   4391 		v.reset(OpMIPSMOVHload)
   4392 		v.AuxInt = off1 + off2
   4393 		v.Aux = sym
   4394 		v.AddArg(ptr)
   4395 		v.AddArg(mem)
   4396 		return true
   4397 	}
   4398 	// match: (MOVHload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
   4399 	// cond: canMergeSym(sym1,sym2)
   4400 	// result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
   4401 	for {
   4402 		off1 := v.AuxInt
   4403 		sym1 := v.Aux
   4404 		_ = v.Args[1]
   4405 		v_0 := v.Args[0]
   4406 		if v_0.Op != OpMIPSMOVWaddr {
   4407 			break
   4408 		}
   4409 		off2 := v_0.AuxInt
   4410 		sym2 := v_0.Aux
   4411 		ptr := v_0.Args[0]
   4412 		mem := v.Args[1]
   4413 		if !(canMergeSym(sym1, sym2)) {
   4414 			break
   4415 		}
   4416 		v.reset(OpMIPSMOVHload)
   4417 		v.AuxInt = off1 + off2
   4418 		v.Aux = mergeSym(sym1, sym2)
   4419 		v.AddArg(ptr)
   4420 		v.AddArg(mem)
   4421 		return true
   4422 	}
   4423 	// match: (MOVHload [off] {sym} ptr (MOVHstore [off2] {sym2} ptr2 x _))
   4424 	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
   4425 	// result: (MOVHreg x)
   4426 	for {
   4427 		off := v.AuxInt
   4428 		sym := v.Aux
   4429 		_ = v.Args[1]
   4430 		ptr := v.Args[0]
   4431 		v_1 := v.Args[1]
   4432 		if v_1.Op != OpMIPSMOVHstore {
   4433 			break
   4434 		}
   4435 		off2 := v_1.AuxInt
   4436 		sym2 := v_1.Aux
   4437 		_ = v_1.Args[2]
   4438 		ptr2 := v_1.Args[0]
   4439 		x := v_1.Args[1]
   4440 		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
   4441 			break
   4442 		}
   4443 		v.reset(OpMIPSMOVHreg)
   4444 		v.AddArg(x)
   4445 		return true
   4446 	}
   4447 	return false
   4448 }
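// Summary of the rules below: redundant sign-extensions of halfwords are
// removed. Operands already known to fit in 16 signed bits (narrow loads or
// previous byte/halfword extensions) only need a MOVWreg; a single-use
// unsigned halfword load becomes a signed load emitted in the load's block;
// an ANDconst mask whose bit 15 is clear keeps only the low 15 bits; and a
// constant operand is sign-extended at compile time.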
   4449 func rewriteValueMIPS_OpMIPSMOVHreg_0(v *Value) bool {
   4450 	b := v.Block
   4451 	_ = b
   4452 	// match: (MOVHreg x:(MOVBload _ _))
   4453 	// cond:
   4454 	// result: (MOVWreg x)
   4455 	for {
   4456 		x := v.Args[0]
   4457 		if x.Op != OpMIPSMOVBload {
   4458 			break
   4459 		}
   4460 		_ = x.Args[1]
   4461 		v.reset(OpMIPSMOVWreg)
   4462 		v.AddArg(x)
   4463 		return true
   4464 	}
   4465 	// match: (MOVHreg x:(MOVBUload _ _))
   4466 	// cond:
   4467 	// result: (MOVWreg x)
   4468 	for {
   4469 		x := v.Args[0]
   4470 		if x.Op != OpMIPSMOVBUload {
   4471 			break
   4472 		}
   4473 		_ = x.Args[1]
   4474 		v.reset(OpMIPSMOVWreg)
   4475 		v.AddArg(x)
   4476 		return true
   4477 	}
   4478 	// match: (MOVHreg x:(MOVHload _ _))
   4479 	// cond:
   4480 	// result: (MOVWreg x)
   4481 	for {
   4482 		x := v.Args[0]
   4483 		if x.Op != OpMIPSMOVHload {
   4484 			break
   4485 		}
   4486 		_ = x.Args[1]
   4487 		v.reset(OpMIPSMOVWreg)
   4488 		v.AddArg(x)
   4489 		return true
   4490 	}
   4491 	// match: (MOVHreg x:(MOVBreg _))
   4492 	// cond:
   4493 	// result: (MOVWreg x)
   4494 	for {
   4495 		x := v.Args[0]
   4496 		if x.Op != OpMIPSMOVBreg {
   4497 			break
   4498 		}
   4499 		v.reset(OpMIPSMOVWreg)
   4500 		v.AddArg(x)
   4501 		return true
   4502 	}
   4503 	// match: (MOVHreg x:(MOVBUreg _))
   4504 	// cond:
   4505 	// result: (MOVWreg x)
   4506 	for {
   4507 		x := v.Args[0]
   4508 		if x.Op != OpMIPSMOVBUreg {
   4509 			break
   4510 		}
   4511 		v.reset(OpMIPSMOVWreg)
   4512 		v.AddArg(x)
   4513 		return true
   4514 	}
   4515 	// match: (MOVHreg x:(MOVHreg _))
   4516 	// cond:
   4517 	// result: (MOVWreg x)
   4518 	for {
   4519 		x := v.Args[0]
   4520 		if x.Op != OpMIPSMOVHreg {
   4521 			break
   4522 		}
   4523 		v.reset(OpMIPSMOVWreg)
   4524 		v.AddArg(x)
   4525 		return true
   4526 	}
   4527 	// match: (MOVHreg <t> x:(MOVHUload [off] {sym} ptr mem))
   4528 	// cond: x.Uses == 1 && clobber(x)
   4529 	// result: @x.Block (MOVHload <t> [off] {sym} ptr mem)
   4530 	for {
   4531 		t := v.Type
   4532 		x := v.Args[0]
   4533 		if x.Op != OpMIPSMOVHUload {
   4534 			break
   4535 		}
   4536 		off := x.AuxInt
   4537 		sym := x.Aux
   4538 		_ = x.Args[1]
   4539 		ptr := x.Args[0]
   4540 		mem := x.Args[1]
   4541 		if !(x.Uses == 1 && clobber(x)) {
   4542 			break
   4543 		}
   4544 		b = x.Block
   4545 		v0 := b.NewValue0(v.Pos, OpMIPSMOVHload, t)
   4546 		v.reset(OpCopy)
   4547 		v.AddArg(v0)
   4548 		v0.AuxInt = off
   4549 		v0.Aux = sym
   4550 		v0.AddArg(ptr)
   4551 		v0.AddArg(mem)
   4552 		return true
   4553 	}
   4554 	// match: (MOVHreg (ANDconst [c] x))
   4555 	// cond: c & 0x8000 == 0
   4556 	// result: (ANDconst [c&0x7fff] x)
   4557 	for {
   4558 		v_0 := v.Args[0]
   4559 		if v_0.Op != OpMIPSANDconst {
   4560 			break
   4561 		}
   4562 		c := v_0.AuxInt
   4563 		x := v_0.Args[0]
   4564 		if !(c&0x8000 == 0) {
   4565 			break
   4566 		}
   4567 		v.reset(OpMIPSANDconst)
   4568 		v.AuxInt = c & 0x7fff
   4569 		v.AddArg(x)
   4570 		return true
   4571 	}
   4572 	// match: (MOVHreg (MOVWconst [c]))
   4573 	// cond:
   4574 	// result: (MOVWconst [int64(int16(c))])
   4575 	for {
   4576 		v_0 := v.Args[0]
   4577 		if v_0.Op != OpMIPSMOVWconst {
   4578 			break
   4579 		}
   4580 		c := v_0.AuxInt
   4581 		v.reset(OpMIPSMOVWconst)
   4582 		v.AuxInt = int64(int16(c))
   4583 		return true
   4584 	}
   4585 	return false
   4586 }
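// Summary of the rules below: a halfword store folds offsets and symbols into
// its addressing, a store of constant zero becomes MOVHstorezero, and
// sign/zero-extensions of the stored value are dropped, since only the low
// 16 bits are written anyway.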
   4587 func rewriteValueMIPS_OpMIPSMOVHstore_0(v *Value) bool {
   4588 	// match: (MOVHstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
   4589 	// cond: (is16Bit(off1+off2) || x.Uses == 1)
   4590 	// result: (MOVHstore [off1+off2] {sym} ptr val mem)
   4591 	for {
   4592 		off1 := v.AuxInt
   4593 		sym := v.Aux
   4594 		_ = v.Args[2]
   4595 		x := v.Args[0]
   4596 		if x.Op != OpMIPSADDconst {
   4597 			break
   4598 		}
   4599 		off2 := x.AuxInt
   4600 		ptr := x.Args[0]
   4601 		val := v.Args[1]
   4602 		mem := v.Args[2]
   4603 		if !(is16Bit(off1+off2) || x.Uses == 1) {
   4604 			break
   4605 		}
   4606 		v.reset(OpMIPSMOVHstore)
   4607 		v.AuxInt = off1 + off2
   4608 		v.Aux = sym
   4609 		v.AddArg(ptr)
   4610 		v.AddArg(val)
   4611 		v.AddArg(mem)
   4612 		return true
   4613 	}
   4614 	// match: (MOVHstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
   4615 	// cond: canMergeSym(sym1,sym2)
   4616 	// result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
   4617 	for {
   4618 		off1 := v.AuxInt
   4619 		sym1 := v.Aux
   4620 		_ = v.Args[2]
   4621 		v_0 := v.Args[0]
   4622 		if v_0.Op != OpMIPSMOVWaddr {
   4623 			break
   4624 		}
   4625 		off2 := v_0.AuxInt
   4626 		sym2 := v_0.Aux
   4627 		ptr := v_0.Args[0]
   4628 		val := v.Args[1]
   4629 		mem := v.Args[2]
   4630 		if !(canMergeSym(sym1, sym2)) {
   4631 			break
   4632 		}
   4633 		v.reset(OpMIPSMOVHstore)
   4634 		v.AuxInt = off1 + off2
   4635 		v.Aux = mergeSym(sym1, sym2)
   4636 		v.AddArg(ptr)
   4637 		v.AddArg(val)
   4638 		v.AddArg(mem)
   4639 		return true
   4640 	}
   4641 	// match: (MOVHstore [off] {sym} ptr (MOVWconst [0]) mem)
   4642 	// cond:
   4643 	// result: (MOVHstorezero [off] {sym} ptr mem)
   4644 	for {
   4645 		off := v.AuxInt
   4646 		sym := v.Aux
   4647 		_ = v.Args[2]
   4648 		ptr := v.Args[0]
   4649 		v_1 := v.Args[1]
   4650 		if v_1.Op != OpMIPSMOVWconst {
   4651 			break
   4652 		}
   4653 		if v_1.AuxInt != 0 {
   4654 			break
   4655 		}
   4656 		mem := v.Args[2]
   4657 		v.reset(OpMIPSMOVHstorezero)
   4658 		v.AuxInt = off
   4659 		v.Aux = sym
   4660 		v.AddArg(ptr)
   4661 		v.AddArg(mem)
   4662 		return true
   4663 	}
   4664 	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
   4665 	// cond:
   4666 	// result: (MOVHstore [off] {sym} ptr x mem)
   4667 	for {
   4668 		off := v.AuxInt
   4669 		sym := v.Aux
   4670 		_ = v.Args[2]
   4671 		ptr := v.Args[0]
   4672 		v_1 := v.Args[1]
   4673 		if v_1.Op != OpMIPSMOVHreg {
   4674 			break
   4675 		}
   4676 		x := v_1.Args[0]
   4677 		mem := v.Args[2]
   4678 		v.reset(OpMIPSMOVHstore)
   4679 		v.AuxInt = off
   4680 		v.Aux = sym
   4681 		v.AddArg(ptr)
   4682 		v.AddArg(x)
   4683 		v.AddArg(mem)
   4684 		return true
   4685 	}
   4686 	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
   4687 	// cond:
   4688 	// result: (MOVHstore [off] {sym} ptr x mem)
   4689 	for {
   4690 		off := v.AuxInt
   4691 		sym := v.Aux
   4692 		_ = v.Args[2]
   4693 		ptr := v.Args[0]
   4694 		v_1 := v.Args[1]
   4695 		if v_1.Op != OpMIPSMOVHUreg {
   4696 			break
   4697 		}
   4698 		x := v_1.Args[0]
   4699 		mem := v.Args[2]
   4700 		v.reset(OpMIPSMOVHstore)
   4701 		v.AuxInt = off
   4702 		v.Aux = sym
   4703 		v.AddArg(ptr)
   4704 		v.AddArg(x)
   4705 		v.AddArg(mem)
   4706 		return true
   4707 	}
   4708 	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
   4709 	// cond:
   4710 	// result: (MOVHstore [off] {sym} ptr x mem)
   4711 	for {
   4712 		off := v.AuxInt
   4713 		sym := v.Aux
   4714 		_ = v.Args[2]
   4715 		ptr := v.Args[0]
   4716 		v_1 := v.Args[1]
   4717 		if v_1.Op != OpMIPSMOVWreg {
   4718 			break
   4719 		}
   4720 		x := v_1.Args[0]
   4721 		mem := v.Args[2]
   4722 		v.reset(OpMIPSMOVHstore)
   4723 		v.AuxInt = off
   4724 		v.Aux = sym
   4725 		v.AddArg(ptr)
   4726 		v.AddArg(x)
   4727 		v.AddArg(mem)
   4728 		return true
   4729 	}
   4730 	return false
   4731 }
   4732 func rewriteValueMIPS_OpMIPSMOVHstorezero_0(v *Value) bool {
   4733 	// match: (MOVHstorezero [off1] {sym} x:(ADDconst [off2] ptr) mem)
   4734 	// cond: (is16Bit(off1+off2) || x.Uses == 1)
   4735 	// result: (MOVHstorezero [off1+off2] {sym} ptr mem)
   4736 	for {
   4737 		off1 := v.AuxInt
   4738 		sym := v.Aux
   4739 		_ = v.Args[1]
   4740 		x := v.Args[0]
   4741 		if x.Op != OpMIPSADDconst {
   4742 			break
   4743 		}
   4744 		off2 := x.AuxInt
   4745 		ptr := x.Args[0]
   4746 		mem := v.Args[1]
   4747 		if !(is16Bit(off1+off2) || x.Uses == 1) {
   4748 			break
   4749 		}
   4750 		v.reset(OpMIPSMOVHstorezero)
   4751 		v.AuxInt = off1 + off2
   4752 		v.Aux = sym
   4753 		v.AddArg(ptr)
   4754 		v.AddArg(mem)
   4755 		return true
   4756 	}
   4757 	// match: (MOVHstorezero [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
   4758 	// cond: canMergeSym(sym1,sym2)
   4759 	// result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
   4760 	for {
   4761 		off1 := v.AuxInt
   4762 		sym1 := v.Aux
   4763 		_ = v.Args[1]
   4764 		v_0 := v.Args[0]
   4765 		if v_0.Op != OpMIPSMOVWaddr {
   4766 			break
   4767 		}
   4768 		off2 := v_0.AuxInt
   4769 		sym2 := v_0.Aux
   4770 		ptr := v_0.Args[0]
   4771 		mem := v.Args[1]
   4772 		if !(canMergeSym(sym1, sym2)) {
   4773 			break
   4774 		}
   4775 		v.reset(OpMIPSMOVHstorezero)
   4776 		v.AuxInt = off1 + off2
   4777 		v.Aux = mergeSym(sym1, sym2)
   4778 		v.AddArg(ptr)
   4779 		v.AddArg(mem)
   4780 		return true
   4781 	}
   4782 	return false
   4783 }
   4784 func rewriteValueMIPS_OpMIPSMOVWload_0(v *Value) bool {
   4785 	// match: (MOVWload [off1] {sym} x:(ADDconst [off2] ptr) mem)
   4786 	// cond: (is16Bit(off1+off2) || x.Uses == 1)
   4787 	// result: (MOVWload  [off1+off2] {sym} ptr mem)
   4788 	for {
   4789 		off1 := v.AuxInt
   4790 		sym := v.Aux
   4791 		_ = v.Args[1]
   4792 		x := v.Args[0]
   4793 		if x.Op != OpMIPSADDconst {
   4794 			break
   4795 		}
   4796 		off2 := x.AuxInt
   4797 		ptr := x.Args[0]
   4798 		mem := v.Args[1]
   4799 		if !(is16Bit(off1+off2) || x.Uses == 1) {
   4800 			break
   4801 		}
   4802 		v.reset(OpMIPSMOVWload)
   4803 		v.AuxInt = off1 + off2
   4804 		v.Aux = sym
   4805 		v.AddArg(ptr)
   4806 		v.AddArg(mem)
   4807 		return true
   4808 	}
   4809 	// match: (MOVWload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
   4810 	// cond: canMergeSym(sym1,sym2)
   4811 	// result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
   4812 	for {
   4813 		off1 := v.AuxInt
   4814 		sym1 := v.Aux
   4815 		_ = v.Args[1]
   4816 		v_0 := v.Args[0]
   4817 		if v_0.Op != OpMIPSMOVWaddr {
   4818 			break
   4819 		}
   4820 		off2 := v_0.AuxInt
   4821 		sym2 := v_0.Aux
   4822 		ptr := v_0.Args[0]
   4823 		mem := v.Args[1]
   4824 		if !(canMergeSym(sym1, sym2)) {
   4825 			break
   4826 		}
   4827 		v.reset(OpMIPSMOVWload)
   4828 		v.AuxInt = off1 + off2
   4829 		v.Aux = mergeSym(sym1, sym2)
   4830 		v.AddArg(ptr)
   4831 		v.AddArg(mem)
   4832 		return true
   4833 	}
   4834 	// match: (MOVWload [off] {sym} ptr (MOVWstore [off2] {sym2} ptr2 x _))
   4835 	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
   4836 	// result: x
   4837 	for {
   4838 		off := v.AuxInt
   4839 		sym := v.Aux
   4840 		_ = v.Args[1]
   4841 		ptr := v.Args[0]
   4842 		v_1 := v.Args[1]
   4843 		if v_1.Op != OpMIPSMOVWstore {
   4844 			break
   4845 		}
   4846 		off2 := v_1.AuxInt
   4847 		sym2 := v_1.Aux
   4848 		_ = v_1.Args[2]
   4849 		ptr2 := v_1.Args[0]
   4850 		x := v_1.Args[1]
   4851 		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
   4852 			break
   4853 		}
   4854 		v.reset(OpCopy)
   4855 		v.Type = x.Type
   4856 		v.AddArg(x)
   4857 		return true
   4858 	}
   4859 	return false
   4860 }
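// Summary of the rules below: a MOVWreg of a single-use value collapses to
// MOVWnop, and a MOVWreg of a constant is just that constant.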
   4861 func rewriteValueMIPS_OpMIPSMOVWreg_0(v *Value) bool {
   4862 	// match: (MOVWreg x)
   4863 	// cond: x.Uses == 1
   4864 	// result: (MOVWnop x)
   4865 	for {
   4866 		x := v.Args[0]
   4867 		if !(x.Uses == 1) {
   4868 			break
   4869 		}
   4870 		v.reset(OpMIPSMOVWnop)
   4871 		v.AddArg(x)
   4872 		return true
   4873 	}
   4874 	// match: (MOVWreg (MOVWconst [c]))
   4875 	// cond:
   4876 	// result: (MOVWconst [c])
   4877 	for {
   4878 		v_0 := v.Args[0]
   4879 		if v_0.Op != OpMIPSMOVWconst {
   4880 			break
   4881 		}
   4882 		c := v_0.AuxInt
   4883 		v.reset(OpMIPSMOVWconst)
   4884 		v.AuxInt = c
   4885 		return true
   4886 	}
   4887 	return false
   4888 }
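// Summary of the rules below: a word store folds offsets and symbols into its
// addressing, a store of constant zero becomes MOVWstorezero, and a redundant
// MOVWreg on the stored value is dropped.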
   4889 func rewriteValueMIPS_OpMIPSMOVWstore_0(v *Value) bool {
   4890 	// match: (MOVWstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
   4891 	// cond: (is16Bit(off1+off2) || x.Uses == 1)
   4892 	// result: (MOVWstore [off1+off2] {sym} ptr val mem)
   4893 	for {
   4894 		off1 := v.AuxInt
   4895 		sym := v.Aux
   4896 		_ = v.Args[2]
   4897 		x := v.Args[0]
   4898 		if x.Op != OpMIPSADDconst {
   4899 			break
   4900 		}
   4901 		off2 := x.AuxInt
   4902 		ptr := x.Args[0]
   4903 		val := v.Args[1]
   4904 		mem := v.Args[2]
   4905 		if !(is16Bit(off1+off2) || x.Uses == 1) {
   4906 			break
   4907 		}
   4908 		v.reset(OpMIPSMOVWstore)
   4909 		v.AuxInt = off1 + off2
   4910 		v.Aux = sym
   4911 		v.AddArg(ptr)
   4912 		v.AddArg(val)
   4913 		v.AddArg(mem)
   4914 		return true
   4915 	}
   4916 	// match: (MOVWstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
   4917 	// cond: canMergeSym(sym1,sym2)
   4918 	// result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
   4919 	for {
   4920 		off1 := v.AuxInt
   4921 		sym1 := v.Aux
   4922 		_ = v.Args[2]
   4923 		v_0 := v.Args[0]
   4924 		if v_0.Op != OpMIPSMOVWaddr {
   4925 			break
   4926 		}
   4927 		off2 := v_0.AuxInt
   4928 		sym2 := v_0.Aux
   4929 		ptr := v_0.Args[0]
   4930 		val := v.Args[1]
   4931 		mem := v.Args[2]
   4932 		if !(canMergeSym(sym1, sym2)) {
   4933 			break
   4934 		}
   4935 		v.reset(OpMIPSMOVWstore)
   4936 		v.AuxInt = off1 + off2
   4937 		v.Aux = mergeSym(sym1, sym2)
   4938 		v.AddArg(ptr)
   4939 		v.AddArg(val)
   4940 		v.AddArg(mem)
   4941 		return true
   4942 	}
   4943 	// match: (MOVWstore [off] {sym} ptr (MOVWconst [0]) mem)
   4944 	// cond:
   4945 	// result: (MOVWstorezero [off] {sym} ptr mem)
   4946 	for {
   4947 		off := v.AuxInt
   4948 		sym := v.Aux
   4949 		_ = v.Args[2]
   4950 		ptr := v.Args[0]
   4951 		v_1 := v.Args[1]
   4952 		if v_1.Op != OpMIPSMOVWconst {
   4953 			break
   4954 		}
   4955 		if v_1.AuxInt != 0 {
   4956 			break
   4957 		}
   4958 		mem := v.Args[2]
   4959 		v.reset(OpMIPSMOVWstorezero)
   4960 		v.AuxInt = off
   4961 		v.Aux = sym
   4962 		v.AddArg(ptr)
   4963 		v.AddArg(mem)
   4964 		return true
   4965 	}
   4966 	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
   4967 	// cond:
   4968 	// result: (MOVWstore [off] {sym} ptr x mem)
   4969 	for {
   4970 		off := v.AuxInt
   4971 		sym := v.Aux
   4972 		_ = v.Args[2]
   4973 		ptr := v.Args[0]
   4974 		v_1 := v.Args[1]
   4975 		if v_1.Op != OpMIPSMOVWreg {
   4976 			break
   4977 		}
   4978 		x := v_1.Args[0]
   4979 		mem := v.Args[2]
   4980 		v.reset(OpMIPSMOVWstore)
   4981 		v.AuxInt = off
   4982 		v.Aux = sym
   4983 		v.AddArg(ptr)
   4984 		v.AddArg(x)
   4985 		v.AddArg(mem)
   4986 		return true
   4987 	}
   4988 	return false
   4989 }
   4990 func rewriteValueMIPS_OpMIPSMOVWstorezero_0(v *Value) bool {
   4991 	// match: (MOVWstorezero [off1] {sym} x:(ADDconst [off2] ptr) mem)
   4992 	// cond: (is16Bit(off1+off2) || x.Uses == 1)
   4993 	// result: (MOVWstorezero [off1+off2] {sym} ptr mem)
   4994 	for {
   4995 		off1 := v.AuxInt
   4996 		sym := v.Aux
   4997 		_ = v.Args[1]
   4998 		x := v.Args[0]
   4999 		if x.Op != OpMIPSADDconst {
   5000 			break
   5001 		}
   5002 		off2 := x.AuxInt
   5003 		ptr := x.Args[0]
   5004 		mem := v.Args[1]
   5005 		if !(is16Bit(off1+off2) || x.Uses == 1) {
   5006 			break
   5007 		}
   5008 		v.reset(OpMIPSMOVWstorezero)
   5009 		v.AuxInt = off1 + off2
   5010 		v.Aux = sym
   5011 		v.AddArg(ptr)
   5012 		v.AddArg(mem)
   5013 		return true
   5014 	}
   5015 	// match: (MOVWstorezero [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
   5016 	// cond: canMergeSym(sym1,sym2)
   5017 	// result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
   5018 	for {
   5019 		off1 := v.AuxInt
   5020 		sym1 := v.Aux
   5021 		_ = v.Args[1]
   5022 		v_0 := v.Args[0]
   5023 		if v_0.Op != OpMIPSMOVWaddr {
   5024 			break
   5025 		}
   5026 		off2 := v_0.AuxInt
   5027 		sym2 := v_0.Aux
   5028 		ptr := v_0.Args[0]
   5029 		mem := v.Args[1]
   5030 		if !(canMergeSym(sym1, sym2)) {
   5031 			break
   5032 		}
   5033 		v.reset(OpMIPSMOVWstorezero)
   5034 		v.AuxInt = off1 + off2
   5035 		v.Aux = mergeSym(sym1, sym2)
   5036 		v.AddArg(ptr)
   5037 		v.AddArg(mem)
   5038 		return true
   5039 	}
   5040 	return false
   5041 }
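// Summary of the rules below: 32-bit multiplication is strength-reduced.
// Multiplying by 0, 1, or -1 becomes a constant zero, a copy, or a NEG;
// multiplying by a power of two becomes a left shift, so for example
// (MUL x (MOVWconst [8])) rewrites to (SLLconst [3] x); and a product of two
// constants is folded at compile time.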
   5042 func rewriteValueMIPS_OpMIPSMUL_0(v *Value) bool {
   5043 	// match: (MUL (MOVWconst [0]) _)
   5044 	// cond:
   5045 	// result: (MOVWconst [0])
   5046 	for {
   5047 		_ = v.Args[1]
   5048 		v_0 := v.Args[0]
   5049 		if v_0.Op != OpMIPSMOVWconst {
   5050 			break
   5051 		}
   5052 		if v_0.AuxInt != 0 {
   5053 			break
   5054 		}
   5055 		v.reset(OpMIPSMOVWconst)
   5056 		v.AuxInt = 0
   5057 		return true
   5058 	}
   5059 	// match: (MUL _ (MOVWconst [0]))
   5060 	// cond:
   5061 	// result: (MOVWconst [0])
   5062 	for {
   5063 		_ = v.Args[1]
   5064 		v_1 := v.Args[1]
   5065 		if v_1.Op != OpMIPSMOVWconst {
   5066 			break
   5067 		}
   5068 		if v_1.AuxInt != 0 {
   5069 			break
   5070 		}
   5071 		v.reset(OpMIPSMOVWconst)
   5072 		v.AuxInt = 0
   5073 		return true
   5074 	}
   5075 	// match: (MUL (MOVWconst [1]) x)
   5076 	// cond:
   5077 	// result: x
   5078 	for {
   5079 		_ = v.Args[1]
   5080 		v_0 := v.Args[0]
   5081 		if v_0.Op != OpMIPSMOVWconst {
   5082 			break
   5083 		}
   5084 		if v_0.AuxInt != 1 {
   5085 			break
   5086 		}
   5087 		x := v.Args[1]
   5088 		v.reset(OpCopy)
   5089 		v.Type = x.Type
   5090 		v.AddArg(x)
   5091 		return true
   5092 	}
   5093 	// match: (MUL x (MOVWconst [1]))
   5094 	// cond:
   5095 	// result: x
   5096 	for {
   5097 		_ = v.Args[1]
   5098 		x := v.Args[0]
   5099 		v_1 := v.Args[1]
   5100 		if v_1.Op != OpMIPSMOVWconst {
   5101 			break
   5102 		}
   5103 		if v_1.AuxInt != 1 {
   5104 			break
   5105 		}
   5106 		v.reset(OpCopy)
   5107 		v.Type = x.Type
   5108 		v.AddArg(x)
   5109 		return true
   5110 	}
   5111 	// match: (MUL (MOVWconst [-1]) x)
   5112 	// cond:
   5113 	// result: (NEG x)
   5114 	for {
   5115 		_ = v.Args[1]
   5116 		v_0 := v.Args[0]
   5117 		if v_0.Op != OpMIPSMOVWconst {
   5118 			break
   5119 		}
   5120 		if v_0.AuxInt != -1 {
   5121 			break
   5122 		}
   5123 		x := v.Args[1]
   5124 		v.reset(OpMIPSNEG)
   5125 		v.AddArg(x)
   5126 		return true
   5127 	}
   5128 	// match: (MUL x (MOVWconst [-1]))
   5129 	// cond:
   5130 	// result: (NEG x)
   5131 	for {
   5132 		_ = v.Args[1]
   5133 		x := v.Args[0]
   5134 		v_1 := v.Args[1]
   5135 		if v_1.Op != OpMIPSMOVWconst {
   5136 			break
   5137 		}
   5138 		if v_1.AuxInt != -1 {
   5139 			break
   5140 		}
   5141 		v.reset(OpMIPSNEG)
   5142 		v.AddArg(x)
   5143 		return true
   5144 	}
   5145 	// match: (MUL (MOVWconst [c]) x)
   5146 	// cond: isPowerOfTwo(int64(uint32(c)))
   5147 	// result: (SLLconst [log2(int64(uint32(c)))] x)
   5148 	for {
   5149 		_ = v.Args[1]
   5150 		v_0 := v.Args[0]
   5151 		if v_0.Op != OpMIPSMOVWconst {
   5152 			break
   5153 		}
   5154 		c := v_0.AuxInt
   5155 		x := v.Args[1]
   5156 		if !(isPowerOfTwo(int64(uint32(c)))) {
   5157 			break
   5158 		}
   5159 		v.reset(OpMIPSSLLconst)
   5160 		v.AuxInt = log2(int64(uint32(c)))
   5161 		v.AddArg(x)
   5162 		return true
   5163 	}
   5164 	// match: (MUL x (MOVWconst [c]))
   5165 	// cond: isPowerOfTwo(int64(uint32(c)))
   5166 	// result: (SLLconst [log2(int64(uint32(c)))] x)
   5167 	for {
   5168 		_ = v.Args[1]
   5169 		x := v.Args[0]
   5170 		v_1 := v.Args[1]
   5171 		if v_1.Op != OpMIPSMOVWconst {
   5172 			break
   5173 		}
   5174 		c := v_1.AuxInt
   5175 		if !(isPowerOfTwo(int64(uint32(c)))) {
   5176 			break
   5177 		}
   5178 		v.reset(OpMIPSSLLconst)
   5179 		v.AuxInt = log2(int64(uint32(c)))
   5180 		v.AddArg(x)
   5181 		return true
   5182 	}
   5183 	// match: (MUL (MOVWconst [c]) (MOVWconst [d]))
   5184 	// cond:
   5185 	// result: (MOVWconst [int64(int32(c)*int32(d))])
   5186 	for {
   5187 		_ = v.Args[1]
   5188 		v_0 := v.Args[0]
   5189 		if v_0.Op != OpMIPSMOVWconst {
   5190 			break
   5191 		}
   5192 		c := v_0.AuxInt
   5193 		v_1 := v.Args[1]
   5194 		if v_1.Op != OpMIPSMOVWconst {
   5195 			break
   5196 		}
   5197 		d := v_1.AuxInt
   5198 		v.reset(OpMIPSMOVWconst)
   5199 		v.AuxInt = int64(int32(c) * int32(d))
   5200 		return true
   5201 	}
   5202 	// match: (MUL (MOVWconst [d]) (MOVWconst [c]))
   5203 	// cond:
   5204 	// result: (MOVWconst [int64(int32(c)*int32(d))])
   5205 	for {
   5206 		_ = v.Args[1]
   5207 		v_0 := v.Args[0]
   5208 		if v_0.Op != OpMIPSMOVWconst {
   5209 			break
   5210 		}
   5211 		d := v_0.AuxInt
   5212 		v_1 := v.Args[1]
   5213 		if v_1.Op != OpMIPSMOVWconst {
   5214 			break
   5215 		}
   5216 		c := v_1.AuxInt
   5217 		v.reset(OpMIPSMOVWconst)
   5218 		v.AuxInt = int64(int32(c) * int32(d))
   5219 		return true
   5220 	}
   5221 	return false
   5222 }
   5223 func rewriteValueMIPS_OpMIPSNEG_0(v *Value) bool {
   5224 	// match: (NEG (MOVWconst [c]))
   5225 	// cond:
   5226 	// result: (MOVWconst [int64(int32(-c))])
   5227 	for {
   5228 		v_0 := v.Args[0]
   5229 		if v_0.Op != OpMIPSMOVWconst {
   5230 			break
   5231 		}
   5232 		c := v_0.AuxInt
   5233 		v.reset(OpMIPSMOVWconst)
   5234 		v.AuxInt = int64(int32(-c))
   5235 		return true
   5236 	}
   5237 	return false
   5238 }
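// Summary of the rules below: NOR with a constant operand on either side is
// canonicalized into NORconst.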
   5239 func rewriteValueMIPS_OpMIPSNOR_0(v *Value) bool {
   5240 	// match: (NOR x (MOVWconst [c]))
   5241 	// cond:
   5242 	// result: (NORconst [c] x)
   5243 	for {
   5244 		_ = v.Args[1]
   5245 		x := v.Args[0]
   5246 		v_1 := v.Args[1]
   5247 		if v_1.Op != OpMIPSMOVWconst {
   5248 			break
   5249 		}
   5250 		c := v_1.AuxInt
   5251 		v.reset(OpMIPSNORconst)
   5252 		v.AuxInt = c
   5253 		v.AddArg(x)
   5254 		return true
   5255 	}
   5256 	// match: (NOR (MOVWconst [c]) x)
   5257 	// cond:
   5258 	// result: (NORconst [c] x)
   5259 	for {
   5260 		_ = v.Args[1]
   5261 		v_0 := v.Args[0]
   5262 		if v_0.Op != OpMIPSMOVWconst {
   5263 			break
   5264 		}
   5265 		c := v_0.AuxInt
   5266 		x := v.Args[1]
   5267 		v.reset(OpMIPSNORconst)
   5268 		v.AuxInt = c
   5269 		v.AddArg(x)
   5270 		return true
   5271 	}
   5272 	return false
   5273 }
   5274 func rewriteValueMIPS_OpMIPSNORconst_0(v *Value) bool {
   5275 	// match: (NORconst [c] (MOVWconst [d]))
   5276 	// cond:
   5277 	// result: (MOVWconst [^(c|d)])
   5278 	for {
   5279 		c := v.AuxInt
   5280 		v_0 := v.Args[0]
   5281 		if v_0.Op != OpMIPSMOVWconst {
   5282 			break
   5283 		}
   5284 		d := v_0.AuxInt
   5285 		v.reset(OpMIPSMOVWconst)
   5286 		v.AuxInt = ^(c | d)
   5287 		return true
   5288 	}
   5289 	return false
   5290 }
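// Summary of the rules below: OR with a constant operand becomes ORconst,
// (OR x x) simplifies to x, and two SGTUzero ("is nonzero") results are
// merged into a single SGTUzero of the OR of their operands, since
// (x != 0) || (y != 0) is equivalent to (x|y) != 0.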
   5291 func rewriteValueMIPS_OpMIPSOR_0(v *Value) bool {
   5292 	b := v.Block
   5293 	_ = b
   5294 	// match: (OR x (MOVWconst [c]))
   5295 	// cond:
   5296 	// result: (ORconst  [c] x)
   5297 	for {
   5298 		_ = v.Args[1]
   5299 		x := v.Args[0]
   5300 		v_1 := v.Args[1]
   5301 		if v_1.Op != OpMIPSMOVWconst {
   5302 			break
   5303 		}
   5304 		c := v_1.AuxInt
   5305 		v.reset(OpMIPSORconst)
   5306 		v.AuxInt = c
   5307 		v.AddArg(x)
   5308 		return true
   5309 	}
   5310 	// match: (OR (MOVWconst [c]) x)
   5311 	// cond:
   5312 	// result: (ORconst  [c] x)
   5313 	for {
   5314 		_ = v.Args[1]
   5315 		v_0 := v.Args[0]
   5316 		if v_0.Op != OpMIPSMOVWconst {
   5317 			break
   5318 		}
   5319 		c := v_0.AuxInt
   5320 		x := v.Args[1]
   5321 		v.reset(OpMIPSORconst)
   5322 		v.AuxInt = c
   5323 		v.AddArg(x)
   5324 		return true
   5325 	}
   5326 	// match: (OR x x)
   5327 	// cond:
   5328 	// result: x
   5329 	for {
   5330 		_ = v.Args[1]
   5331 		x := v.Args[0]
   5332 		if x != v.Args[1] {
   5333 			break
   5334 		}
   5335 		v.reset(OpCopy)
   5336 		v.Type = x.Type
   5337 		v.AddArg(x)
   5338 		return true
   5339 	}
   5340 	// match: (OR (SGTUzero x) (SGTUzero y))
   5341 	// cond:
   5342 	// result: (SGTUzero (OR <x.Type> x y))
   5343 	for {
   5344 		_ = v.Args[1]
   5345 		v_0 := v.Args[0]
   5346 		if v_0.Op != OpMIPSSGTUzero {
   5347 			break
   5348 		}
   5349 		x := v_0.Args[0]
   5350 		v_1 := v.Args[1]
   5351 		if v_1.Op != OpMIPSSGTUzero {
   5352 			break
   5353 		}
   5354 		y := v_1.Args[0]
   5355 		v.reset(OpMIPSSGTUzero)
   5356 		v0 := b.NewValue0(v.Pos, OpMIPSOR, x.Type)
   5357 		v0.AddArg(x)
   5358 		v0.AddArg(y)
   5359 		v.AddArg(v0)
   5360 		return true
   5361 	}
   5362 	// match: (OR (SGTUzero y) (SGTUzero x))
   5363 	// cond:
   5364 	// result: (SGTUzero (OR <x.Type> x y))
   5365 	for {
   5366 		_ = v.Args[1]
   5367 		v_0 := v.Args[0]
   5368 		if v_0.Op != OpMIPSSGTUzero {
   5369 			break
   5370 		}
   5371 		y := v_0.Args[0]
   5372 		v_1 := v.Args[1]
   5373 		if v_1.Op != OpMIPSSGTUzero {
   5374 			break
   5375 		}
   5376 		x := v_1.Args[0]
   5377 		v.reset(OpMIPSSGTUzero)
   5378 		v0 := b.NewValue0(v.Pos, OpMIPSOR, x.Type)
   5379 		v0.AddArg(x)
   5380 		v0.AddArg(y)
   5381 		v.AddArg(v0)
   5382 		return true
   5383 	}
   5384 	return false
   5385 }
   5386 func rewriteValueMIPS_OpMIPSORconst_0(v *Value) bool {
   5387 	// match: (ORconst [0] x)
   5388 	// cond:
   5389 	// result: x
   5390 	for {
   5391 		if v.AuxInt != 0 {
   5392 			break
   5393 		}
   5394 		x := v.Args[0]
   5395 		v.reset(OpCopy)
   5396 		v.Type = x.Type
   5397 		v.AddArg(x)
   5398 		return true
   5399 	}
   5400 	// match: (ORconst [-1] _)
   5401 	// cond:
   5402 	// result: (MOVWconst [-1])
   5403 	for {
   5404 		if v.AuxInt != -1 {
   5405 			break
   5406 		}
   5407 		v.reset(OpMIPSMOVWconst)
   5408 		v.AuxInt = -1
   5409 		return true
   5410 	}
   5411 	// match: (ORconst [c] (MOVWconst [d]))
   5412 	// cond:
   5413 	// result: (MOVWconst [c|d])
   5414 	for {
   5415 		c := v.AuxInt
   5416 		v_0 := v.Args[0]
   5417 		if v_0.Op != OpMIPSMOVWconst {
   5418 			break
   5419 		}
   5420 		d := v_0.AuxInt
   5421 		v.reset(OpMIPSMOVWconst)
   5422 		v.AuxInt = c | d
   5423 		return true
   5424 	}
   5425 	// match: (ORconst [c] (ORconst [d] x))
   5426 	// cond:
   5427 	// result: (ORconst [c|d] x)
   5428 	for {
   5429 		c := v.AuxInt
   5430 		v_0 := v.Args[0]
   5431 		if v_0.Op != OpMIPSORconst {
   5432 			break
   5433 		}
   5434 		d := v_0.AuxInt
   5435 		x := v_0.Args[0]
   5436 		v.reset(OpMIPSORconst)
   5437 		v.AuxInt = c | d
   5438 		v.AddArg(x)
   5439 		return true
   5440 	}
   5441 	return false
   5442 }
   5443 func rewriteValueMIPS_OpMIPSSGT_0(v *Value) bool {
   5444 	// match: (SGT (MOVWconst [c]) x)
   5445 	// cond:
   5446 	// result: (SGTconst  [c] x)
   5447 	for {
   5448 		_ = v.Args[1]
   5449 		v_0 := v.Args[0]
   5450 		if v_0.Op != OpMIPSMOVWconst {
   5451 			break
   5452 		}
   5453 		c := v_0.AuxInt
   5454 		x := v.Args[1]
   5455 		v.reset(OpMIPSSGTconst)
   5456 		v.AuxInt = c
   5457 		v.AddArg(x)
   5458 		return true
   5459 	}
   5460 	// match: (SGT x (MOVWconst [0]))
   5461 	// cond:
   5462 	// result: (SGTzero x)
   5463 	for {
   5464 		_ = v.Args[1]
   5465 		x := v.Args[0]
   5466 		v_1 := v.Args[1]
   5467 		if v_1.Op != OpMIPSMOVWconst {
   5468 			break
   5469 		}
   5470 		if v_1.AuxInt != 0 {
   5471 			break
   5472 		}
   5473 		v.reset(OpMIPSSGTzero)
   5474 		v.AddArg(x)
   5475 		return true
   5476 	}
   5477 	return false
   5478 }
   5479 func rewriteValueMIPS_OpMIPSSGTU_0(v *Value) bool {
   5480 	// match: (SGTU (MOVWconst [c]) x)
   5481 	// cond:
   5482 	// result: (SGTUconst [c] x)
   5483 	for {
   5484 		_ = v.Args[1]
   5485 		v_0 := v.Args[0]
   5486 		if v_0.Op != OpMIPSMOVWconst {
   5487 			break
   5488 		}
   5489 		c := v_0.AuxInt
   5490 		x := v.Args[1]
   5491 		v.reset(OpMIPSSGTUconst)
   5492 		v.AuxInt = c
   5493 		v.AddArg(x)
   5494 		return true
   5495 	}
   5496 	// match: (SGTU x (MOVWconst [0]))
   5497 	// cond:
   5498 	// result: (SGTUzero x)
   5499 	for {
   5500 		_ = v.Args[1]
   5501 		x := v.Args[0]
   5502 		v_1 := v.Args[1]
   5503 		if v_1.Op != OpMIPSMOVWconst {
   5504 			break
   5505 		}
   5506 		if v_1.AuxInt != 0 {
   5507 			break
   5508 		}
   5509 		v.reset(OpMIPSSGTUzero)
   5510 		v.AddArg(x)
   5511 		return true
   5512 	}
   5513 	return false
   5514 }
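// Summary of the rules below: "constant c is unsigned-greater-than x" is
// evaluated at compile time whenever the answer is known, either against
// another constant or against an operand whose unsigned range is bounded by
// a zero-extension, an ANDconst mask, or a constant right shift.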
   5515 func rewriteValueMIPS_OpMIPSSGTUconst_0(v *Value) bool {
   5516 	// match: (SGTUconst [c] (MOVWconst [d]))
   5517 	// cond: uint32(c)>uint32(d)
   5518 	// result: (MOVWconst [1])
   5519 	for {
   5520 		c := v.AuxInt
   5521 		v_0 := v.Args[0]
   5522 		if v_0.Op != OpMIPSMOVWconst {
   5523 			break
   5524 		}
   5525 		d := v_0.AuxInt
   5526 		if !(uint32(c) > uint32(d)) {
   5527 			break
   5528 		}
   5529 		v.reset(OpMIPSMOVWconst)
   5530 		v.AuxInt = 1
   5531 		return true
   5532 	}
   5533 	// match: (SGTUconst [c] (MOVWconst [d]))
   5534 	// cond: uint32(c)<=uint32(d)
   5535 	// result: (MOVWconst [0])
   5536 	for {
   5537 		c := v.AuxInt
   5538 		v_0 := v.Args[0]
   5539 		if v_0.Op != OpMIPSMOVWconst {
   5540 			break
   5541 		}
   5542 		d := v_0.AuxInt
   5543 		if !(uint32(c) <= uint32(d)) {
   5544 			break
   5545 		}
   5546 		v.reset(OpMIPSMOVWconst)
   5547 		v.AuxInt = 0
   5548 		return true
   5549 	}
   5550 	// match: (SGTUconst [c] (MOVBUreg _))
   5551 	// cond: 0xff < uint32(c)
   5552 	// result: (MOVWconst [1])
   5553 	for {
   5554 		c := v.AuxInt
   5555 		v_0 := v.Args[0]
   5556 		if v_0.Op != OpMIPSMOVBUreg {
   5557 			break
   5558 		}
   5559 		if !(0xff < uint32(c)) {
   5560 			break
   5561 		}
   5562 		v.reset(OpMIPSMOVWconst)
   5563 		v.AuxInt = 1
   5564 		return true
   5565 	}
   5566 	// match: (SGTUconst [c] (MOVHUreg _))
   5567 	// cond: 0xffff < uint32(c)
   5568 	// result: (MOVWconst [1])
   5569 	for {
   5570 		c := v.AuxInt
   5571 		v_0 := v.Args[0]
   5572 		if v_0.Op != OpMIPSMOVHUreg {
   5573 			break
   5574 		}
   5575 		if !(0xffff < uint32(c)) {
   5576 			break
   5577 		}
   5578 		v.reset(OpMIPSMOVWconst)
   5579 		v.AuxInt = 1
   5580 		return true
   5581 	}
   5582 	// match: (SGTUconst [c] (ANDconst [m] _))
   5583 	// cond: uint32(m) < uint32(c)
   5584 	// result: (MOVWconst [1])
   5585 	for {
   5586 		c := v.AuxInt
   5587 		v_0 := v.Args[0]
   5588 		if v_0.Op != OpMIPSANDconst {
   5589 			break
   5590 		}
   5591 		m := v_0.AuxInt
   5592 		if !(uint32(m) < uint32(c)) {
   5593 			break
   5594 		}
   5595 		v.reset(OpMIPSMOVWconst)
   5596 		v.AuxInt = 1
   5597 		return true
   5598 	}
   5599 	// match: (SGTUconst [c] (SRLconst _ [d]))
   5600 	// cond: uint32(d) <= 31 && 1<<(32-uint32(d)) <= uint32(c)
   5601 	// result: (MOVWconst [1])
   5602 	for {
   5603 		c := v.AuxInt
   5604 		v_0 := v.Args[0]
   5605 		if v_0.Op != OpMIPSSRLconst {
   5606 			break
   5607 		}
   5608 		d := v_0.AuxInt
   5609 		if !(uint32(d) <= 31 && 1<<(32-uint32(d)) <= uint32(c)) {
   5610 			break
   5611 		}
   5612 		v.reset(OpMIPSMOVWconst)
   5613 		v.AuxInt = 1
   5614 		return true
   5615 	}
   5616 	return false
   5617 }
   5618 func rewriteValueMIPS_OpMIPSSGTUzero_0(v *Value) bool {
   5619 	// match: (SGTUzero (MOVWconst [d]))
   5620 	// cond: uint32(d) != 0
   5621 	// result: (MOVWconst [1])
   5622 	for {
   5623 		v_0 := v.Args[0]
   5624 		if v_0.Op != OpMIPSMOVWconst {
   5625 			break
   5626 		}
   5627 		d := v_0.AuxInt
   5628 		if !(uint32(d) != 0) {
   5629 			break
   5630 		}
   5631 		v.reset(OpMIPSMOVWconst)
   5632 		v.AuxInt = 1
   5633 		return true
   5634 	}
   5635 	// match: (SGTUzero (MOVWconst [d]))
   5636 	// cond: uint32(d) == 0
   5637 	// result: (MOVWconst [0])
   5638 	for {
   5639 		v_0 := v.Args[0]
   5640 		if v_0.Op != OpMIPSMOVWconst {
   5641 			break
   5642 		}
   5643 		d := v_0.AuxInt
   5644 		if !(uint32(d) == 0) {
   5645 			break
   5646 		}
   5647 		v.reset(OpMIPSMOVWconst)
   5648 		v.AuxInt = 0
   5649 		return true
   5650 	}
   5651 	return false
   5652 }
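// Summary of the rules below (continued in the _10 helper that follows):
// "constant c is signed-greater-than x" is evaluated at compile time when x's
// range is known, either against another constant or against sign- or
// zero-extended byte and halfword values whose bounds decide the comparison
// either way.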
   5653 func rewriteValueMIPS_OpMIPSSGTconst_0(v *Value) bool {
   5654 	// match: (SGTconst [c] (MOVWconst [d]))
   5655 	// cond: int32(c) > int32(d)
   5656 	// result: (MOVWconst [1])
   5657 	for {
   5658 		c := v.AuxInt
   5659 		v_0 := v.Args[0]
   5660 		if v_0.Op != OpMIPSMOVWconst {
   5661 			break
   5662 		}
   5663 		d := v_0.AuxInt
   5664 		if !(int32(c) > int32(d)) {
   5665 			break
   5666 		}
   5667 		v.reset(OpMIPSMOVWconst)
   5668 		v.AuxInt = 1
   5669 		return true
   5670 	}
   5671 	// match: (SGTconst [c] (MOVWconst [d]))
   5672 	// cond: int32(c) <= int32(d)
   5673 	// result: (MOVWconst [0])
   5674 	for {
   5675 		c := v.AuxInt
   5676 		v_0 := v.Args[0]
   5677 		if v_0.Op != OpMIPSMOVWconst {
   5678 			break
   5679 		}
   5680 		d := v_0.AuxInt
   5681 		if !(int32(c) <= int32(d)) {
   5682 			break
   5683 		}
   5684 		v.reset(OpMIPSMOVWconst)
   5685 		v.AuxInt = 0
   5686 		return true
   5687 	}
   5688 	// match: (SGTconst [c] (MOVBreg _))
   5689 	// cond: 0x7f < int32(c)
   5690 	// result: (MOVWconst [1])
   5691 	for {
   5692 		c := v.AuxInt
   5693 		v_0 := v.Args[0]
   5694 		if v_0.Op != OpMIPSMOVBreg {
   5695 			break
   5696 		}
   5697 		if !(0x7f < int32(c)) {
   5698 			break
   5699 		}
   5700 		v.reset(OpMIPSMOVWconst)
   5701 		v.AuxInt = 1
   5702 		return true
   5703 	}
   5704 	// match: (SGTconst [c] (MOVBreg _))
   5705 	// cond: int32(c) <= -0x80
   5706 	// result: (MOVWconst [0])
   5707 	for {
   5708 		c := v.AuxInt
   5709 		v_0 := v.Args[0]
   5710 		if v_0.Op != OpMIPSMOVBreg {
   5711 			break
   5712 		}
   5713 		if !(int32(c) <= -0x80) {
   5714 			break
   5715 		}
   5716 		v.reset(OpMIPSMOVWconst)
   5717 		v.AuxInt = 0
   5718 		return true
   5719 	}
   5720 	// match: (SGTconst [c] (MOVBUreg _))
   5721 	// cond: 0xff < int32(c)
   5722 	// result: (MOVWconst [1])
   5723 	for {
   5724 		c := v.AuxInt
   5725 		v_0 := v.Args[0]
   5726 		if v_0.Op != OpMIPSMOVBUreg {
   5727 			break
   5728 		}
   5729 		if !(0xff < int32(c)) {
   5730 			break
   5731 		}
   5732 		v.reset(OpMIPSMOVWconst)
   5733 		v.AuxInt = 1
   5734 		return true
   5735 	}
   5736 	// match: (SGTconst [c] (MOVBUreg _))
   5737 	// cond: int32(c) < 0
   5738 	// result: (MOVWconst [0])
   5739 	for {
   5740 		c := v.AuxInt
   5741 		v_0 := v.Args[0]
   5742 		if v_0.Op != OpMIPSMOVBUreg {
   5743 			break
   5744 		}
   5745 		if !(int32(c) < 0) {
   5746 			break
   5747 		}
   5748 		v.reset(OpMIPSMOVWconst)
   5749 		v.AuxInt = 0
   5750 		return true
   5751 	}
   5752 	// match: (SGTconst [c] (MOVHreg _))
   5753 	// cond: 0x7fff < int32(c)
   5754 	// result: (MOVWconst [1])
   5755 	for {
   5756 		c := v.AuxInt
   5757 		v_0 := v.Args[0]
   5758 		if v_0.Op != OpMIPSMOVHreg {
   5759 			break
   5760 		}
   5761 		if !(0x7fff < int32(c)) {
   5762 			break
   5763 		}
   5764 		v.reset(OpMIPSMOVWconst)
   5765 		v.AuxInt = 1
   5766 		return true
   5767 	}
   5768 	// match: (SGTconst [c] (MOVHreg _))
   5769 	// cond: int32(c) <= -0x8000
   5770 	// result: (MOVWconst [0])
   5771 	for {
   5772 		c := v.AuxInt
   5773 		v_0 := v.Args[0]
   5774 		if v_0.Op != OpMIPSMOVHreg {
   5775 			break
   5776 		}
   5777 		if !(int32(c) <= -0x8000) {
   5778 			break
   5779 		}
   5780 		v.reset(OpMIPSMOVWconst)
   5781 		v.AuxInt = 0
   5782 		return true
   5783 	}
   5784 	// match: (SGTconst [c] (MOVHUreg _))
   5785 	// cond: 0xffff < int32(c)
   5786 	// result: (MOVWconst [1])
   5787 	for {
   5788 		c := v.AuxInt
   5789 		v_0 := v.Args[0]
   5790 		if v_0.Op != OpMIPSMOVHUreg {
   5791 			break
   5792 		}
   5793 		if !(0xffff < int32(c)) {
   5794 			break
   5795 		}
   5796 		v.reset(OpMIPSMOVWconst)
   5797 		v.AuxInt = 1
   5798 		return true
   5799 	}
   5800 	// match: (SGTconst [c] (MOVHUreg _))
   5801 	// cond: int32(c) < 0
   5802 	// result: (MOVWconst [0])
   5803 	for {
   5804 		c := v.AuxInt
   5805 		v_0 := v.Args[0]
   5806 		if v_0.Op != OpMIPSMOVHUreg {
   5807 			break
   5808 		}
   5809 		if !(int32(c) < 0) {
   5810 			break
   5811 		}
   5812 		v.reset(OpMIPSMOVWconst)
   5813 		v.AuxInt = 0
   5814 		return true
   5815 	}
   5816 	return false
   5817 }
   5818 func rewriteValueMIPS_OpMIPSSGTconst_10(v *Value) bool {
   5819 	// match: (SGTconst [c] (ANDconst [m] _))
   5820 	// cond: 0 <= int32(m) && int32(m) < int32(c)
   5821 	// result: (MOVWconst [1])
   5822 	for {
   5823 		c := v.AuxInt
   5824 		v_0 := v.Args[0]
   5825 		if v_0.Op != OpMIPSANDconst {
   5826 			break
   5827 		}
   5828 		m := v_0.AuxInt
   5829 		if !(0 <= int32(m) && int32(m) < int32(c)) {
   5830 			break
   5831 		}
   5832 		v.reset(OpMIPSMOVWconst)
   5833 		v.AuxInt = 1
   5834 		return true
   5835 	}
   5836 	// match: (SGTconst [c] (SRLconst _ [d]))
   5837 	// cond: 0 <= int32(c) && uint32(d) <= 31 && 1<<(32-uint32(d)) <= int32(c)
   5838 	// result: (MOVWconst [1])
   5839 	for {
   5840 		c := v.AuxInt
   5841 		v_0 := v.Args[0]
   5842 		if v_0.Op != OpMIPSSRLconst {
   5843 			break
   5844 		}
   5845 		d := v_0.AuxInt
   5846 		if !(0 <= int32(c) && uint32(d) <= 31 && 1<<(32-uint32(d)) <= int32(c)) {
   5847 			break
   5848 		}
   5849 		v.reset(OpMIPSMOVWconst)
   5850 		v.AuxInt = 1
   5851 		return true
   5852 	}
   5853 	return false
   5854 }
   5855 func rewriteValueMIPS_OpMIPSSGTzero_0(v *Value) bool {
   5856 	// match: (SGTzero (MOVWconst [d]))
   5857 	// cond: int32(d) > 0
   5858 	// result: (MOVWconst [1])
   5859 	for {
   5860 		v_0 := v.Args[0]
   5861 		if v_0.Op != OpMIPSMOVWconst {
   5862 			break
   5863 		}
   5864 		d := v_0.AuxInt
   5865 		if !(int32(d) > 0) {
   5866 			break
   5867 		}
   5868 		v.reset(OpMIPSMOVWconst)
   5869 		v.AuxInt = 1
   5870 		return true
   5871 	}
   5872 	// match: (SGTzero (MOVWconst [d]))
   5873 	// cond: int32(d) <= 0
   5874 	// result: (MOVWconst [0])
   5875 	for {
   5876 		v_0 := v.Args[0]
   5877 		if v_0.Op != OpMIPSMOVWconst {
   5878 			break
   5879 		}
   5880 		d := v_0.AuxInt
   5881 		if !(int32(d) <= 0) {
   5882 			break
   5883 		}
   5884 		v.reset(OpMIPSMOVWconst)
   5885 		v.AuxInt = 0
   5886 		return true
   5887 	}
   5888 	return false
   5889 }
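// Summary of the rules below: a left shift by a constant amount of 32 or more
// yields zero, and any other constant shift amount is folded into SLLconst.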
   5890 func rewriteValueMIPS_OpMIPSSLL_0(v *Value) bool {
   5891 	// match: (SLL _ (MOVWconst [c]))
   5892 	// cond: uint32(c)>=32
   5893 	// result: (MOVWconst [0])
   5894 	for {
   5895 		_ = v.Args[1]
   5896 		v_1 := v.Args[1]
   5897 		if v_1.Op != OpMIPSMOVWconst {
   5898 			break
   5899 		}
   5900 		c := v_1.AuxInt
   5901 		if !(uint32(c) >= 32) {
   5902 			break
   5903 		}
   5904 		v.reset(OpMIPSMOVWconst)
   5905 		v.AuxInt = 0
   5906 		return true
   5907 	}
   5908 	// match: (SLL x (MOVWconst [c]))
   5909 	// cond:
   5910 	// result: (SLLconst x [c])
   5911 	for {
   5912 		_ = v.Args[1]
   5913 		x := v.Args[0]
   5914 		v_1 := v.Args[1]
   5915 		if v_1.Op != OpMIPSMOVWconst {
   5916 			break
   5917 		}
   5918 		c := v_1.AuxInt
   5919 		v.reset(OpMIPSSLLconst)
   5920 		v.AuxInt = c
   5921 		v.AddArg(x)
   5922 		return true
   5923 	}
   5924 	return false
   5925 }
   5926 func rewriteValueMIPS_OpMIPSSLLconst_0(v *Value) bool {
   5927 	// match: (SLLconst [c] (MOVWconst [d]))
   5928 	// cond:
   5929 	// result: (MOVWconst [int64(int32(uint32(d)<<uint32(c)))])
   5930 	for {
   5931 		c := v.AuxInt
   5932 		v_0 := v.Args[0]
   5933 		if v_0.Op != OpMIPSMOVWconst {
   5934 			break
   5935 		}
   5936 		d := v_0.AuxInt
   5937 		v.reset(OpMIPSMOVWconst)
   5938 		v.AuxInt = int64(int32(uint32(d) << uint32(c)))
   5939 		return true
   5940 	}
   5941 	return false
   5942 }
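// Arithmetic right shifts by a constant of 32 or more are clamped to 31:
// shifting a 32-bit value right by 31 already replicates the sign bit into
// every position, so the clamp preserves the intended result.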
   5943 func rewriteValueMIPS_OpMIPSSRA_0(v *Value) bool {
   5944 	// match: (SRA x (MOVWconst [c]))
   5945 	// cond: uint32(c)>=32
   5946 	// result: (SRAconst x [31])
   5947 	for {
   5948 		_ = v.Args[1]
   5949 		x := v.Args[0]
   5950 		v_1 := v.Args[1]
   5951 		if v_1.Op != OpMIPSMOVWconst {
   5952 			break
   5953 		}
   5954 		c := v_1.AuxInt
   5955 		if !(uint32(c) >= 32) {
   5956 			break
   5957 		}
   5958 		v.reset(OpMIPSSRAconst)
   5959 		v.AuxInt = 31
   5960 		v.AddArg(x)
   5961 		return true
   5962 	}
   5963 	// match: (SRA x (MOVWconst [c]))
   5964 	// cond:
   5965 	// result: (SRAconst x [c])
   5966 	for {
   5967 		_ = v.Args[1]
   5968 		x := v.Args[0]
   5969 		v_1 := v.Args[1]
   5970 		if v_1.Op != OpMIPSMOVWconst {
   5971 			break
   5972 		}
   5973 		c := v_1.AuxInt
   5974 		v.reset(OpMIPSSRAconst)
   5975 		v.AuxInt = c
   5976 		v.AddArg(x)
   5977 		return true
   5978 	}
   5979 	return false
   5980 }
   5981 func rewriteValueMIPS_OpMIPSSRAconst_0(v *Value) bool {
   5982 	// match: (SRAconst [c] (MOVWconst [d]))
   5983 	// cond:
   5984 	// result: (MOVWconst [int64(int32(d)>>uint32(c))])
   5985 	for {
   5986 		c := v.AuxInt
   5987 		v_0 := v.Args[0]
   5988 		if v_0.Op != OpMIPSMOVWconst {
   5989 			break
   5990 		}
   5991 		d := v_0.AuxInt
   5992 		v.reset(OpMIPSMOVWconst)
   5993 		v.AuxInt = int64(int32(d) >> uint32(c))
   5994 		return true
   5995 	}
   5996 	return false
   5997 }
   5998 func rewriteValueMIPS_OpMIPSSRL_0(v *Value) bool {
   5999 	// match: (SRL _ (MOVWconst [c]))
   6000 	// cond: uint32(c)>=32
   6001 	// result: (MOVWconst [0])
   6002 	for {
   6003 		_ = v.Args[1]
   6004 		v_1 := v.Args[1]
   6005 		if v_1.Op != OpMIPSMOVWconst {
   6006 			break
   6007 		}
   6008 		c := v_1.AuxInt
   6009 		if !(uint32(c) >= 32) {
   6010 			break
   6011 		}
   6012 		v.reset(OpMIPSMOVWconst)
   6013 		v.AuxInt = 0
   6014 		return true
   6015 	}
   6016 	// match: (SRL x (MOVWconst [c]))
   6017 	// cond:
   6018 	// result: (SRLconst x [c])
   6019 	for {
   6020 		_ = v.Args[1]
   6021 		x := v.Args[0]
   6022 		v_1 := v.Args[1]
   6023 		if v_1.Op != OpMIPSMOVWconst {
   6024 			break
   6025 		}
   6026 		c := v_1.AuxInt
   6027 		v.reset(OpMIPSSRLconst)
   6028 		v.AuxInt = c
   6029 		v.AddArg(x)
   6030 		return true
   6031 	}
   6032 	return false
   6033 }
   6034 func rewriteValueMIPS_OpMIPSSRLconst_0(v *Value) bool {
   6035 	// match: (SRLconst [c] (MOVWconst [d]))
   6036 	// cond:
   6037 	// result: (MOVWconst [int64(uint32(d)>>uint32(c))])
   6038 	for {
   6039 		c := v.AuxInt
   6040 		v_0 := v.Args[0]
   6041 		if v_0.Op != OpMIPSMOVWconst {
   6042 			break
   6043 		}
   6044 		d := v_0.AuxInt
   6045 		v.reset(OpMIPSMOVWconst)
   6046 		v.AuxInt = int64(uint32(d) >> uint32(c))
   6047 		return true
   6048 	}
   6049 	return false
   6050 }
   6051 func rewriteValueMIPS_OpMIPSSUB_0(v *Value) bool {
   6052 	// match: (SUB x (MOVWconst [c]))
   6053 	// cond:
   6054 	// result: (SUBconst [c] x)
   6055 	for {
   6056 		_ = v.Args[1]
   6057 		x := v.Args[0]
   6058 		v_1 := v.Args[1]
   6059 		if v_1.Op != OpMIPSMOVWconst {
   6060 			break
   6061 		}
   6062 		c := v_1.AuxInt
   6063 		v.reset(OpMIPSSUBconst)
   6064 		v.AuxInt = c
   6065 		v.AddArg(x)
   6066 		return true
   6067 	}
   6068 	// match: (SUB x x)
   6069 	// cond:
   6070 	// result: (MOVWconst [0])
   6071 	for {
   6072 		_ = v.Args[1]
   6073 		x := v.Args[0]
   6074 		if x != v.Args[1] {
   6075 			break
   6076 		}
   6077 		v.reset(OpMIPSMOVWconst)
   6078 		v.AuxInt = 0
   6079 		return true
   6080 	}
   6081 	// match: (SUB (MOVWconst [0]) x)
   6082 	// cond:
   6083 	// result: (NEG x)
   6084 	for {
   6085 		_ = v.Args[1]
   6086 		v_0 := v.Args[0]
   6087 		if v_0.Op != OpMIPSMOVWconst {
   6088 			break
   6089 		}
   6090 		if v_0.AuxInt != 0 {
   6091 			break
   6092 		}
   6093 		x := v.Args[1]
   6094 		v.reset(OpMIPSNEG)
   6095 		v.AddArg(x)
   6096 		return true
   6097 	}
   6098 	return false
   6099 }
   6100 func rewriteValueMIPS_OpMIPSSUBconst_0(v *Value) bool {
   6101 	// match: (SUBconst [0] x)
   6102 	// cond:
   6103 	// result: x
   6104 	for {
   6105 		if v.AuxInt != 0 {
   6106 			break
   6107 		}
   6108 		x := v.Args[0]
   6109 		v.reset(OpCopy)
   6110 		v.Type = x.Type
   6111 		v.AddArg(x)
   6112 		return true
   6113 	}
   6114 	// match: (SUBconst [c] (MOVWconst [d]))
   6115 	// cond:
   6116 	// result: (MOVWconst [int64(int32(d-c))])
   6117 	for {
   6118 		c := v.AuxInt
   6119 		v_0 := v.Args[0]
   6120 		if v_0.Op != OpMIPSMOVWconst {
   6121 			break
   6122 		}
   6123 		d := v_0.AuxInt
   6124 		v.reset(OpMIPSMOVWconst)
   6125 		v.AuxInt = int64(int32(d - c))
   6126 		return true
   6127 	}
   6128 	// match: (SUBconst [c] (SUBconst [d] x))
   6129 	// cond:
   6130 	// result: (ADDconst [int64(int32(-c-d))] x)
   6131 	for {
   6132 		c := v.AuxInt
   6133 		v_0 := v.Args[0]
   6134 		if v_0.Op != OpMIPSSUBconst {
   6135 			break
   6136 		}
   6137 		d := v_0.AuxInt
   6138 		x := v_0.Args[0]
   6139 		v.reset(OpMIPSADDconst)
   6140 		v.AuxInt = int64(int32(-c - d))
   6141 		v.AddArg(x)
   6142 		return true
   6143 	}
   6144 	// match: (SUBconst [c] (ADDconst [d] x))
   6145 	// cond:
   6146 	// result: (ADDconst [int64(int32(-c+d))] x)
   6147 	for {
   6148 		c := v.AuxInt
   6149 		v_0 := v.Args[0]
   6150 		if v_0.Op != OpMIPSADDconst {
   6151 			break
   6152 		}
   6153 		d := v_0.AuxInt
   6154 		x := v_0.Args[0]
   6155 		v.reset(OpMIPSADDconst)
   6156 		v.AuxInt = int64(int32(-c + d))
   6157 		v.AddArg(x)
   6158 		return true
   6159 	}
   6160 	return false
   6161 }
   6162 func rewriteValueMIPS_OpMIPSXOR_0(v *Value) bool {
   6163 	// match: (XOR x (MOVWconst [c]))
   6164 	// cond:
   6165 	// result: (XORconst [c] x)
   6166 	for {
   6167 		_ = v.Args[1]
   6168 		x := v.Args[0]
   6169 		v_1 := v.Args[1]
   6170 		if v_1.Op != OpMIPSMOVWconst {
   6171 			break
   6172 		}
   6173 		c := v_1.AuxInt
   6174 		v.reset(OpMIPSXORconst)
   6175 		v.AuxInt = c
   6176 		v.AddArg(x)
   6177 		return true
   6178 	}
   6179 	// match: (XOR (MOVWconst [c]) x)
   6180 	// cond:
   6181 	// result: (XORconst [c] x)
   6182 	for {
   6183 		_ = v.Args[1]
   6184 		v_0 := v.Args[0]
   6185 		if v_0.Op != OpMIPSMOVWconst {
   6186 			break
   6187 		}
   6188 		c := v_0.AuxInt
   6189 		x := v.Args[1]
   6190 		v.reset(OpMIPSXORconst)
   6191 		v.AuxInt = c
   6192 		v.AddArg(x)
   6193 		return true
   6194 	}
   6195 	// match: (XOR x x)
   6196 	// cond:
   6197 	// result: (MOVWconst [0])
   6198 	for {
   6199 		_ = v.Args[1]
   6200 		x := v.Args[0]
   6201 		if x != v.Args[1] {
   6202 			break
   6203 		}
   6204 		v.reset(OpMIPSMOVWconst)
   6205 		v.AuxInt = 0
   6206 		return true
   6207 	}
   6208 	return false
   6209 }
   6210 func rewriteValueMIPS_OpMIPSXORconst_0(v *Value) bool {
   6211 	// match: (XORconst [0] x)
   6212 	// cond:
   6213 	// result: x
   6214 	for {
   6215 		if v.AuxInt != 0 {
   6216 			break
   6217 		}
   6218 		x := v.Args[0]
   6219 		v.reset(OpCopy)
   6220 		v.Type = x.Type
   6221 		v.AddArg(x)
   6222 		return true
   6223 	}
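	// XOR with all ones is a bitwise complement; MIPS has no NOT instruction,
	// so the complement is expressed as NOR with zero.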
   6224 	// match: (XORconst [-1] x)
   6225 	// cond:
   6226 	// result: (NORconst [0] x)
   6227 	for {
   6228 		if v.AuxInt != -1 {
   6229 			break
   6230 		}
   6231 		x := v.Args[0]
   6232 		v.reset(OpMIPSNORconst)
   6233 		v.AuxInt = 0
   6234 		v.AddArg(x)
   6235 		return true
   6236 	}
   6237 	// match: (XORconst [c] (MOVWconst [d]))
   6238 	// cond:
   6239 	// result: (MOVWconst [c^d])
   6240 	for {
   6241 		c := v.AuxInt
   6242 		v_0 := v.Args[0]
   6243 		if v_0.Op != OpMIPSMOVWconst {
   6244 			break
   6245 		}
   6246 		d := v_0.AuxInt
   6247 		v.reset(OpMIPSMOVWconst)
   6248 		v.AuxInt = c ^ d
   6249 		return true
   6250 	}
   6251 	// match: (XORconst [c] (XORconst [d] x))
   6252 	// cond:
   6253 	// result: (XORconst [c^d] x)
   6254 	for {
   6255 		c := v.AuxInt
   6256 		v_0 := v.Args[0]
   6257 		if v_0.Op != OpMIPSXORconst {
   6258 			break
   6259 		}
   6260 		d := v_0.AuxInt
   6261 		x := v_0.Args[0]
   6262 		v.reset(OpMIPSXORconst)
   6263 		v.AuxInt = c ^ d
   6264 		v.AddArg(x)
   6265 		return true
   6266 	}
   6267 	return false
   6268 }
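// Mod* lowers to the remainder half of the DIV/DIVU result tuple (Select0),
// with sub-word operands sign- or zero-extended to 32 bits first.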
   6269 func rewriteValueMIPS_OpMod16_0(v *Value) bool {
   6270 	b := v.Block
   6271 	_ = b
   6272 	typ := &b.Func.Config.Types
   6273 	_ = typ
   6274 	// match: (Mod16 x y)
   6275 	// cond:
   6276 	// result: (Select0 (DIV (SignExt16to32 x) (SignExt16to32 y)))
   6277 	for {
   6278 		_ = v.Args[1]
   6279 		x := v.Args[0]
   6280 		y := v.Args[1]
   6281 		v.reset(OpSelect0)
   6282 		v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32))
   6283 		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
   6284 		v1.AddArg(x)
   6285 		v0.AddArg(v1)
   6286 		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
   6287 		v2.AddArg(y)
   6288 		v0.AddArg(v2)
   6289 		v.AddArg(v0)
   6290 		return true
   6291 	}
   6292 }
   6293 func rewriteValueMIPS_OpMod16u_0(v *Value) bool {
   6294 	b := v.Block
   6295 	_ = b
   6296 	typ := &b.Func.Config.Types
   6297 	_ = typ
   6298 	// match: (Mod16u x y)
   6299 	// cond:
   6300 	// result: (Select0 (DIVU (ZeroExt16to32 x) (ZeroExt16to32 y)))
   6301 	for {
   6302 		_ = v.Args[1]
   6303 		x := v.Args[0]
   6304 		y := v.Args[1]
   6305 		v.reset(OpSelect0)
   6306 		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32))
   6307 		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   6308 		v1.AddArg(x)
   6309 		v0.AddArg(v1)
   6310 		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   6311 		v2.AddArg(y)
   6312 		v0.AddArg(v2)
   6313 		v.AddArg(v0)
   6314 		return true
   6315 	}
   6316 }
   6317 func rewriteValueMIPS_OpMod32_0(v *Value) bool {
   6318 	b := v.Block
   6319 	_ = b
   6320 	typ := &b.Func.Config.Types
   6321 	_ = typ
   6322 	// match: (Mod32 x y)
   6323 	// cond:
   6324 	// result: (Select0 (DIV x y))
   6325 	for {
   6326 		_ = v.Args[1]
   6327 		x := v.Args[0]
   6328 		y := v.Args[1]
   6329 		v.reset(OpSelect0)
   6330 		v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32))
   6331 		v0.AddArg(x)
   6332 		v0.AddArg(y)
   6333 		v.AddArg(v0)
   6334 		return true
   6335 	}
   6336 }
   6337 func rewriteValueMIPS_OpMod32u_0(v *Value) bool {
   6338 	b := v.Block
   6339 	_ = b
   6340 	typ := &b.Func.Config.Types
   6341 	_ = typ
   6342 	// match: (Mod32u x y)
   6343 	// cond:
   6344 	// result: (Select0 (DIVU x y))
   6345 	for {
   6346 		_ = v.Args[1]
   6347 		x := v.Args[0]
   6348 		y := v.Args[1]
   6349 		v.reset(OpSelect0)
   6350 		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32))
   6351 		v0.AddArg(x)
   6352 		v0.AddArg(y)
   6353 		v.AddArg(v0)
   6354 		return true
   6355 	}
   6356 }
   6357 func rewriteValueMIPS_OpMod8_0(v *Value) bool {
   6358 	b := v.Block
   6359 	_ = b
   6360 	typ := &b.Func.Config.Types
   6361 	_ = typ
   6362 	// match: (Mod8 x y)
   6363 	// cond:
   6364 	// result: (Select0 (DIV (SignExt8to32 x) (SignExt8to32 y)))
   6365 	for {
   6366 		_ = v.Args[1]
   6367 		x := v.Args[0]
   6368 		y := v.Args[1]
   6369 		v.reset(OpSelect0)
   6370 		v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32))
   6371 		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
   6372 		v1.AddArg(x)
   6373 		v0.AddArg(v1)
   6374 		v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
   6375 		v2.AddArg(y)
   6376 		v0.AddArg(v2)
   6377 		v.AddArg(v0)
   6378 		return true
   6379 	}
   6380 }
   6381 func rewriteValueMIPS_OpMod8u_0(v *Value) bool {
   6382 	b := v.Block
   6383 	_ = b
   6384 	typ := &b.Func.Config.Types
   6385 	_ = typ
   6386 	// match: (Mod8u x y)
   6387 	// cond:
   6388 	// result: (Select0 (DIVU (ZeroExt8to32 x) (ZeroExt8to32 y)))
   6389 	for {
   6390 		_ = v.Args[1]
   6391 		x := v.Args[0]
   6392 		y := v.Args[1]
   6393 		v.reset(OpSelect0)
   6394 		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32))
   6395 		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   6396 		v1.AddArg(x)
   6397 		v0.AddArg(v1)
   6398 		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   6399 		v2.AddArg(y)
   6400 		v0.AddArg(v2)
   6401 		v.AddArg(v0)
   6402 		return true
   6403 	}
   6404 }
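// Small Move sizes are unrolled into individual loads and stores, using the
// widest access the known alignment permits and falling back to byte-sized
// accesses otherwise. Larger or unaligned moves are handled by the generic
// LoweredMove rule (see rewriteValueMIPS_OpMove_10).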
   6405 func rewriteValueMIPS_OpMove_0(v *Value) bool {
   6406 	b := v.Block
   6407 	_ = b
   6408 	typ := &b.Func.Config.Types
   6409 	_ = typ
   6410 	// match: (Move [0] _ _ mem)
   6411 	// cond:
   6412 	// result: mem
   6413 	for {
   6414 		if v.AuxInt != 0 {
   6415 			break
   6416 		}
   6417 		_ = v.Args[2]
   6418 		mem := v.Args[2]
   6419 		v.reset(OpCopy)
   6420 		v.Type = mem.Type
   6421 		v.AddArg(mem)
   6422 		return true
   6423 	}
   6424 	// match: (Move [1] dst src mem)
   6425 	// cond:
   6426 	// result: (MOVBstore dst (MOVBUload src mem) mem)
   6427 	for {
   6428 		if v.AuxInt != 1 {
   6429 			break
   6430 		}
   6431 		_ = v.Args[2]
   6432 		dst := v.Args[0]
   6433 		src := v.Args[1]
   6434 		mem := v.Args[2]
   6435 		v.reset(OpMIPSMOVBstore)
   6436 		v.AddArg(dst)
   6437 		v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
   6438 		v0.AddArg(src)
   6439 		v0.AddArg(mem)
   6440 		v.AddArg(v0)
   6441 		v.AddArg(mem)
   6442 		return true
   6443 	}
   6444 	// match: (Move [2] {t} dst src mem)
   6445 	// cond: t.(*types.Type).Alignment()%2 == 0
   6446 	// result: (MOVHstore dst (MOVHUload src mem) mem)
   6447 	for {
   6448 		if v.AuxInt != 2 {
   6449 			break
   6450 		}
   6451 		t := v.Aux
   6452 		_ = v.Args[2]
   6453 		dst := v.Args[0]
   6454 		src := v.Args[1]
   6455 		mem := v.Args[2]
   6456 		if !(t.(*types.Type).Alignment()%2 == 0) {
   6457 			break
   6458 		}
   6459 		v.reset(OpMIPSMOVHstore)
   6460 		v.AddArg(dst)
   6461 		v0 := b.NewValue0(v.Pos, OpMIPSMOVHUload, typ.UInt16)
   6462 		v0.AddArg(src)
   6463 		v0.AddArg(mem)
   6464 		v.AddArg(v0)
   6465 		v.AddArg(mem)
   6466 		return true
   6467 	}
   6468 	// match: (Move [2] dst src mem)
   6469 	// cond:
   6470 	// result: (MOVBstore [1] dst (MOVBUload [1] src mem) 		(MOVBstore dst (MOVBUload src mem) mem))
   6471 	for {
   6472 		if v.AuxInt != 2 {
   6473 			break
   6474 		}
   6475 		_ = v.Args[2]
   6476 		dst := v.Args[0]
   6477 		src := v.Args[1]
   6478 		mem := v.Args[2]
   6479 		v.reset(OpMIPSMOVBstore)
   6480 		v.AuxInt = 1
   6481 		v.AddArg(dst)
   6482 		v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
   6483 		v0.AuxInt = 1
   6484 		v0.AddArg(src)
   6485 		v0.AddArg(mem)
   6486 		v.AddArg(v0)
   6487 		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
   6488 		v1.AddArg(dst)
   6489 		v2 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
   6490 		v2.AddArg(src)
   6491 		v2.AddArg(mem)
   6492 		v1.AddArg(v2)
   6493 		v1.AddArg(mem)
   6494 		v.AddArg(v1)
   6495 		return true
   6496 	}
   6497 	// match: (Move [4] {t} dst src mem)
   6498 	// cond: t.(*types.Type).Alignment()%4 == 0
   6499 	// result: (MOVWstore dst (MOVWload src mem) mem)
   6500 	for {
   6501 		if v.AuxInt != 4 {
   6502 			break
   6503 		}
   6504 		t := v.Aux
   6505 		_ = v.Args[2]
   6506 		dst := v.Args[0]
   6507 		src := v.Args[1]
   6508 		mem := v.Args[2]
   6509 		if !(t.(*types.Type).Alignment()%4 == 0) {
   6510 			break
   6511 		}
   6512 		v.reset(OpMIPSMOVWstore)
   6513 		v.AddArg(dst)
   6514 		v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
   6515 		v0.AddArg(src)
   6516 		v0.AddArg(mem)
   6517 		v.AddArg(v0)
   6518 		v.AddArg(mem)
   6519 		return true
   6520 	}
   6521 	// match: (Move [4] {t} dst src mem)
   6522 	// cond: t.(*types.Type).Alignment()%2 == 0
   6523 	// result: (MOVHstore [2] dst (MOVHUload [2] src mem) 		(MOVHstore dst (MOVHUload src mem) mem))
   6524 	for {
   6525 		if v.AuxInt != 4 {
   6526 			break
   6527 		}
   6528 		t := v.Aux
   6529 		_ = v.Args[2]
   6530 		dst := v.Args[0]
   6531 		src := v.Args[1]
   6532 		mem := v.Args[2]
   6533 		if !(t.(*types.Type).Alignment()%2 == 0) {
   6534 			break
   6535 		}
   6536 		v.reset(OpMIPSMOVHstore)
   6537 		v.AuxInt = 2
   6538 		v.AddArg(dst)
   6539 		v0 := b.NewValue0(v.Pos, OpMIPSMOVHUload, typ.UInt16)
   6540 		v0.AuxInt = 2
   6541 		v0.AddArg(src)
   6542 		v0.AddArg(mem)
   6543 		v.AddArg(v0)
   6544 		v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
   6545 		v1.AddArg(dst)
   6546 		v2 := b.NewValue0(v.Pos, OpMIPSMOVHUload, typ.UInt16)
   6547 		v2.AddArg(src)
   6548 		v2.AddArg(mem)
   6549 		v1.AddArg(v2)
   6550 		v1.AddArg(mem)
   6551 		v.AddArg(v1)
   6552 		return true
   6553 	}
   6554 	// match: (Move [4] dst src mem)
   6555 	// cond:
   6556 	// result: (MOVBstore [3] dst (MOVBUload [3] src mem) 		(MOVBstore [2] dst (MOVBUload [2] src mem) 			(MOVBstore [1] dst (MOVBUload [1] src mem) 				(MOVBstore dst (MOVBUload src mem) mem))))
   6557 	for {
   6558 		if v.AuxInt != 4 {
   6559 			break
   6560 		}
   6561 		_ = v.Args[2]
   6562 		dst := v.Args[0]
   6563 		src := v.Args[1]
   6564 		mem := v.Args[2]
   6565 		v.reset(OpMIPSMOVBstore)
   6566 		v.AuxInt = 3
   6567 		v.AddArg(dst)
   6568 		v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
   6569 		v0.AuxInt = 3
   6570 		v0.AddArg(src)
   6571 		v0.AddArg(mem)
   6572 		v.AddArg(v0)
   6573 		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
   6574 		v1.AuxInt = 2
   6575 		v1.AddArg(dst)
   6576 		v2 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
   6577 		v2.AuxInt = 2
   6578 		v2.AddArg(src)
   6579 		v2.AddArg(mem)
   6580 		v1.AddArg(v2)
   6581 		v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
   6582 		v3.AuxInt = 1
   6583 		v3.AddArg(dst)
   6584 		v4 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
   6585 		v4.AuxInt = 1
   6586 		v4.AddArg(src)
   6587 		v4.AddArg(mem)
   6588 		v3.AddArg(v4)
   6589 		v5 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
   6590 		v5.AddArg(dst)
   6591 		v6 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
   6592 		v6.AddArg(src)
   6593 		v6.AddArg(mem)
   6594 		v5.AddArg(v6)
   6595 		v5.AddArg(mem)
   6596 		v3.AddArg(v5)
   6597 		v1.AddArg(v3)
   6598 		v.AddArg(v1)
   6599 		return true
   6600 	}
   6601 	// match: (Move [3] dst src mem)
   6602 	// cond:
   6603 	// result: (MOVBstore [2] dst (MOVBUload [2] src mem) 		(MOVBstore [1] dst (MOVBUload [1] src mem) 			(MOVBstore dst (MOVBUload src mem) mem)))
   6604 	for {
   6605 		if v.AuxInt != 3 {
   6606 			break
   6607 		}
   6608 		_ = v.Args[2]
   6609 		dst := v.Args[0]
   6610 		src := v.Args[1]
   6611 		mem := v.Args[2]
   6612 		v.reset(OpMIPSMOVBstore)
   6613 		v.AuxInt = 2
   6614 		v.AddArg(dst)
   6615 		v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
   6616 		v0.AuxInt = 2
   6617 		v0.AddArg(src)
   6618 		v0.AddArg(mem)
   6619 		v.AddArg(v0)
   6620 		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
   6621 		v1.AuxInt = 1
   6622 		v1.AddArg(dst)
   6623 		v2 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
   6624 		v2.AuxInt = 1
   6625 		v2.AddArg(src)
   6626 		v2.AddArg(mem)
   6627 		v1.AddArg(v2)
   6628 		v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
   6629 		v3.AddArg(dst)
   6630 		v4 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
   6631 		v4.AddArg(src)
   6632 		v4.AddArg(mem)
   6633 		v3.AddArg(v4)
   6634 		v3.AddArg(mem)
   6635 		v1.AddArg(v3)
   6636 		v.AddArg(v1)
   6637 		return true
   6638 	}
   6639 	// match: (Move [8] {t} dst src mem)
   6640 	// cond: t.(*types.Type).Alignment()%4 == 0
   6641 	// result: (MOVWstore [4] dst (MOVWload [4] src mem) 		(MOVWstore dst (MOVWload src mem) mem))
   6642 	for {
   6643 		if v.AuxInt != 8 {
   6644 			break
   6645 		}
   6646 		t := v.Aux
   6647 		_ = v.Args[2]
   6648 		dst := v.Args[0]
   6649 		src := v.Args[1]
   6650 		mem := v.Args[2]
   6651 		if !(t.(*types.Type).Alignment()%4 == 0) {
   6652 			break
   6653 		}
   6654 		v.reset(OpMIPSMOVWstore)
   6655 		v.AuxInt = 4
   6656 		v.AddArg(dst)
   6657 		v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
   6658 		v0.AuxInt = 4
   6659 		v0.AddArg(src)
   6660 		v0.AddArg(mem)
   6661 		v.AddArg(v0)
   6662 		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
   6663 		v1.AddArg(dst)
   6664 		v2 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
   6665 		v2.AddArg(src)
   6666 		v2.AddArg(mem)
   6667 		v1.AddArg(v2)
   6668 		v1.AddArg(mem)
   6669 		v.AddArg(v1)
   6670 		return true
   6671 	}
   6672 	// match: (Move [8] {t} dst src mem)
   6673 	// cond: t.(*types.Type).Alignment()%2 == 0
   6674 	// result: (MOVHstore [6] dst (MOVHload [6] src mem) 		(MOVHstore [4] dst (MOVHload [4] src mem) 			(MOVHstore [2] dst (MOVHload [2] src mem) 				(MOVHstore dst (MOVHload src mem) mem))))
   6675 	for {
   6676 		if v.AuxInt != 8 {
   6677 			break
   6678 		}
   6679 		t := v.Aux
   6680 		_ = v.Args[2]
   6681 		dst := v.Args[0]
   6682 		src := v.Args[1]
   6683 		mem := v.Args[2]
   6684 		if !(t.(*types.Type).Alignment()%2 == 0) {
   6685 			break
   6686 		}
   6687 		v.reset(OpMIPSMOVHstore)
   6688 		v.AuxInt = 6
   6689 		v.AddArg(dst)
   6690 		v0 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
   6691 		v0.AuxInt = 6
   6692 		v0.AddArg(src)
   6693 		v0.AddArg(mem)
   6694 		v.AddArg(v0)
   6695 		v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
   6696 		v1.AuxInt = 4
   6697 		v1.AddArg(dst)
   6698 		v2 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
   6699 		v2.AuxInt = 4
   6700 		v2.AddArg(src)
   6701 		v2.AddArg(mem)
   6702 		v1.AddArg(v2)
   6703 		v3 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
   6704 		v3.AuxInt = 2
   6705 		v3.AddArg(dst)
   6706 		v4 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
   6707 		v4.AuxInt = 2
   6708 		v4.AddArg(src)
   6709 		v4.AddArg(mem)
   6710 		v3.AddArg(v4)
   6711 		v5 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
   6712 		v5.AddArg(dst)
   6713 		v6 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
   6714 		v6.AddArg(src)
   6715 		v6.AddArg(mem)
   6716 		v5.AddArg(v6)
   6717 		v5.AddArg(mem)
   6718 		v3.AddArg(v5)
   6719 		v1.AddArg(v3)
   6720 		v.AddArg(v1)
   6721 		return true
   6722 	}
   6723 	return false
   6724 }
   6725 func rewriteValueMIPS_OpMove_10(v *Value) bool {
   6726 	b := v.Block
   6727 	_ = b
   6728 	config := b.Func.Config
   6729 	_ = config
   6730 	typ := &b.Func.Config.Types
   6731 	_ = typ
   6732 	// match: (Move [6] {t} dst src mem)
   6733 	// cond: t.(*types.Type).Alignment()%2 == 0
   6734 	// result: (MOVHstore [4] dst (MOVHload [4] src mem) 		(MOVHstore [2] dst (MOVHload [2] src mem) 			(MOVHstore dst (MOVHload src mem) mem)))
   6735 	for {
   6736 		if v.AuxInt != 6 {
   6737 			break
   6738 		}
   6739 		t := v.Aux
   6740 		_ = v.Args[2]
   6741 		dst := v.Args[0]
   6742 		src := v.Args[1]
   6743 		mem := v.Args[2]
   6744 		if !(t.(*types.Type).Alignment()%2 == 0) {
   6745 			break
   6746 		}
   6747 		v.reset(OpMIPSMOVHstore)
   6748 		v.AuxInt = 4
   6749 		v.AddArg(dst)
   6750 		v0 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
   6751 		v0.AuxInt = 4
   6752 		v0.AddArg(src)
   6753 		v0.AddArg(mem)
   6754 		v.AddArg(v0)
   6755 		v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
   6756 		v1.AuxInt = 2
   6757 		v1.AddArg(dst)
   6758 		v2 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
   6759 		v2.AuxInt = 2
   6760 		v2.AddArg(src)
   6761 		v2.AddArg(mem)
   6762 		v1.AddArg(v2)
   6763 		v3 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
   6764 		v3.AddArg(dst)
   6765 		v4 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
   6766 		v4.AddArg(src)
   6767 		v4.AddArg(mem)
   6768 		v3.AddArg(v4)
   6769 		v3.AddArg(mem)
   6770 		v1.AddArg(v3)
   6771 		v.AddArg(v1)
   6772 		return true
   6773 	}
   6774 	// match: (Move [12] {t} dst src mem)
   6775 	// cond: t.(*types.Type).Alignment()%4 == 0
   6776 	// result: (MOVWstore [8] dst (MOVWload [8] src mem) 		(MOVWstore [4] dst (MOVWload [4] src mem) 			(MOVWstore dst (MOVWload src mem) mem)))
   6777 	for {
   6778 		if v.AuxInt != 12 {
   6779 			break
   6780 		}
   6781 		t := v.Aux
   6782 		_ = v.Args[2]
   6783 		dst := v.Args[0]
   6784 		src := v.Args[1]
   6785 		mem := v.Args[2]
   6786 		if !(t.(*types.Type).Alignment()%4 == 0) {
   6787 			break
   6788 		}
   6789 		v.reset(OpMIPSMOVWstore)
   6790 		v.AuxInt = 8
   6791 		v.AddArg(dst)
   6792 		v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
   6793 		v0.AuxInt = 8
   6794 		v0.AddArg(src)
   6795 		v0.AddArg(mem)
   6796 		v.AddArg(v0)
   6797 		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
   6798 		v1.AuxInt = 4
   6799 		v1.AddArg(dst)
   6800 		v2 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
   6801 		v2.AuxInt = 4
   6802 		v2.AddArg(src)
   6803 		v2.AddArg(mem)
   6804 		v1.AddArg(v2)
   6805 		v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
   6806 		v3.AddArg(dst)
   6807 		v4 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
   6808 		v4.AddArg(src)
   6809 		v4.AddArg(mem)
   6810 		v3.AddArg(v4)
   6811 		v3.AddArg(mem)
   6812 		v1.AddArg(v3)
   6813 		v.AddArg(v1)
   6814 		return true
   6815 	}
   6816 	// match: (Move [16] {t} dst src mem)
   6817 	// cond: t.(*types.Type).Alignment()%4 == 0
   6818 	// result: (MOVWstore [12] dst (MOVWload [12] src mem) 		(MOVWstore [8] dst (MOVWload [8] src mem) 			(MOVWstore [4] dst (MOVWload [4] src mem) 				(MOVWstore dst (MOVWload src mem) mem))))
   6819 	for {
   6820 		if v.AuxInt != 16 {
   6821 			break
   6822 		}
   6823 		t := v.Aux
   6824 		_ = v.Args[2]
   6825 		dst := v.Args[0]
   6826 		src := v.Args[1]
   6827 		mem := v.Args[2]
   6828 		if !(t.(*types.Type).Alignment()%4 == 0) {
   6829 			break
   6830 		}
   6831 		v.reset(OpMIPSMOVWstore)
   6832 		v.AuxInt = 12
   6833 		v.AddArg(dst)
   6834 		v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
   6835 		v0.AuxInt = 12
   6836 		v0.AddArg(src)
   6837 		v0.AddArg(mem)
   6838 		v.AddArg(v0)
   6839 		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
   6840 		v1.AuxInt = 8
   6841 		v1.AddArg(dst)
   6842 		v2 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
   6843 		v2.AuxInt = 8
   6844 		v2.AddArg(src)
   6845 		v2.AddArg(mem)
   6846 		v1.AddArg(v2)
   6847 		v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
   6848 		v3.AuxInt = 4
   6849 		v3.AddArg(dst)
   6850 		v4 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
   6851 		v4.AuxInt = 4
   6852 		v4.AddArg(src)
   6853 		v4.AddArg(mem)
   6854 		v3.AddArg(v4)
   6855 		v5 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
   6856 		v5.AddArg(dst)
   6857 		v6 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
   6858 		v6.AddArg(src)
   6859 		v6.AddArg(mem)
   6860 		v5.AddArg(v6)
   6861 		v5.AddArg(mem)
   6862 		v3.AddArg(v5)
   6863 		v1.AddArg(v3)
   6864 		v.AddArg(v1)
   6865 		return true
   6866 	}
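	// Everything else (more than 16 bytes, or alignment that rules out word
	// accesses) is deferred to the LoweredMove pseudo-op; the ADDconst operand
	// precomputes src+s-moveSize, the address of the final element to copy.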
   6867 	// match: (Move [s] {t} dst src mem)
   6868 	// cond: (s > 16 || t.(*types.Type).Alignment()%4 != 0)
   6869 	// result: (LoweredMove [t.(*types.Type).Alignment()] 		dst 		src 		(ADDconst <src.Type> src [s-moveSize(t.(*types.Type).Alignment(), config)]) 		mem)
   6870 	for {
   6871 		s := v.AuxInt
   6872 		t := v.Aux
   6873 		_ = v.Args[2]
   6874 		dst := v.Args[0]
   6875 		src := v.Args[1]
   6876 		mem := v.Args[2]
   6877 		if !(s > 16 || t.(*types.Type).Alignment()%4 != 0) {
   6878 			break
   6879 		}
   6880 		v.reset(OpMIPSLoweredMove)
   6881 		v.AuxInt = t.(*types.Type).Alignment()
   6882 		v.AddArg(dst)
   6883 		v.AddArg(src)
   6884 		v0 := b.NewValue0(v.Pos, OpMIPSADDconst, src.Type)
   6885 		v0.AuxInt = s - moveSize(t.(*types.Type).Alignment(), config)
   6886 		v0.AddArg(src)
   6887 		v.AddArg(v0)
   6888 		v.AddArg(mem)
   6889 		return true
   6890 	}
   6891 	return false
   6892 }
   6893 func rewriteValueMIPS_OpMul16_0(v *Value) bool {
   6894 	// match: (Mul16 x y)
   6895 	// cond:
   6896 	// result: (MUL x y)
   6897 	for {
   6898 		_ = v.Args[1]
   6899 		x := v.Args[0]
   6900 		y := v.Args[1]
   6901 		v.reset(OpMIPSMUL)
   6902 		v.AddArg(x)
   6903 		v.AddArg(y)
   6904 		return true
   6905 	}
   6906 }
   6907 func rewriteValueMIPS_OpMul32_0(v *Value) bool {
   6908 	// match: (Mul32 x y)
   6909 	// cond:
   6910 	// result: (MUL x y)
   6911 	for {
   6912 		_ = v.Args[1]
   6913 		x := v.Args[0]
   6914 		y := v.Args[1]
   6915 		v.reset(OpMIPSMUL)
   6916 		v.AddArg(x)
   6917 		v.AddArg(y)
   6918 		return true
   6919 	}
   6920 }
   6921 func rewriteValueMIPS_OpMul32F_0(v *Value) bool {
   6922 	// match: (Mul32F x y)
   6923 	// cond:
   6924 	// result: (MULF x y)
   6925 	for {
   6926 		_ = v.Args[1]
   6927 		x := v.Args[0]
   6928 		y := v.Args[1]
   6929 		v.reset(OpMIPSMULF)
   6930 		v.AddArg(x)
   6931 		v.AddArg(y)
   6932 		return true
   6933 	}
   6934 }
   6935 func rewriteValueMIPS_OpMul32uhilo_0(v *Value) bool {
   6936 	// match: (Mul32uhilo x y)
   6937 	// cond:
   6938 	// result: (MULTU x y)
   6939 	for {
   6940 		_ = v.Args[1]
   6941 		x := v.Args[0]
   6942 		y := v.Args[1]
   6943 		v.reset(OpMIPSMULTU)
   6944 		v.AddArg(x)
   6945 		v.AddArg(y)
   6946 		return true
   6947 	}
   6948 }
   6949 func rewriteValueMIPS_OpMul64F_0(v *Value) bool {
   6950 	// match: (Mul64F x y)
   6951 	// cond:
   6952 	// result: (MULD x y)
   6953 	for {
   6954 		_ = v.Args[1]
   6955 		x := v.Args[0]
   6956 		y := v.Args[1]
   6957 		v.reset(OpMIPSMULD)
   6958 		v.AddArg(x)
   6959 		v.AddArg(y)
   6960 		return true
   6961 	}
   6962 }
   6963 func rewriteValueMIPS_OpMul8_0(v *Value) bool {
   6964 	// match: (Mul8 x y)
   6965 	// cond:
   6966 	// result: (MUL x y)
   6967 	for {
   6968 		_ = v.Args[1]
   6969 		x := v.Args[0]
   6970 		y := v.Args[1]
   6971 		v.reset(OpMIPSMUL)
   6972 		v.AddArg(x)
   6973 		v.AddArg(y)
   6974 		return true
   6975 	}
   6976 }
   6977 func rewriteValueMIPS_OpNeg16_0(v *Value) bool {
   6978 	// match: (Neg16 x)
   6979 	// cond:
   6980 	// result: (NEG x)
   6981 	for {
   6982 		x := v.Args[0]
   6983 		v.reset(OpMIPSNEG)
   6984 		v.AddArg(x)
   6985 		return true
   6986 	}
   6987 }
   6988 func rewriteValueMIPS_OpNeg32_0(v *Value) bool {
   6989 	// match: (Neg32 x)
   6990 	// cond:
   6991 	// result: (NEG x)
   6992 	for {
   6993 		x := v.Args[0]
   6994 		v.reset(OpMIPSNEG)
   6995 		v.AddArg(x)
   6996 		return true
   6997 	}
   6998 }
   6999 func rewriteValueMIPS_OpNeg32F_0(v *Value) bool {
   7000 	// match: (Neg32F x)
   7001 	// cond:
   7002 	// result: (NEGF x)
   7003 	for {
   7004 		x := v.Args[0]
   7005 		v.reset(OpMIPSNEGF)
   7006 		v.AddArg(x)
   7007 		return true
   7008 	}
   7009 }
   7010 func rewriteValueMIPS_OpNeg64F_0(v *Value) bool {
   7011 	// match: (Neg64F x)
   7012 	// cond:
   7013 	// result: (NEGD x)
   7014 	for {
   7015 		x := v.Args[0]
   7016 		v.reset(OpMIPSNEGD)
   7017 		v.AddArg(x)
   7018 		return true
   7019 	}
   7020 }
   7021 func rewriteValueMIPS_OpNeg8_0(v *Value) bool {
   7022 	// match: (Neg8 x)
   7023 	// cond:
   7024 	// result: (NEG x)
   7025 	for {
   7026 		x := v.Args[0]
   7027 		v.reset(OpMIPSNEG)
   7028 		v.AddArg(x)
   7029 		return true
   7030 	}
   7031 }
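// Neq* is lowered by XOR-ing the operands and testing the result against zero
// with SGTU: x^y is unsigned-greater-than 0 exactly when x != y.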
   7032 func rewriteValueMIPS_OpNeq16_0(v *Value) bool {
   7033 	b := v.Block
   7034 	_ = b
   7035 	typ := &b.Func.Config.Types
   7036 	_ = typ
   7037 	// match: (Neq16 x y)
   7038 	// cond:
   7039 	// result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to32 y)) (MOVWconst [0]))
   7040 	for {
   7041 		_ = v.Args[1]
   7042 		x := v.Args[0]
   7043 		y := v.Args[1]
   7044 		v.reset(OpMIPSSGTU)
   7045 		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
   7046 		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   7047 		v1.AddArg(x)
   7048 		v0.AddArg(v1)
   7049 		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   7050 		v2.AddArg(y)
   7051 		v0.AddArg(v2)
   7052 		v.AddArg(v0)
   7053 		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   7054 		v3.AuxInt = 0
   7055 		v.AddArg(v3)
   7056 		return true
   7057 	}
   7058 }
   7059 func rewriteValueMIPS_OpNeq32_0(v *Value) bool {
   7060 	b := v.Block
   7061 	_ = b
   7062 	typ := &b.Func.Config.Types
   7063 	_ = typ
   7064 	// match: (Neq32 x y)
   7065 	// cond:
   7066 	// result: (SGTU (XOR x y) (MOVWconst [0]))
   7067 	for {
   7068 		_ = v.Args[1]
   7069 		x := v.Args[0]
   7070 		y := v.Args[1]
   7071 		v.reset(OpMIPSSGTU)
   7072 		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
   7073 		v0.AddArg(x)
   7074 		v0.AddArg(y)
   7075 		v.AddArg(v0)
   7076 		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   7077 		v1.AuxInt = 0
   7078 		v.AddArg(v1)
   7079 		return true
   7080 	}
   7081 }
   7082 func rewriteValueMIPS_OpNeq32F_0(v *Value) bool {
   7083 	b := v.Block
   7084 	_ = b
   7085 	// match: (Neq32F x y)
   7086 	// cond:
   7087 	// result: (FPFlagFalse (CMPEQF x y))
   7088 	for {
   7089 		_ = v.Args[1]
   7090 		x := v.Args[0]
   7091 		y := v.Args[1]
   7092 		v.reset(OpMIPSFPFlagFalse)
   7093 		v0 := b.NewValue0(v.Pos, OpMIPSCMPEQF, types.TypeFlags)
   7094 		v0.AddArg(x)
   7095 		v0.AddArg(y)
   7096 		v.AddArg(v0)
   7097 		return true
   7098 	}
   7099 }
   7100 func rewriteValueMIPS_OpNeq64F_0(v *Value) bool {
   7101 	b := v.Block
   7102 	_ = b
   7103 	// match: (Neq64F x y)
   7104 	// cond:
   7105 	// result: (FPFlagFalse (CMPEQD x y))
   7106 	for {
   7107 		_ = v.Args[1]
   7108 		x := v.Args[0]
   7109 		y := v.Args[1]
   7110 		v.reset(OpMIPSFPFlagFalse)
   7111 		v0 := b.NewValue0(v.Pos, OpMIPSCMPEQD, types.TypeFlags)
   7112 		v0.AddArg(x)
   7113 		v0.AddArg(y)
   7114 		v.AddArg(v0)
   7115 		return true
   7116 	}
   7117 }
   7118 func rewriteValueMIPS_OpNeq8_0(v *Value) bool {
   7119 	b := v.Block
   7120 	_ = b
   7121 	typ := &b.Func.Config.Types
   7122 	_ = typ
   7123 	// match: (Neq8 x y)
   7124 	// cond:
   7125 	// result: (SGTU (XOR (ZeroExt8to32 x) (ZeroExt8to32 y)) (MOVWconst [0]))
   7126 	for {
   7127 		_ = v.Args[1]
   7128 		x := v.Args[0]
   7129 		y := v.Args[1]
   7130 		v.reset(OpMIPSSGTU)
   7131 		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
   7132 		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   7133 		v1.AddArg(x)
   7134 		v0.AddArg(v1)
   7135 		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   7136 		v2.AddArg(y)
   7137 		v0.AddArg(v2)
   7138 		v.AddArg(v0)
   7139 		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   7140 		v3.AuxInt = 0
   7141 		v.AddArg(v3)
   7142 		return true
   7143 	}
   7144 }
   7145 func rewriteValueMIPS_OpNeqB_0(v *Value) bool {
   7146 	// match: (NeqB x y)
   7147 	// cond:
   7148 	// result: (XOR x y)
   7149 	for {
   7150 		_ = v.Args[1]
   7151 		x := v.Args[0]
   7152 		y := v.Args[1]
   7153 		v.reset(OpMIPSXOR)
   7154 		v.AddArg(x)
   7155 		v.AddArg(y)
   7156 		return true
   7157 	}
   7158 }
   7159 func rewriteValueMIPS_OpNeqPtr_0(v *Value) bool {
   7160 	b := v.Block
   7161 	_ = b
   7162 	typ := &b.Func.Config.Types
   7163 	_ = typ
   7164 	// match: (NeqPtr x y)
   7165 	// cond:
   7166 	// result: (SGTU (XOR x y) (MOVWconst [0]))
   7167 	for {
   7168 		_ = v.Args[1]
   7169 		x := v.Args[0]
   7170 		y := v.Args[1]
   7171 		v.reset(OpMIPSSGTU)
   7172 		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
   7173 		v0.AddArg(x)
   7174 		v0.AddArg(y)
   7175 		v.AddArg(v0)
   7176 		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   7177 		v1.AuxInt = 0
   7178 		v.AddArg(v1)
   7179 		return true
   7180 	}
   7181 }
   7182 func rewriteValueMIPS_OpNilCheck_0(v *Value) bool {
   7183 	// match: (NilCheck ptr mem)
   7184 	// cond:
   7185 	// result: (LoweredNilCheck ptr mem)
   7186 	for {
   7187 		_ = v.Args[1]
   7188 		ptr := v.Args[0]
   7189 		mem := v.Args[1]
   7190 		v.reset(OpMIPSLoweredNilCheck)
   7191 		v.AddArg(ptr)
   7192 		v.AddArg(mem)
   7193 		return true
   7194 	}
   7195 }
   7196 func rewriteValueMIPS_OpNot_0(v *Value) bool {
   7197 	// match: (Not x)
   7198 	// cond:
   7199 	// result: (XORconst [1] x)
   7200 	for {
   7201 		x := v.Args[0]
   7202 		v.reset(OpMIPSXORconst)
   7203 		v.AuxInt = 1
   7204 		v.AddArg(x)
   7205 		return true
   7206 	}
   7207 }
   7208 func rewriteValueMIPS_OpOffPtr_0(v *Value) bool {
   7209 	// match: (OffPtr [off] ptr:(SP))
   7210 	// cond:
   7211 	// result: (MOVWaddr [off] ptr)
   7212 	for {
   7213 		off := v.AuxInt
   7214 		ptr := v.Args[0]
   7215 		if ptr.Op != OpSP {
   7216 			break
   7217 		}
   7218 		v.reset(OpMIPSMOVWaddr)
   7219 		v.AuxInt = off
   7220 		v.AddArg(ptr)
   7221 		return true
   7222 	}
   7223 	// match: (OffPtr [off] ptr)
   7224 	// cond:
   7225 	// result: (ADDconst [off] ptr)
   7226 	for {
   7227 		off := v.AuxInt
   7228 		ptr := v.Args[0]
   7229 		v.reset(OpMIPSADDconst)
   7230 		v.AuxInt = off
   7231 		v.AddArg(ptr)
   7232 		return true
   7233 	}
   7234 }
   7235 func rewriteValueMIPS_OpOr16_0(v *Value) bool {
   7236 	// match: (Or16 x y)
   7237 	// cond:
   7238 	// result: (OR x y)
   7239 	for {
   7240 		_ = v.Args[1]
   7241 		x := v.Args[0]
   7242 		y := v.Args[1]
   7243 		v.reset(OpMIPSOR)
   7244 		v.AddArg(x)
   7245 		v.AddArg(y)
   7246 		return true
   7247 	}
   7248 }
   7249 func rewriteValueMIPS_OpOr32_0(v *Value) bool {
   7250 	// match: (Or32 x y)
   7251 	// cond:
   7252 	// result: (OR x y)
   7253 	for {
   7254 		_ = v.Args[1]
   7255 		x := v.Args[0]
   7256 		y := v.Args[1]
   7257 		v.reset(OpMIPSOR)
   7258 		v.AddArg(x)
   7259 		v.AddArg(y)
   7260 		return true
   7261 	}
   7262 }
   7263 func rewriteValueMIPS_OpOr8_0(v *Value) bool {
   7264 	// match: (Or8 x y)
   7265 	// cond:
   7266 	// result: (OR x y)
   7267 	for {
   7268 		_ = v.Args[1]
   7269 		x := v.Args[0]
   7270 		y := v.Args[1]
   7271 		v.reset(OpMIPSOR)
   7272 		v.AddArg(x)
   7273 		v.AddArg(y)
   7274 		return true
   7275 	}
   7276 }
   7277 func rewriteValueMIPS_OpOrB_0(v *Value) bool {
   7278 	// match: (OrB x y)
   7279 	// cond:
   7280 	// result: (OR x y)
   7281 	for {
   7282 		_ = v.Args[1]
   7283 		x := v.Args[0]
   7284 		y := v.Args[1]
   7285 		v.reset(OpMIPSOR)
   7286 		v.AddArg(x)
   7287 		v.AddArg(y)
   7288 		return true
   7289 	}
   7290 }
   7291 func rewriteValueMIPS_OpRound32F_0(v *Value) bool {
   7292 	// match: (Round32F x)
   7293 	// cond:
   7294 	// result: x
   7295 	for {
   7296 		x := v.Args[0]
   7297 		v.reset(OpCopy)
   7298 		v.Type = x.Type
   7299 		v.AddArg(x)
   7300 		return true
   7301 	}
   7302 }
   7303 func rewriteValueMIPS_OpRound64F_0(v *Value) bool {
   7304 	// match: (Round64F x)
   7305 	// cond:
   7306 	// result: x
   7307 	for {
   7308 		x := v.Args[0]
   7309 		v.reset(OpCopy)
   7310 		v.Type = x.Type
   7311 		v.AddArg(x)
   7312 		return true
   7313 	}
   7314 }
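// Go requires shifts by 32 or more to yield 0 (logical shifts) or all sign
// bits (arithmetic shifts), but the MIPS shift instructions use only the low
// five bits of the count. The variable-shift rules below therefore guard the
// shift with CMOVZ and an SGTUconst [32] test, which is nonzero exactly when
// the count is below 32.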
   7315 func rewriteValueMIPS_OpRsh16Ux16_0(v *Value) bool {
   7316 	b := v.Block
   7317 	_ = b
   7318 	typ := &b.Func.Config.Types
   7319 	_ = typ
   7320 	// match: (Rsh16Ux16 <t> x y)
   7321 	// cond:
   7322 	// result: (CMOVZ (SRL <t> (ZeroExt16to32 x) (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
   7323 	for {
   7324 		t := v.Type
   7325 		_ = v.Args[1]
   7326 		x := v.Args[0]
   7327 		y := v.Args[1]
   7328 		v.reset(OpMIPSCMOVZ)
   7329 		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
   7330 		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   7331 		v1.AddArg(x)
   7332 		v0.AddArg(v1)
   7333 		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   7334 		v2.AddArg(y)
   7335 		v0.AddArg(v2)
   7336 		v.AddArg(v0)
   7337 		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   7338 		v3.AuxInt = 0
   7339 		v.AddArg(v3)
   7340 		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
   7341 		v4.AuxInt = 32
   7342 		v5 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   7343 		v5.AddArg(y)
   7344 		v4.AddArg(v5)
   7345 		v.AddArg(v4)
   7346 		return true
   7347 	}
   7348 }
   7349 func rewriteValueMIPS_OpRsh16Ux32_0(v *Value) bool {
   7350 	b := v.Block
   7351 	_ = b
   7352 	typ := &b.Func.Config.Types
   7353 	_ = typ
   7354 	// match: (Rsh16Ux32 <t> x y)
   7355 	// cond:
   7356 	// result: (CMOVZ (SRL <t> (ZeroExt16to32 x) y) (MOVWconst [0]) (SGTUconst [32] y))
   7357 	for {
   7358 		t := v.Type
   7359 		_ = v.Args[1]
   7360 		x := v.Args[0]
   7361 		y := v.Args[1]
   7362 		v.reset(OpMIPSCMOVZ)
   7363 		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
   7364 		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   7365 		v1.AddArg(x)
   7366 		v0.AddArg(v1)
   7367 		v0.AddArg(y)
   7368 		v.AddArg(v0)
   7369 		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   7370 		v2.AuxInt = 0
   7371 		v.AddArg(v2)
   7372 		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
   7373 		v3.AuxInt = 32
   7374 		v3.AddArg(y)
   7375 		v.AddArg(v3)
   7376 		return true
   7377 	}
   7378 }
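// For a 16-bit value shifted right by a constant, the operand is first
// shifted left by 16, pushing the half-word into the upper half of the
// register and discarding any stale high bits, then shifted right by c+16 to
// produce the final result.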
   7379 func rewriteValueMIPS_OpRsh16Ux64_0(v *Value) bool {
   7380 	b := v.Block
   7381 	_ = b
   7382 	typ := &b.Func.Config.Types
   7383 	_ = typ
   7384 	// match: (Rsh16Ux64 x (Const64 [c]))
   7385 	// cond: uint32(c) < 16
   7386 	// result: (SRLconst (SLLconst <typ.UInt32> x [16]) [c+16])
   7387 	for {
   7388 		_ = v.Args[1]
   7389 		x := v.Args[0]
   7390 		v_1 := v.Args[1]
   7391 		if v_1.Op != OpConst64 {
   7392 			break
   7393 		}
   7394 		c := v_1.AuxInt
   7395 		if !(uint32(c) < 16) {
   7396 			break
   7397 		}
   7398 		v.reset(OpMIPSSRLconst)
   7399 		v.AuxInt = c + 16
   7400 		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
   7401 		v0.AuxInt = 16
   7402 		v0.AddArg(x)
   7403 		v.AddArg(v0)
   7404 		return true
   7405 	}
   7406 	// match: (Rsh16Ux64 _ (Const64 [c]))
   7407 	// cond: uint32(c) >= 16
   7408 	// result: (MOVWconst [0])
   7409 	for {
   7410 		_ = v.Args[1]
   7411 		v_1 := v.Args[1]
   7412 		if v_1.Op != OpConst64 {
   7413 			break
   7414 		}
   7415 		c := v_1.AuxInt
   7416 		if !(uint32(c) >= 16) {
   7417 			break
   7418 		}
   7419 		v.reset(OpMIPSMOVWconst)
   7420 		v.AuxInt = 0
   7421 		return true
   7422 	}
   7423 	return false
   7424 }
   7425 func rewriteValueMIPS_OpRsh16Ux8_0(v *Value) bool {
   7426 	b := v.Block
   7427 	_ = b
   7428 	typ := &b.Func.Config.Types
   7429 	_ = typ
   7430 	// match: (Rsh16Ux8 <t> x y)
   7431 	// cond:
   7432 	// result: (CMOVZ (SRL <t> (ZeroExt16to32 x) (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
   7433 	for {
   7434 		t := v.Type
   7435 		_ = v.Args[1]
   7436 		x := v.Args[0]
   7437 		y := v.Args[1]
   7438 		v.reset(OpMIPSCMOVZ)
   7439 		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
   7440 		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   7441 		v1.AddArg(x)
   7442 		v0.AddArg(v1)
   7443 		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   7444 		v2.AddArg(y)
   7445 		v0.AddArg(v2)
   7446 		v.AddArg(v0)
   7447 		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   7448 		v3.AuxInt = 0
   7449 		v.AddArg(v3)
   7450 		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
   7451 		v4.AuxInt = 32
   7452 		v5 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   7453 		v5.AddArg(y)
   7454 		v4.AddArg(v5)
   7455 		v.AddArg(v4)
   7456 		return true
   7457 	}
   7458 }
   7459 func rewriteValueMIPS_OpRsh16x16_0(v *Value) bool {
   7460 	b := v.Block
   7461 	_ = b
   7462 	typ := &b.Func.Config.Types
   7463 	_ = typ
   7464 	// match: (Rsh16x16 x y)
   7465 	// cond:
   7466 	// result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> (ZeroExt16to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt16to32 y))))
   7467 	for {
   7468 		_ = v.Args[1]
   7469 		x := v.Args[0]
   7470 		y := v.Args[1]
   7471 		v.reset(OpMIPSSRA)
   7472 		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
   7473 		v0.AddArg(x)
   7474 		v.AddArg(v0)
   7475 		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
   7476 		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   7477 		v2.AddArg(y)
   7478 		v1.AddArg(v2)
   7479 		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   7480 		v3.AuxInt = -1
   7481 		v1.AddArg(v3)
   7482 		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
   7483 		v4.AuxInt = 32
   7484 		v5 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   7485 		v5.AddArg(y)
   7486 		v4.AddArg(v5)
   7487 		v1.AddArg(v4)
   7488 		v.AddArg(v1)
   7489 		return true
   7490 	}
   7491 }
   7492 func rewriteValueMIPS_OpRsh16x32_0(v *Value) bool {
   7493 	b := v.Block
   7494 	_ = b
   7495 	typ := &b.Func.Config.Types
   7496 	_ = typ
   7497 	// match: (Rsh16x32 x y)
   7498 	// cond:
   7499 	// result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> y (MOVWconst [-1]) (SGTUconst [32] y)))
   7500 	for {
   7501 		_ = v.Args[1]
   7502 		x := v.Args[0]
   7503 		y := v.Args[1]
   7504 		v.reset(OpMIPSSRA)
   7505 		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
   7506 		v0.AddArg(x)
   7507 		v.AddArg(v0)
   7508 		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
   7509 		v1.AddArg(y)
   7510 		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   7511 		v2.AuxInt = -1
   7512 		v1.AddArg(v2)
   7513 		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
   7514 		v3.AuxInt = 32
   7515 		v3.AddArg(y)
   7516 		v1.AddArg(v3)
   7517 		v.AddArg(v1)
   7518 		return true
   7519 	}
   7520 }
   7521 func rewriteValueMIPS_OpRsh16x64_0(v *Value) bool {
   7522 	b := v.Block
   7523 	_ = b
   7524 	typ := &b.Func.Config.Types
   7525 	_ = typ
   7526 	// match: (Rsh16x64 x (Const64 [c]))
   7527 	// cond: uint32(c) < 16
   7528 	// result: (SRAconst (SLLconst <typ.UInt32> x [16]) [c+16])
   7529 	for {
   7530 		_ = v.Args[1]
   7531 		x := v.Args[0]
   7532 		v_1 := v.Args[1]
   7533 		if v_1.Op != OpConst64 {
   7534 			break
   7535 		}
   7536 		c := v_1.AuxInt
   7537 		if !(uint32(c) < 16) {
   7538 			break
   7539 		}
   7540 		v.reset(OpMIPSSRAconst)
   7541 		v.AuxInt = c + 16
   7542 		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
   7543 		v0.AuxInt = 16
   7544 		v0.AddArg(x)
   7545 		v.AddArg(v0)
   7546 		return true
   7547 	}
   7548 	// match: (Rsh16x64 x (Const64 [c]))
   7549 	// cond: uint32(c) >= 16
   7550 	// result: (SRAconst (SLLconst <typ.UInt32> x [16]) [31])
   7551 	for {
   7552 		_ = v.Args[1]
   7553 		x := v.Args[0]
   7554 		v_1 := v.Args[1]
   7555 		if v_1.Op != OpConst64 {
   7556 			break
   7557 		}
   7558 		c := v_1.AuxInt
   7559 		if !(uint32(c) >= 16) {
   7560 			break
   7561 		}
   7562 		v.reset(OpMIPSSRAconst)
   7563 		v.AuxInt = 31
   7564 		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
   7565 		v0.AuxInt = 16
   7566 		v0.AddArg(x)
   7567 		v.AddArg(v0)
   7568 		return true
   7569 	}
   7570 	return false
   7571 }
   7572 func rewriteValueMIPS_OpRsh16x8_0(v *Value) bool {
   7573 	b := v.Block
   7574 	_ = b
   7575 	typ := &b.Func.Config.Types
   7576 	_ = typ
   7577 	// match: (Rsh16x8 x y)
   7578 	// cond:
   7579 	// result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> (ZeroExt8to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt8to32 y))))
   7580 	for {
   7581 		_ = v.Args[1]
   7582 		x := v.Args[0]
   7583 		y := v.Args[1]
   7584 		v.reset(OpMIPSSRA)
   7585 		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
   7586 		v0.AddArg(x)
   7587 		v.AddArg(v0)
   7588 		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
   7589 		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   7590 		v2.AddArg(y)
   7591 		v1.AddArg(v2)
   7592 		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   7593 		v3.AuxInt = -1
   7594 		v1.AddArg(v3)
   7595 		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
   7596 		v4.AuxInt = 32
   7597 		v5 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   7598 		v5.AddArg(y)
   7599 		v4.AddArg(v5)
   7600 		v1.AddArg(v4)
   7601 		v.AddArg(v1)
   7602 		return true
   7603 	}
   7604 }
   7605 func rewriteValueMIPS_OpRsh32Ux16_0(v *Value) bool {
   7606 	b := v.Block
   7607 	_ = b
   7608 	typ := &b.Func.Config.Types
   7609 	_ = typ
   7610 	// match: (Rsh32Ux16 <t> x y)
   7611 	// cond:
   7612 	// result: (CMOVZ (SRL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
   7613 	for {
   7614 		t := v.Type
   7615 		_ = v.Args[1]
   7616 		x := v.Args[0]
   7617 		y := v.Args[1]
   7618 		v.reset(OpMIPSCMOVZ)
   7619 		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
   7620 		v0.AddArg(x)
   7621 		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   7622 		v1.AddArg(y)
   7623 		v0.AddArg(v1)
   7624 		v.AddArg(v0)
   7625 		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   7626 		v2.AuxInt = 0
   7627 		v.AddArg(v2)
   7628 		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
   7629 		v3.AuxInt = 32
   7630 		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   7631 		v4.AddArg(y)
   7632 		v3.AddArg(v4)
   7633 		v.AddArg(v3)
   7634 		return true
   7635 	}
   7636 }
   7637 func rewriteValueMIPS_OpRsh32Ux32_0(v *Value) bool {
   7638 	b := v.Block
   7639 	_ = b
   7640 	typ := &b.Func.Config.Types
   7641 	_ = typ
   7642 	// match: (Rsh32Ux32 <t> x y)
   7643 	// cond:
   7644 	// result: (CMOVZ (SRL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
   7645 	for {
   7646 		t := v.Type
   7647 		_ = v.Args[1]
   7648 		x := v.Args[0]
   7649 		y := v.Args[1]
   7650 		v.reset(OpMIPSCMOVZ)
   7651 		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
   7652 		v0.AddArg(x)
   7653 		v0.AddArg(y)
   7654 		v.AddArg(v0)
   7655 		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   7656 		v1.AuxInt = 0
   7657 		v.AddArg(v1)
   7658 		v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
   7659 		v2.AuxInt = 32
   7660 		v2.AddArg(y)
   7661 		v.AddArg(v2)
   7662 		return true
   7663 	}
   7664 }
   7665 func rewriteValueMIPS_OpRsh32Ux64_0(v *Value) bool {
   7666 	// match: (Rsh32Ux64 x (Const64 [c]))
   7667 	// cond: uint32(c) < 32
   7668 	// result: (SRLconst x [c])
   7669 	for {
   7670 		_ = v.Args[1]
   7671 		x := v.Args[0]
   7672 		v_1 := v.Args[1]
   7673 		if v_1.Op != OpConst64 {
   7674 			break
   7675 		}
   7676 		c := v_1.AuxInt
   7677 		if !(uint32(c) < 32) {
   7678 			break
   7679 		}
   7680 		v.reset(OpMIPSSRLconst)
   7681 		v.AuxInt = c
   7682 		v.AddArg(x)
   7683 		return true
   7684 	}
   7685 	// match: (Rsh32Ux64 _ (Const64 [c]))
   7686 	// cond: uint32(c) >= 32
   7687 	// result: (MOVWconst [0])
   7688 	for {
   7689 		_ = v.Args[1]
   7690 		v_1 := v.Args[1]
   7691 		if v_1.Op != OpConst64 {
   7692 			break
   7693 		}
   7694 		c := v_1.AuxInt
   7695 		if !(uint32(c) >= 32) {
   7696 			break
   7697 		}
   7698 		v.reset(OpMIPSMOVWconst)
   7699 		v.AuxInt = 0
   7700 		return true
   7701 	}
   7702 	return false
   7703 }
   7704 func rewriteValueMIPS_OpRsh32Ux8_0(v *Value) bool {
   7705 	b := v.Block
   7706 	_ = b
   7707 	typ := &b.Func.Config.Types
   7708 	_ = typ
   7709 	// match: (Rsh32Ux8 <t> x y)
   7710 	// cond:
   7711 	// result: (CMOVZ (SRL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
   7712 	for {
   7713 		t := v.Type
   7714 		_ = v.Args[1]
   7715 		x := v.Args[0]
   7716 		y := v.Args[1]
   7717 		v.reset(OpMIPSCMOVZ)
   7718 		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
   7719 		v0.AddArg(x)
   7720 		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   7721 		v1.AddArg(y)
   7722 		v0.AddArg(v1)
   7723 		v.AddArg(v0)
   7724 		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   7725 		v2.AuxInt = 0
   7726 		v.AddArg(v2)
   7727 		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
   7728 		v3.AuxInt = 32
   7729 		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   7730 		v4.AddArg(y)
   7731 		v3.AddArg(v4)
   7732 		v.AddArg(v3)
   7733 		return true
   7734 	}
   7735 }
   7736 func rewriteValueMIPS_OpRsh32x16_0(v *Value) bool {
   7737 	b := v.Block
   7738 	_ = b
   7739 	typ := &b.Func.Config.Types
   7740 	_ = typ
   7741 	// match: (Rsh32x16 x y)
   7742 	// cond:
   7743 	// result: (SRA x ( CMOVZ <typ.UInt32> (ZeroExt16to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt16to32 y))))
   7744 	for {
   7745 		_ = v.Args[1]
   7746 		x := v.Args[0]
   7747 		y := v.Args[1]
   7748 		v.reset(OpMIPSSRA)
   7749 		v.AddArg(x)
   7750 		v0 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
   7751 		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   7752 		v1.AddArg(y)
   7753 		v0.AddArg(v1)
   7754 		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   7755 		v2.AuxInt = -1
   7756 		v0.AddArg(v2)
   7757 		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
   7758 		v3.AuxInt = 32
   7759 		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   7760 		v4.AddArg(y)
   7761 		v3.AddArg(v4)
   7762 		v0.AddArg(v3)
   7763 		v.AddArg(v0)
   7764 		return true
   7765 	}
   7766 }
   7767 func rewriteValueMIPS_OpRsh32x32_0(v *Value) bool {
   7768 	b := v.Block
   7769 	_ = b
   7770 	typ := &b.Func.Config.Types
   7771 	_ = typ
   7772 	// match: (Rsh32x32 x y)
   7773 	// cond:
   7774 	// result: (SRA x ( CMOVZ <typ.UInt32> y (MOVWconst [-1]) (SGTUconst [32] y)))
   7775 	for {
   7776 		_ = v.Args[1]
   7777 		x := v.Args[0]
   7778 		y := v.Args[1]
   7779 		v.reset(OpMIPSSRA)
   7780 		v.AddArg(x)
   7781 		v0 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
   7782 		v0.AddArg(y)
   7783 		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   7784 		v1.AuxInt = -1
   7785 		v0.AddArg(v1)
   7786 		v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
   7787 		v2.AuxInt = 32
   7788 		v2.AddArg(y)
   7789 		v0.AddArg(v2)
   7790 		v.AddArg(v0)
   7791 		return true
   7792 	}
   7793 }
   7794 func rewriteValueMIPS_OpRsh32x64_0(v *Value) bool {
   7795 	// match: (Rsh32x64 x (Const64 [c]))
   7796 	// cond: uint32(c) < 32
   7797 	// result: (SRAconst x [c])
   7798 	for {
   7799 		_ = v.Args[1]
   7800 		x := v.Args[0]
   7801 		v_1 := v.Args[1]
   7802 		if v_1.Op != OpConst64 {
   7803 			break
   7804 		}
   7805 		c := v_1.AuxInt
   7806 		if !(uint32(c) < 32) {
   7807 			break
   7808 		}
   7809 		v.reset(OpMIPSSRAconst)
   7810 		v.AuxInt = c
   7811 		v.AddArg(x)
   7812 		return true
   7813 	}
   7814 	// match: (Rsh32x64 x (Const64 [c]))
   7815 	// cond: uint32(c) >= 32
   7816 	// result: (SRAconst x [31])
   7817 	for {
   7818 		_ = v.Args[1]
   7819 		x := v.Args[0]
   7820 		v_1 := v.Args[1]
   7821 		if v_1.Op != OpConst64 {
   7822 			break
   7823 		}
   7824 		c := v_1.AuxInt
   7825 		if !(uint32(c) >= 32) {
   7826 			break
   7827 		}
   7828 		v.reset(OpMIPSSRAconst)
   7829 		v.AuxInt = 31
   7830 		v.AddArg(x)
   7831 		return true
   7832 	}
   7833 	return false
   7834 }
   7835 func rewriteValueMIPS_OpRsh32x8_0(v *Value) bool {
   7836 	b := v.Block
   7837 	_ = b
   7838 	typ := &b.Func.Config.Types
   7839 	_ = typ
   7840 	// match: (Rsh32x8 x y)
   7841 	// cond:
   7842 	// result: (SRA x ( CMOVZ <typ.UInt32> (ZeroExt8to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt8to32 y))))
   7843 	for {
   7844 		_ = v.Args[1]
   7845 		x := v.Args[0]
   7846 		y := v.Args[1]
   7847 		v.reset(OpMIPSSRA)
   7848 		v.AddArg(x)
   7849 		v0 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
   7850 		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   7851 		v1.AddArg(y)
   7852 		v0.AddArg(v1)
   7853 		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   7854 		v2.AuxInt = -1
   7855 		v0.AddArg(v2)
   7856 		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
   7857 		v3.AuxInt = 32
   7858 		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   7859 		v4.AddArg(y)
   7860 		v3.AddArg(v4)
   7861 		v0.AddArg(v3)
   7862 		v.AddArg(v0)
   7863 		return true
   7864 	}
   7865 }
   7866 func rewriteValueMIPS_OpRsh8Ux16_0(v *Value) bool {
   7867 	b := v.Block
   7868 	_ = b
   7869 	typ := &b.Func.Config.Types
   7870 	_ = typ
   7871 	// match: (Rsh8Ux16 <t> x y)
   7872 	// cond:
   7873 	// result: (CMOVZ (SRL <t> (ZeroExt8to32 x) (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
   7874 	for {
   7875 		t := v.Type
   7876 		_ = v.Args[1]
   7877 		x := v.Args[0]
   7878 		y := v.Args[1]
   7879 		v.reset(OpMIPSCMOVZ)
   7880 		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
   7881 		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   7882 		v1.AddArg(x)
   7883 		v0.AddArg(v1)
   7884 		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   7885 		v2.AddArg(y)
   7886 		v0.AddArg(v2)
   7887 		v.AddArg(v0)
   7888 		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   7889 		v3.AuxInt = 0
   7890 		v.AddArg(v3)
   7891 		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
   7892 		v4.AuxInt = 32
   7893 		v5 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   7894 		v5.AddArg(y)
   7895 		v4.AddArg(v5)
   7896 		v.AddArg(v4)
   7897 		return true
   7898 	}
   7899 }
   7900 func rewriteValueMIPS_OpRsh8Ux32_0(v *Value) bool {
   7901 	b := v.Block
   7902 	_ = b
   7903 	typ := &b.Func.Config.Types
   7904 	_ = typ
   7905 	// match: (Rsh8Ux32 <t> x y)
   7906 	// cond:
   7907 	// result: (CMOVZ (SRL <t> (ZeroExt8to32 x) y) (MOVWconst [0]) (SGTUconst [32] y))
   7908 	for {
   7909 		t := v.Type
   7910 		_ = v.Args[1]
   7911 		x := v.Args[0]
   7912 		y := v.Args[1]
   7913 		v.reset(OpMIPSCMOVZ)
   7914 		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
   7915 		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   7916 		v1.AddArg(x)
   7917 		v0.AddArg(v1)
   7918 		v0.AddArg(y)
   7919 		v.AddArg(v0)
   7920 		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   7921 		v2.AuxInt = 0
   7922 		v.AddArg(v2)
   7923 		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
   7924 		v3.AuxInt = 32
   7925 		v3.AddArg(y)
   7926 		v.AddArg(v3)
   7927 		return true
   7928 	}
   7929 }
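// For 8-bit shifts by a 64-bit constant the rules below avoid the CMOVZ
// clamp entirely: the value is shifted left by 24 so the byte sits at the
// top of the 32-bit register, then shifted back down by c+24 (logically for
// the unsigned form, arithmetically for the signed one), which performs the
// shift and discards the undefined upper bits of the sub-word value in one
// go. Counts of 8 or more fold to a zero constant or to a shift by 31.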
   7930 func rewriteValueMIPS_OpRsh8Ux64_0(v *Value) bool {
   7931 	b := v.Block
   7932 	_ = b
   7933 	typ := &b.Func.Config.Types
   7934 	_ = typ
   7935 	// match: (Rsh8Ux64 x (Const64 [c]))
   7936 	// cond: uint32(c) < 8
   7937 	// result: (SRLconst (SLLconst <typ.UInt32> x [24]) [c+24])
   7938 	for {
   7939 		_ = v.Args[1]
   7940 		x := v.Args[0]
   7941 		v_1 := v.Args[1]
   7942 		if v_1.Op != OpConst64 {
   7943 			break
   7944 		}
   7945 		c := v_1.AuxInt
   7946 		if !(uint32(c) < 8) {
   7947 			break
   7948 		}
   7949 		v.reset(OpMIPSSRLconst)
   7950 		v.AuxInt = c + 24
   7951 		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
   7952 		v0.AuxInt = 24
   7953 		v0.AddArg(x)
   7954 		v.AddArg(v0)
   7955 		return true
   7956 	}
   7957 	// match: (Rsh8Ux64 _ (Const64 [c]))
   7958 	// cond: uint32(c) >= 8
   7959 	// result: (MOVWconst [0])
   7960 	for {
   7961 		_ = v.Args[1]
   7962 		v_1 := v.Args[1]
   7963 		if v_1.Op != OpConst64 {
   7964 			break
   7965 		}
   7966 		c := v_1.AuxInt
   7967 		if !(uint32(c) >= 8) {
   7968 			break
   7969 		}
   7970 		v.reset(OpMIPSMOVWconst)
   7971 		v.AuxInt = 0
   7972 		return true
   7973 	}
   7974 	return false
   7975 }
   7976 func rewriteValueMIPS_OpRsh8Ux8_0(v *Value) bool {
   7977 	b := v.Block
   7978 	_ = b
   7979 	typ := &b.Func.Config.Types
   7980 	_ = typ
   7981 	// match: (Rsh8Ux8 <t> x y)
   7982 	// cond:
   7983 	// result: (CMOVZ (SRL <t> (ZeroExt8to32 x) (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
   7984 	for {
   7985 		t := v.Type
   7986 		_ = v.Args[1]
   7987 		x := v.Args[0]
   7988 		y := v.Args[1]
   7989 		v.reset(OpMIPSCMOVZ)
   7990 		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
   7991 		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   7992 		v1.AddArg(x)
   7993 		v0.AddArg(v1)
   7994 		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   7995 		v2.AddArg(y)
   7996 		v0.AddArg(v2)
   7997 		v.AddArg(v0)
   7998 		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   7999 		v3.AuxInt = 0
   8000 		v.AddArg(v3)
   8001 		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
   8002 		v4.AuxInt = 32
   8003 		v5 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   8004 		v5.AddArg(y)
   8005 		v4.AddArg(v5)
   8006 		v.AddArg(v4)
   8007 		return true
   8008 	}
   8009 }
   8010 func rewriteValueMIPS_OpRsh8x16_0(v *Value) bool {
   8011 	b := v.Block
   8012 	_ = b
   8013 	typ := &b.Func.Config.Types
   8014 	_ = typ
   8015 	// match: (Rsh8x16 x y)
   8016 	// cond:
   8017 	// result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> (ZeroExt16to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt16to32 y))))
   8018 	for {
   8019 		_ = v.Args[1]
   8020 		x := v.Args[0]
   8021 		y := v.Args[1]
   8022 		v.reset(OpMIPSSRA)
   8023 		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
   8024 		v0.AddArg(x)
   8025 		v.AddArg(v0)
   8026 		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
   8027 		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   8028 		v2.AddArg(y)
   8029 		v1.AddArg(v2)
   8030 		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   8031 		v3.AuxInt = -1
   8032 		v1.AddArg(v3)
   8033 		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
   8034 		v4.AuxInt = 32
   8035 		v5 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
   8036 		v5.AddArg(y)
   8037 		v4.AddArg(v5)
   8038 		v1.AddArg(v4)
   8039 		v.AddArg(v1)
   8040 		return true
   8041 	}
   8042 }
   8043 func rewriteValueMIPS_OpRsh8x32_0(v *Value) bool {
   8044 	b := v.Block
   8045 	_ = b
   8046 	typ := &b.Func.Config.Types
   8047 	_ = typ
   8048 	// match: (Rsh8x32 x y)
   8049 	// cond:
   8050 	// result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> y (MOVWconst [-1]) (SGTUconst [32] y)))
   8051 	for {
   8052 		_ = v.Args[1]
   8053 		x := v.Args[0]
   8054 		y := v.Args[1]
   8055 		v.reset(OpMIPSSRA)
   8056 		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
   8057 		v0.AddArg(x)
   8058 		v.AddArg(v0)
   8059 		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
   8060 		v1.AddArg(y)
   8061 		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   8062 		v2.AuxInt = -1
   8063 		v1.AddArg(v2)
   8064 		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
   8065 		v3.AuxInt = 32
   8066 		v3.AddArg(y)
   8067 		v1.AddArg(v3)
   8068 		v.AddArg(v1)
   8069 		return true
   8070 	}
   8071 }
   8072 func rewriteValueMIPS_OpRsh8x64_0(v *Value) bool {
   8073 	b := v.Block
   8074 	_ = b
   8075 	typ := &b.Func.Config.Types
   8076 	_ = typ
   8077 	// match: (Rsh8x64 x (Const64 [c]))
   8078 	// cond: uint32(c) < 8
   8079 	// result: (SRAconst (SLLconst <typ.UInt32> x [24]) [c+24])
   8080 	for {
   8081 		_ = v.Args[1]
   8082 		x := v.Args[0]
   8083 		v_1 := v.Args[1]
   8084 		if v_1.Op != OpConst64 {
   8085 			break
   8086 		}
   8087 		c := v_1.AuxInt
   8088 		if !(uint32(c) < 8) {
   8089 			break
   8090 		}
   8091 		v.reset(OpMIPSSRAconst)
   8092 		v.AuxInt = c + 24
   8093 		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
   8094 		v0.AuxInt = 24
   8095 		v0.AddArg(x)
   8096 		v.AddArg(v0)
   8097 		return true
   8098 	}
   8099 	// match: (Rsh8x64 x (Const64 [c]))
   8100 	// cond: uint32(c) >= 8
   8101 	// result: (SRAconst (SLLconst <typ.UInt32> x [24]) [31])
   8102 	for {
   8103 		_ = v.Args[1]
   8104 		x := v.Args[0]
   8105 		v_1 := v.Args[1]
   8106 		if v_1.Op != OpConst64 {
   8107 			break
   8108 		}
   8109 		c := v_1.AuxInt
   8110 		if !(uint32(c) >= 8) {
   8111 			break
   8112 		}
   8113 		v.reset(OpMIPSSRAconst)
   8114 		v.AuxInt = 31
   8115 		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
   8116 		v0.AuxInt = 24
   8117 		v0.AddArg(x)
   8118 		v.AddArg(v0)
   8119 		return true
   8120 	}
   8121 	return false
   8122 }
   8123 func rewriteValueMIPS_OpRsh8x8_0(v *Value) bool {
   8124 	b := v.Block
   8125 	_ = b
   8126 	typ := &b.Func.Config.Types
   8127 	_ = typ
   8128 	// match: (Rsh8x8 x y)
   8129 	// cond:
   8130 	// result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> (ZeroExt8to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt8to32 y))))
   8131 	for {
   8132 		_ = v.Args[1]
   8133 		x := v.Args[0]
   8134 		y := v.Args[1]
   8135 		v.reset(OpMIPSSRA)
   8136 		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
   8137 		v0.AddArg(x)
   8138 		v.AddArg(v0)
   8139 		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
   8140 		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   8141 		v2.AddArg(y)
   8142 		v1.AddArg(v2)
   8143 		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   8144 		v3.AuxInt = -1
   8145 		v1.AddArg(v3)
   8146 		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
   8147 		v4.AuxInt = 32
   8148 		v5 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   8149 		v5.AddArg(y)
   8150 		v4.AddArg(v5)
   8151 		v1.AddArg(v4)
   8152 		v.AddArg(v1)
   8153 		return true
   8154 	}
   8155 }
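// Select0 and Select1 project the two halves of a paired result. The
// constant-folding rules below show the convention used here: for MULTU,
// Select0 is the upper 32 bits of the 64-bit product (the HI register) and
// Select1 the lower 32 bits (LO); for DIV/DIVU, Select0 is the remainder and
// Select1 the quotient. That is why a multiply by 0 or 1 folds Select0
// straight to zero, while the same multiplies leave Select1 as 0 or x.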
   8156 func rewriteValueMIPS_OpSelect0_0(v *Value) bool {
   8157 	b := v.Block
   8158 	_ = b
   8159 	typ := &b.Func.Config.Types
   8160 	_ = typ
   8161 	// match: (Select0 (Add32carry <t> x y))
   8162 	// cond:
   8163 	// result: (ADD <t.FieldType(0)> x y)
   8164 	for {
   8165 		v_0 := v.Args[0]
   8166 		if v_0.Op != OpAdd32carry {
   8167 			break
   8168 		}
   8169 		t := v_0.Type
   8170 		_ = v_0.Args[1]
   8171 		x := v_0.Args[0]
   8172 		y := v_0.Args[1]
   8173 		v.reset(OpMIPSADD)
   8174 		v.Type = t.FieldType(0)
   8175 		v.AddArg(x)
   8176 		v.AddArg(y)
   8177 		return true
   8178 	}
   8179 	// match: (Select0 (Sub32carry <t> x y))
   8180 	// cond:
   8181 	// result: (SUB <t.FieldType(0)> x y)
   8182 	for {
   8183 		v_0 := v.Args[0]
   8184 		if v_0.Op != OpSub32carry {
   8185 			break
   8186 		}
   8187 		t := v_0.Type
   8188 		_ = v_0.Args[1]
   8189 		x := v_0.Args[0]
   8190 		y := v_0.Args[1]
   8191 		v.reset(OpMIPSSUB)
   8192 		v.Type = t.FieldType(0)
   8193 		v.AddArg(x)
   8194 		v.AddArg(y)
   8195 		return true
   8196 	}
   8197 	// match: (Select0 (MULTU (MOVWconst [0]) _))
   8198 	// cond:
   8199 	// result: (MOVWconst [0])
   8200 	for {
   8201 		v_0 := v.Args[0]
   8202 		if v_0.Op != OpMIPSMULTU {
   8203 			break
   8204 		}
   8205 		_ = v_0.Args[1]
   8206 		v_0_0 := v_0.Args[0]
   8207 		if v_0_0.Op != OpMIPSMOVWconst {
   8208 			break
   8209 		}
   8210 		if v_0_0.AuxInt != 0 {
   8211 			break
   8212 		}
   8213 		v.reset(OpMIPSMOVWconst)
   8214 		v.AuxInt = 0
   8215 		return true
   8216 	}
   8217 	// match: (Select0 (MULTU _ (MOVWconst [0])))
   8218 	// cond:
   8219 	// result: (MOVWconst [0])
   8220 	for {
   8221 		v_0 := v.Args[0]
   8222 		if v_0.Op != OpMIPSMULTU {
   8223 			break
   8224 		}
   8225 		_ = v_0.Args[1]
   8226 		v_0_1 := v_0.Args[1]
   8227 		if v_0_1.Op != OpMIPSMOVWconst {
   8228 			break
   8229 		}
   8230 		if v_0_1.AuxInt != 0 {
   8231 			break
   8232 		}
   8233 		v.reset(OpMIPSMOVWconst)
   8234 		v.AuxInt = 0
   8235 		return true
   8236 	}
   8237 	// match: (Select0 (MULTU (MOVWconst [1]) _))
   8238 	// cond:
   8239 	// result: (MOVWconst [0])
   8240 	for {
   8241 		v_0 := v.Args[0]
   8242 		if v_0.Op != OpMIPSMULTU {
   8243 			break
   8244 		}
   8245 		_ = v_0.Args[1]
   8246 		v_0_0 := v_0.Args[0]
   8247 		if v_0_0.Op != OpMIPSMOVWconst {
   8248 			break
   8249 		}
   8250 		if v_0_0.AuxInt != 1 {
   8251 			break
   8252 		}
   8253 		v.reset(OpMIPSMOVWconst)
   8254 		v.AuxInt = 0
   8255 		return true
   8256 	}
   8257 	// match: (Select0 (MULTU _ (MOVWconst [1])))
   8258 	// cond:
   8259 	// result: (MOVWconst [0])
   8260 	for {
   8261 		v_0 := v.Args[0]
   8262 		if v_0.Op != OpMIPSMULTU {
   8263 			break
   8264 		}
   8265 		_ = v_0.Args[1]
   8266 		v_0_1 := v_0.Args[1]
   8267 		if v_0_1.Op != OpMIPSMOVWconst {
   8268 			break
   8269 		}
   8270 		if v_0_1.AuxInt != 1 {
   8271 			break
   8272 		}
   8273 		v.reset(OpMIPSMOVWconst)
   8274 		v.AuxInt = 0
   8275 		return true
   8276 	}
   8277 	// match: (Select0 (MULTU (MOVWconst [-1]) x))
   8278 	// cond:
   8279 	// result: (CMOVZ (ADDconst <x.Type> [-1] x) (MOVWconst [0]) x)
   8280 	for {
   8281 		v_0 := v.Args[0]
   8282 		if v_0.Op != OpMIPSMULTU {
   8283 			break
   8284 		}
   8285 		_ = v_0.Args[1]
   8286 		v_0_0 := v_0.Args[0]
   8287 		if v_0_0.Op != OpMIPSMOVWconst {
   8288 			break
   8289 		}
   8290 		if v_0_0.AuxInt != -1 {
   8291 			break
   8292 		}
   8293 		x := v_0.Args[1]
   8294 		v.reset(OpMIPSCMOVZ)
   8295 		v0 := b.NewValue0(v.Pos, OpMIPSADDconst, x.Type)
   8296 		v0.AuxInt = -1
   8297 		v0.AddArg(x)
   8298 		v.AddArg(v0)
   8299 		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   8300 		v1.AuxInt = 0
   8301 		v.AddArg(v1)
   8302 		v.AddArg(x)
   8303 		return true
   8304 	}
   8305 	// match: (Select0 (MULTU x (MOVWconst [-1])))
   8306 	// cond:
   8307 	// result: (CMOVZ (ADDconst <x.Type> [-1] x) (MOVWconst [0]) x)
   8308 	for {
   8309 		v_0 := v.Args[0]
   8310 		if v_0.Op != OpMIPSMULTU {
   8311 			break
   8312 		}
   8313 		_ = v_0.Args[1]
   8314 		x := v_0.Args[0]
   8315 		v_0_1 := v_0.Args[1]
   8316 		if v_0_1.Op != OpMIPSMOVWconst {
   8317 			break
   8318 		}
   8319 		if v_0_1.AuxInt != -1 {
   8320 			break
   8321 		}
   8322 		v.reset(OpMIPSCMOVZ)
   8323 		v0 := b.NewValue0(v.Pos, OpMIPSADDconst, x.Type)
   8324 		v0.AuxInt = -1
   8325 		v0.AddArg(x)
   8326 		v.AddArg(v0)
   8327 		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   8328 		v1.AuxInt = 0
   8329 		v.AddArg(v1)
   8330 		v.AddArg(x)
   8331 		return true
   8332 	}
   8333 	// match: (Select0 (MULTU (MOVWconst [c]) x))
   8334 	// cond: isPowerOfTwo(int64(uint32(c)))
   8335 	// result: (SRLconst [32-log2(int64(uint32(c)))] x)
   8336 	for {
   8337 		v_0 := v.Args[0]
   8338 		if v_0.Op != OpMIPSMULTU {
   8339 			break
   8340 		}
   8341 		_ = v_0.Args[1]
   8342 		v_0_0 := v_0.Args[0]
   8343 		if v_0_0.Op != OpMIPSMOVWconst {
   8344 			break
   8345 		}
   8346 		c := v_0_0.AuxInt
   8347 		x := v_0.Args[1]
   8348 		if !(isPowerOfTwo(int64(uint32(c)))) {
   8349 			break
   8350 		}
   8351 		v.reset(OpMIPSSRLconst)
   8352 		v.AuxInt = 32 - log2(int64(uint32(c)))
   8353 		v.AddArg(x)
   8354 		return true
   8355 	}
   8356 	// match: (Select0 (MULTU x (MOVWconst [c])))
   8357 	// cond: isPowerOfTwo(int64(uint32(c)))
   8358 	// result: (SRLconst [32-log2(int64(uint32(c)))] x)
   8359 	for {
   8360 		v_0 := v.Args[0]
   8361 		if v_0.Op != OpMIPSMULTU {
   8362 			break
   8363 		}
   8364 		_ = v_0.Args[1]
   8365 		x := v_0.Args[0]
   8366 		v_0_1 := v_0.Args[1]
   8367 		if v_0_1.Op != OpMIPSMOVWconst {
   8368 			break
   8369 		}
   8370 		c := v_0_1.AuxInt
   8371 		if !(isPowerOfTwo(int64(uint32(c)))) {
   8372 			break
   8373 		}
   8374 		v.reset(OpMIPSSRLconst)
   8375 		v.AuxInt = 32 - log2(int64(uint32(c)))
   8376 		v.AddArg(x)
   8377 		return true
   8378 	}
   8379 	return false
   8380 }
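// The _10 suffix continues the Select0 rule list started above (the
// generator appears to split long per-op rule sets across numbered helper
// functions). The remaining cases fold fully constant MULTU, DIV and DIVU
// operands down to a single MOVWconst.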
   8381 func rewriteValueMIPS_OpSelect0_10(v *Value) bool {
   8382 	// match: (Select0 (MULTU (MOVWconst [c]) (MOVWconst [d])))
   8383 	// cond:
   8384 	// result: (MOVWconst [(c*d)>>32])
   8385 	for {
   8386 		v_0 := v.Args[0]
   8387 		if v_0.Op != OpMIPSMULTU {
   8388 			break
   8389 		}
   8390 		_ = v_0.Args[1]
   8391 		v_0_0 := v_0.Args[0]
   8392 		if v_0_0.Op != OpMIPSMOVWconst {
   8393 			break
   8394 		}
   8395 		c := v_0_0.AuxInt
   8396 		v_0_1 := v_0.Args[1]
   8397 		if v_0_1.Op != OpMIPSMOVWconst {
   8398 			break
   8399 		}
   8400 		d := v_0_1.AuxInt
   8401 		v.reset(OpMIPSMOVWconst)
   8402 		v.AuxInt = (c * d) >> 32
   8403 		return true
   8404 	}
   8405 	// match: (Select0 (MULTU (MOVWconst [d]) (MOVWconst [c])))
   8406 	// cond:
   8407 	// result: (MOVWconst [(c*d)>>32])
   8408 	for {
   8409 		v_0 := v.Args[0]
   8410 		if v_0.Op != OpMIPSMULTU {
   8411 			break
   8412 		}
   8413 		_ = v_0.Args[1]
   8414 		v_0_0 := v_0.Args[0]
   8415 		if v_0_0.Op != OpMIPSMOVWconst {
   8416 			break
   8417 		}
   8418 		d := v_0_0.AuxInt
   8419 		v_0_1 := v_0.Args[1]
   8420 		if v_0_1.Op != OpMIPSMOVWconst {
   8421 			break
   8422 		}
   8423 		c := v_0_1.AuxInt
   8424 		v.reset(OpMIPSMOVWconst)
   8425 		v.AuxInt = (c * d) >> 32
   8426 		return true
   8427 	}
   8428 	// match: (Select0 (DIV (MOVWconst [c]) (MOVWconst [d])))
   8429 	// cond:
   8430 	// result: (MOVWconst [int64(int32(c)%int32(d))])
   8431 	for {
   8432 		v_0 := v.Args[0]
   8433 		if v_0.Op != OpMIPSDIV {
   8434 			break
   8435 		}
   8436 		_ = v_0.Args[1]
   8437 		v_0_0 := v_0.Args[0]
   8438 		if v_0_0.Op != OpMIPSMOVWconst {
   8439 			break
   8440 		}
   8441 		c := v_0_0.AuxInt
   8442 		v_0_1 := v_0.Args[1]
   8443 		if v_0_1.Op != OpMIPSMOVWconst {
   8444 			break
   8445 		}
   8446 		d := v_0_1.AuxInt
   8447 		v.reset(OpMIPSMOVWconst)
   8448 		v.AuxInt = int64(int32(c) % int32(d))
   8449 		return true
   8450 	}
   8451 	// match: (Select0 (DIVU (MOVWconst [c]) (MOVWconst [d])))
   8452 	// cond:
   8453 	// result: (MOVWconst [int64(int32(uint32(c)%uint32(d)))])
   8454 	for {
   8455 		v_0 := v.Args[0]
   8456 		if v_0.Op != OpMIPSDIVU {
   8457 			break
   8458 		}
   8459 		_ = v_0.Args[1]
   8460 		v_0_0 := v_0.Args[0]
   8461 		if v_0_0.Op != OpMIPSMOVWconst {
   8462 			break
   8463 		}
   8464 		c := v_0_0.AuxInt
   8465 		v_0_1 := v_0.Args[1]
   8466 		if v_0_1.Op != OpMIPSMOVWconst {
   8467 			break
   8468 		}
   8469 		d := v_0_1.AuxInt
   8470 		v.reset(OpMIPSMOVWconst)
   8471 		v.AuxInt = int64(int32(uint32(c) % uint32(d)))
   8472 		return true
   8473 	}
   8474 	return false
   8475 }
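// Select1 of MULTU is the low 32 bits of the product, so the rules below
// treat it as an ordinary 32-bit multiply: by 0 it is zero, by 1 it is x
// itself, by -1 it becomes a negation, and by a power of two it becomes a
// left shift. Select1 of DIV/DIVU is the quotient.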
   8476 func rewriteValueMIPS_OpSelect1_0(v *Value) bool {
   8477 	b := v.Block
   8478 	_ = b
   8479 	typ := &b.Func.Config.Types
   8480 	_ = typ
   8481 	// match: (Select1 (Add32carry <t> x y))
   8482 	// cond:
   8483 	// result: (SGTU <typ.Bool> x (ADD <t.FieldType(0)> x y))
   8484 	for {
   8485 		v_0 := v.Args[0]
   8486 		if v_0.Op != OpAdd32carry {
   8487 			break
   8488 		}
   8489 		t := v_0.Type
   8490 		_ = v_0.Args[1]
   8491 		x := v_0.Args[0]
   8492 		y := v_0.Args[1]
   8493 		v.reset(OpMIPSSGTU)
   8494 		v.Type = typ.Bool
   8495 		v.AddArg(x)
   8496 		v0 := b.NewValue0(v.Pos, OpMIPSADD, t.FieldType(0))
   8497 		v0.AddArg(x)
   8498 		v0.AddArg(y)
   8499 		v.AddArg(v0)
   8500 		return true
   8501 	}
   8502 	// match: (Select1 (Sub32carry <t> x y))
   8503 	// cond:
   8504 	// result: (SGTU <typ.Bool> (SUB <t.FieldType(0)> x y) x)
   8505 	for {
   8506 		v_0 := v.Args[0]
   8507 		if v_0.Op != OpSub32carry {
   8508 			break
   8509 		}
   8510 		t := v_0.Type
   8511 		_ = v_0.Args[1]
   8512 		x := v_0.Args[0]
   8513 		y := v_0.Args[1]
   8514 		v.reset(OpMIPSSGTU)
   8515 		v.Type = typ.Bool
   8516 		v0 := b.NewValue0(v.Pos, OpMIPSSUB, t.FieldType(0))
   8517 		v0.AddArg(x)
   8518 		v0.AddArg(y)
   8519 		v.AddArg(v0)
   8520 		v.AddArg(x)
   8521 		return true
   8522 	}
   8523 	// match: (Select1 (MULTU (MOVWconst [0]) _))
   8524 	// cond:
   8525 	// result: (MOVWconst [0])
   8526 	for {
   8527 		v_0 := v.Args[0]
   8528 		if v_0.Op != OpMIPSMULTU {
   8529 			break
   8530 		}
   8531 		_ = v_0.Args[1]
   8532 		v_0_0 := v_0.Args[0]
   8533 		if v_0_0.Op != OpMIPSMOVWconst {
   8534 			break
   8535 		}
   8536 		if v_0_0.AuxInt != 0 {
   8537 			break
   8538 		}
   8539 		v.reset(OpMIPSMOVWconst)
   8540 		v.AuxInt = 0
   8541 		return true
   8542 	}
   8543 	// match: (Select1 (MULTU _ (MOVWconst [0])))
   8544 	// cond:
   8545 	// result: (MOVWconst [0])
   8546 	for {
   8547 		v_0 := v.Args[0]
   8548 		if v_0.Op != OpMIPSMULTU {
   8549 			break
   8550 		}
   8551 		_ = v_0.Args[1]
   8552 		v_0_1 := v_0.Args[1]
   8553 		if v_0_1.Op != OpMIPSMOVWconst {
   8554 			break
   8555 		}
   8556 		if v_0_1.AuxInt != 0 {
   8557 			break
   8558 		}
   8559 		v.reset(OpMIPSMOVWconst)
   8560 		v.AuxInt = 0
   8561 		return true
   8562 	}
   8563 	// match: (Select1 (MULTU (MOVWconst [1]) x))
   8564 	// cond:
   8565 	// result: x
   8566 	for {
   8567 		v_0 := v.Args[0]
   8568 		if v_0.Op != OpMIPSMULTU {
   8569 			break
   8570 		}
   8571 		_ = v_0.Args[1]
   8572 		v_0_0 := v_0.Args[0]
   8573 		if v_0_0.Op != OpMIPSMOVWconst {
   8574 			break
   8575 		}
   8576 		if v_0_0.AuxInt != 1 {
   8577 			break
   8578 		}
   8579 		x := v_0.Args[1]
   8580 		v.reset(OpCopy)
   8581 		v.Type = x.Type
   8582 		v.AddArg(x)
   8583 		return true
   8584 	}
   8585 	// match: (Select1 (MULTU x (MOVWconst [1])))
   8586 	// cond:
   8587 	// result: x
   8588 	for {
   8589 		v_0 := v.Args[0]
   8590 		if v_0.Op != OpMIPSMULTU {
   8591 			break
   8592 		}
   8593 		_ = v_0.Args[1]
   8594 		x := v_0.Args[0]
   8595 		v_0_1 := v_0.Args[1]
   8596 		if v_0_1.Op != OpMIPSMOVWconst {
   8597 			break
   8598 		}
   8599 		if v_0_1.AuxInt != 1 {
   8600 			break
   8601 		}
   8602 		v.reset(OpCopy)
   8603 		v.Type = x.Type
   8604 		v.AddArg(x)
   8605 		return true
   8606 	}
   8607 	// match: (Select1 (MULTU (MOVWconst [-1]) x))
   8608 	// cond:
   8609 	// result: (NEG <x.Type> x)
   8610 	for {
   8611 		v_0 := v.Args[0]
   8612 		if v_0.Op != OpMIPSMULTU {
   8613 			break
   8614 		}
   8615 		_ = v_0.Args[1]
   8616 		v_0_0 := v_0.Args[0]
   8617 		if v_0_0.Op != OpMIPSMOVWconst {
   8618 			break
   8619 		}
   8620 		if v_0_0.AuxInt != -1 {
   8621 			break
   8622 		}
   8623 		x := v_0.Args[1]
   8624 		v.reset(OpMIPSNEG)
   8625 		v.Type = x.Type
   8626 		v.AddArg(x)
   8627 		return true
   8628 	}
   8629 	// match: (Select1 (MULTU x (MOVWconst [-1])))
   8630 	// cond:
   8631 	// result: (NEG <x.Type> x)
   8632 	for {
   8633 		v_0 := v.Args[0]
   8634 		if v_0.Op != OpMIPSMULTU {
   8635 			break
   8636 		}
   8637 		_ = v_0.Args[1]
   8638 		x := v_0.Args[0]
   8639 		v_0_1 := v_0.Args[1]
   8640 		if v_0_1.Op != OpMIPSMOVWconst {
   8641 			break
   8642 		}
   8643 		if v_0_1.AuxInt != -1 {
   8644 			break
   8645 		}
   8646 		v.reset(OpMIPSNEG)
   8647 		v.Type = x.Type
   8648 		v.AddArg(x)
   8649 		return true
   8650 	}
   8651 	// match: (Select1 (MULTU (MOVWconst [c]) x))
   8652 	// cond: isPowerOfTwo(int64(uint32(c)))
   8653 	// result: (SLLconst [log2(int64(uint32(c)))] x)
   8654 	for {
   8655 		v_0 := v.Args[0]
   8656 		if v_0.Op != OpMIPSMULTU {
   8657 			break
   8658 		}
   8659 		_ = v_0.Args[1]
   8660 		v_0_0 := v_0.Args[0]
   8661 		if v_0_0.Op != OpMIPSMOVWconst {
   8662 			break
   8663 		}
   8664 		c := v_0_0.AuxInt
   8665 		x := v_0.Args[1]
   8666 		if !(isPowerOfTwo(int64(uint32(c)))) {
   8667 			break
   8668 		}
   8669 		v.reset(OpMIPSSLLconst)
   8670 		v.AuxInt = log2(int64(uint32(c)))
   8671 		v.AddArg(x)
   8672 		return true
   8673 	}
   8674 	// match: (Select1 (MULTU x (MOVWconst [c])))
   8675 	// cond: isPowerOfTwo(int64(uint32(c)))
   8676 	// result: (SLLconst [log2(int64(uint32(c)))] x)
   8677 	for {
   8678 		v_0 := v.Args[0]
   8679 		if v_0.Op != OpMIPSMULTU {
   8680 			break
   8681 		}
   8682 		_ = v_0.Args[1]
   8683 		x := v_0.Args[0]
   8684 		v_0_1 := v_0.Args[1]
   8685 		if v_0_1.Op != OpMIPSMOVWconst {
   8686 			break
   8687 		}
   8688 		c := v_0_1.AuxInt
   8689 		if !(isPowerOfTwo(int64(uint32(c)))) {
   8690 			break
   8691 		}
   8692 		v.reset(OpMIPSSLLconst)
   8693 		v.AuxInt = log2(int64(uint32(c)))
   8694 		v.AddArg(x)
   8695 		return true
   8696 	}
   8697 	return false
   8698 }
   8699 func rewriteValueMIPS_OpSelect1_10(v *Value) bool {
   8700 	// match: (Select1 (MULTU (MOVWconst [c]) (MOVWconst [d])))
   8701 	// cond:
   8702 	// result: (MOVWconst [int64(int32(uint32(c)*uint32(d)))])
   8703 	for {
   8704 		v_0 := v.Args[0]
   8705 		if v_0.Op != OpMIPSMULTU {
   8706 			break
   8707 		}
   8708 		_ = v_0.Args[1]
   8709 		v_0_0 := v_0.Args[0]
   8710 		if v_0_0.Op != OpMIPSMOVWconst {
   8711 			break
   8712 		}
   8713 		c := v_0_0.AuxInt
   8714 		v_0_1 := v_0.Args[1]
   8715 		if v_0_1.Op != OpMIPSMOVWconst {
   8716 			break
   8717 		}
   8718 		d := v_0_1.AuxInt
   8719 		v.reset(OpMIPSMOVWconst)
   8720 		v.AuxInt = int64(int32(uint32(c) * uint32(d)))
   8721 		return true
   8722 	}
   8723 	// match: (Select1 (MULTU (MOVWconst [d]) (MOVWconst [c])))
   8724 	// cond:
   8725 	// result: (MOVWconst [int64(int32(uint32(c)*uint32(d)))])
   8726 	for {
   8727 		v_0 := v.Args[0]
   8728 		if v_0.Op != OpMIPSMULTU {
   8729 			break
   8730 		}
   8731 		_ = v_0.Args[1]
   8732 		v_0_0 := v_0.Args[0]
   8733 		if v_0_0.Op != OpMIPSMOVWconst {
   8734 			break
   8735 		}
   8736 		d := v_0_0.AuxInt
   8737 		v_0_1 := v_0.Args[1]
   8738 		if v_0_1.Op != OpMIPSMOVWconst {
   8739 			break
   8740 		}
   8741 		c := v_0_1.AuxInt
   8742 		v.reset(OpMIPSMOVWconst)
   8743 		v.AuxInt = int64(int32(uint32(c) * uint32(d)))
   8744 		return true
   8745 	}
   8746 	// match: (Select1 (DIV (MOVWconst [c]) (MOVWconst [d])))
   8747 	// cond:
   8748 	// result: (MOVWconst [int64(int32(c)/int32(d))])
   8749 	for {
   8750 		v_0 := v.Args[0]
   8751 		if v_0.Op != OpMIPSDIV {
   8752 			break
   8753 		}
   8754 		_ = v_0.Args[1]
   8755 		v_0_0 := v_0.Args[0]
   8756 		if v_0_0.Op != OpMIPSMOVWconst {
   8757 			break
   8758 		}
   8759 		c := v_0_0.AuxInt
   8760 		v_0_1 := v_0.Args[1]
   8761 		if v_0_1.Op != OpMIPSMOVWconst {
   8762 			break
   8763 		}
   8764 		d := v_0_1.AuxInt
   8765 		v.reset(OpMIPSMOVWconst)
   8766 		v.AuxInt = int64(int32(c) / int32(d))
   8767 		return true
   8768 	}
   8769 	// match: (Select1 (DIVU (MOVWconst [c]) (MOVWconst [d])))
   8770 	// cond:
   8771 	// result: (MOVWconst [int64(int32(uint32(c)/uint32(d)))])
   8772 	for {
   8773 		v_0 := v.Args[0]
   8774 		if v_0.Op != OpMIPSDIVU {
   8775 			break
   8776 		}
   8777 		_ = v_0.Args[1]
   8778 		v_0_0 := v_0.Args[0]
   8779 		if v_0_0.Op != OpMIPSMOVWconst {
   8780 			break
   8781 		}
   8782 		c := v_0_0.AuxInt
   8783 		v_0_1 := v_0.Args[1]
   8784 		if v_0_1.Op != OpMIPSMOVWconst {
   8785 			break
   8786 		}
   8787 		d := v_0_1.AuxInt
   8788 		v.reset(OpMIPSMOVWconst)
   8789 		v.AuxInt = int64(int32(uint32(c) / uint32(d)))
   8790 		return true
   8791 	}
   8792 	return false
   8793 }
   8794 func rewriteValueMIPS_OpSignExt16to32_0(v *Value) bool {
   8795 	// match: (SignExt16to32 x)
   8796 	// cond:
   8797 	// result: (MOVHreg x)
   8798 	for {
   8799 		x := v.Args[0]
   8800 		v.reset(OpMIPSMOVHreg)
   8801 		v.AddArg(x)
   8802 		return true
   8803 	}
   8804 }
   8805 func rewriteValueMIPS_OpSignExt8to16_0(v *Value) bool {
   8806 	// match: (SignExt8to16 x)
   8807 	// cond:
   8808 	// result: (MOVBreg x)
   8809 	for {
   8810 		x := v.Args[0]
   8811 		v.reset(OpMIPSMOVBreg)
   8812 		v.AddArg(x)
   8813 		return true
   8814 	}
   8815 }
   8816 func rewriteValueMIPS_OpSignExt8to32_0(v *Value) bool {
   8817 	// match: (SignExt8to32 x)
   8818 	// cond:
   8819 	// result: (MOVBreg x)
   8820 	for {
   8821 		x := v.Args[0]
   8822 		v.reset(OpMIPSMOVBreg)
   8823 		v.AddArg(x)
   8824 		return true
   8825 	}
   8826 }
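// Signmask and Slicemask both materialize an all-ones/all-zeros mask with an
// arithmetic shift by 31: Signmask takes the sign of x directly, while
// Slicemask negates x first so any non-zero value ends up with its sign bit
// set, yielding -1 for non-zero inputs and 0 for zero.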
   8827 func rewriteValueMIPS_OpSignmask_0(v *Value) bool {
   8828 	// match: (Signmask x)
   8829 	// cond:
   8830 	// result: (SRAconst x [31])
   8831 	for {
   8832 		x := v.Args[0]
   8833 		v.reset(OpMIPSSRAconst)
   8834 		v.AuxInt = 31
   8835 		v.AddArg(x)
   8836 		return true
   8837 	}
   8838 }
   8839 func rewriteValueMIPS_OpSlicemask_0(v *Value) bool {
   8840 	b := v.Block
   8841 	_ = b
   8842 	// match: (Slicemask <t> x)
   8843 	// cond:
   8844 	// result: (SRAconst (NEG <t> x) [31])
   8845 	for {
   8846 		t := v.Type
   8847 		x := v.Args[0]
   8848 		v.reset(OpMIPSSRAconst)
   8849 		v.AuxInt = 31
   8850 		v0 := b.NewValue0(v.Pos, OpMIPSNEG, t)
   8851 		v0.AddArg(x)
   8852 		v.AddArg(v0)
   8853 		return true
   8854 	}
   8855 }
   8856 func rewriteValueMIPS_OpSqrt_0(v *Value) bool {
   8857 	// match: (Sqrt x)
   8858 	// cond:
   8859 	// result: (SQRTD x)
   8860 	for {
   8861 		x := v.Args[0]
   8862 		v.reset(OpMIPSSQRTD)
   8863 		v.AddArg(x)
   8864 		return true
   8865 	}
   8866 }
   8867 func rewriteValueMIPS_OpStaticCall_0(v *Value) bool {
   8868 	// match: (StaticCall [argwid] {target} mem)
   8869 	// cond:
   8870 	// result: (CALLstatic [argwid] {target} mem)
   8871 	for {
   8872 		argwid := v.AuxInt
   8873 		target := v.Aux
   8874 		mem := v.Args[0]
   8875 		v.reset(OpMIPSCALLstatic)
   8876 		v.AuxInt = argwid
   8877 		v.Aux = target
   8878 		v.AddArg(mem)
   8879 		return true
   8880 	}
   8881 }
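// Generic stores are lowered purely by the size recorded in the aux type,
// with floating-point values routed to the FP store forms: 1-, 2- and 4-byte
// integers become MOVBstore/MOVHstore/MOVWstore, and 32-/64-bit floats
// become MOVFstore/MOVDstore.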
   8882 func rewriteValueMIPS_OpStore_0(v *Value) bool {
   8883 	// match: (Store {t} ptr val mem)
   8884 	// cond: t.(*types.Type).Size() == 1
   8885 	// result: (MOVBstore ptr val mem)
   8886 	for {
   8887 		t := v.Aux
   8888 		_ = v.Args[2]
   8889 		ptr := v.Args[0]
   8890 		val := v.Args[1]
   8891 		mem := v.Args[2]
   8892 		if !(t.(*types.Type).Size() == 1) {
   8893 			break
   8894 		}
   8895 		v.reset(OpMIPSMOVBstore)
   8896 		v.AddArg(ptr)
   8897 		v.AddArg(val)
   8898 		v.AddArg(mem)
   8899 		return true
   8900 	}
   8901 	// match: (Store {t} ptr val mem)
   8902 	// cond: t.(*types.Type).Size() == 2
   8903 	// result: (MOVHstore ptr val mem)
   8904 	for {
   8905 		t := v.Aux
   8906 		_ = v.Args[2]
   8907 		ptr := v.Args[0]
   8908 		val := v.Args[1]
   8909 		mem := v.Args[2]
   8910 		if !(t.(*types.Type).Size() == 2) {
   8911 			break
   8912 		}
   8913 		v.reset(OpMIPSMOVHstore)
   8914 		v.AddArg(ptr)
   8915 		v.AddArg(val)
   8916 		v.AddArg(mem)
   8917 		return true
   8918 	}
   8919 	// match: (Store {t} ptr val mem)
   8920 	// cond: t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type)
   8921 	// result: (MOVWstore ptr val mem)
   8922 	for {
   8923 		t := v.Aux
   8924 		_ = v.Args[2]
   8925 		ptr := v.Args[0]
   8926 		val := v.Args[1]
   8927 		mem := v.Args[2]
   8928 		if !(t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type)) {
   8929 			break
   8930 		}
   8931 		v.reset(OpMIPSMOVWstore)
   8932 		v.AddArg(ptr)
   8933 		v.AddArg(val)
   8934 		v.AddArg(mem)
   8935 		return true
   8936 	}
   8937 	// match: (Store {t} ptr val mem)
   8938 	// cond: t.(*types.Type).Size() == 4 && is32BitFloat(val.Type)
   8939 	// result: (MOVFstore ptr val mem)
   8940 	for {
   8941 		t := v.Aux
   8942 		_ = v.Args[2]
   8943 		ptr := v.Args[0]
   8944 		val := v.Args[1]
   8945 		mem := v.Args[2]
   8946 		if !(t.(*types.Type).Size() == 4 && is32BitFloat(val.Type)) {
   8947 			break
   8948 		}
   8949 		v.reset(OpMIPSMOVFstore)
   8950 		v.AddArg(ptr)
   8951 		v.AddArg(val)
   8952 		v.AddArg(mem)
   8953 		return true
   8954 	}
   8955 	// match: (Store {t} ptr val mem)
   8956 	// cond: t.(*types.Type).Size() == 8 && is64BitFloat(val.Type)
   8957 	// result: (MOVDstore ptr val mem)
   8958 	for {
   8959 		t := v.Aux
   8960 		_ = v.Args[2]
   8961 		ptr := v.Args[0]
   8962 		val := v.Args[1]
   8963 		mem := v.Args[2]
   8964 		if !(t.(*types.Type).Size() == 8 && is64BitFloat(val.Type)) {
   8965 			break
   8966 		}
   8967 		v.reset(OpMIPSMOVDstore)
   8968 		v.AddArg(ptr)
   8969 		v.AddArg(val)
   8970 		v.AddArg(mem)
   8971 		return true
   8972 	}
   8973 	return false
   8974 }
   8975 func rewriteValueMIPS_OpSub16_0(v *Value) bool {
   8976 	// match: (Sub16 x y)
   8977 	// cond:
   8978 	// result: (SUB x y)
   8979 	for {
   8980 		_ = v.Args[1]
   8981 		x := v.Args[0]
   8982 		y := v.Args[1]
   8983 		v.reset(OpMIPSSUB)
   8984 		v.AddArg(x)
   8985 		v.AddArg(y)
   8986 		return true
   8987 	}
   8988 }
   8989 func rewriteValueMIPS_OpSub32_0(v *Value) bool {
   8990 	// match: (Sub32 x y)
   8991 	// cond:
   8992 	// result: (SUB x y)
   8993 	for {
   8994 		_ = v.Args[1]
   8995 		x := v.Args[0]
   8996 		y := v.Args[1]
   8997 		v.reset(OpMIPSSUB)
   8998 		v.AddArg(x)
   8999 		v.AddArg(y)
   9000 		return true
   9001 	}
   9002 }
   9003 func rewriteValueMIPS_OpSub32F_0(v *Value) bool {
   9004 	// match: (Sub32F x y)
   9005 	// cond:
   9006 	// result: (SUBF x y)
   9007 	for {
   9008 		_ = v.Args[1]
   9009 		x := v.Args[0]
   9010 		y := v.Args[1]
   9011 		v.reset(OpMIPSSUBF)
   9012 		v.AddArg(x)
   9013 		v.AddArg(y)
   9014 		return true
   9015 	}
   9016 }
   9017 func rewriteValueMIPS_OpSub32withcarry_0(v *Value) bool {
   9018 	b := v.Block
   9019 	_ = b
   9020 	// match: (Sub32withcarry <t> x y c)
   9021 	// cond:
   9022 	// result: (SUB (SUB <t> x y) c)
   9023 	for {
   9024 		t := v.Type
   9025 		_ = v.Args[2]
   9026 		x := v.Args[0]
   9027 		y := v.Args[1]
   9028 		c := v.Args[2]
   9029 		v.reset(OpMIPSSUB)
   9030 		v0 := b.NewValue0(v.Pos, OpMIPSSUB, t)
   9031 		v0.AddArg(x)
   9032 		v0.AddArg(y)
   9033 		v.AddArg(v0)
   9034 		v.AddArg(c)
   9035 		return true
   9036 	}
   9037 }
   9038 func rewriteValueMIPS_OpSub64F_0(v *Value) bool {
   9039 	// match: (Sub64F x y)
   9040 	// cond:
   9041 	// result: (SUBD x y)
   9042 	for {
   9043 		_ = v.Args[1]
   9044 		x := v.Args[0]
   9045 		y := v.Args[1]
   9046 		v.reset(OpMIPSSUBD)
   9047 		v.AddArg(x)
   9048 		v.AddArg(y)
   9049 		return true
   9050 	}
   9051 }
   9052 func rewriteValueMIPS_OpSub8_0(v *Value) bool {
   9053 	// match: (Sub8 x y)
   9054 	// cond:
   9055 	// result: (SUB x y)
   9056 	for {
   9057 		_ = v.Args[1]
   9058 		x := v.Args[0]
   9059 		y := v.Args[1]
   9060 		v.reset(OpMIPSSUB)
   9061 		v.AddArg(x)
   9062 		v.AddArg(y)
   9063 		return true
   9064 	}
   9065 }
   9066 func rewriteValueMIPS_OpSubPtr_0(v *Value) bool {
   9067 	// match: (SubPtr x y)
   9068 	// cond:
   9069 	// result: (SUB x y)
   9070 	for {
   9071 		_ = v.Args[1]
   9072 		x := v.Args[0]
   9073 		y := v.Args[1]
   9074 		v.reset(OpMIPSSUB)
   9075 		v.AddArg(x)
   9076 		v.AddArg(y)
   9077 		return true
   9078 	}
   9079 }
   9080 func rewriteValueMIPS_OpTrunc16to8_0(v *Value) bool {
   9081 	// match: (Trunc16to8 x)
   9082 	// cond:
   9083 	// result: x
   9084 	for {
   9085 		x := v.Args[0]
   9086 		v.reset(OpCopy)
   9087 		v.Type = x.Type
   9088 		v.AddArg(x)
   9089 		return true
   9090 	}
   9091 }
   9092 func rewriteValueMIPS_OpTrunc32to16_0(v *Value) bool {
   9093 	// match: (Trunc32to16 x)
   9094 	// cond:
   9095 	// result: x
   9096 	for {
   9097 		x := v.Args[0]
   9098 		v.reset(OpCopy)
   9099 		v.Type = x.Type
   9100 		v.AddArg(x)
   9101 		return true
   9102 	}
   9103 }
   9104 func rewriteValueMIPS_OpTrunc32to8_0(v *Value) bool {
   9105 	// match: (Trunc32to8 x)
   9106 	// cond:
   9107 	// result: x
   9108 	for {
   9109 		x := v.Args[0]
   9110 		v.reset(OpCopy)
   9111 		v.Type = x.Type
   9112 		v.AddArg(x)
   9113 		return true
   9114 	}
   9115 }
   9116 func rewriteValueMIPS_OpXor16_0(v *Value) bool {
   9117 	// match: (Xor16 x y)
   9118 	// cond:
   9119 	// result: (XOR x y)
   9120 	for {
   9121 		_ = v.Args[1]
   9122 		x := v.Args[0]
   9123 		y := v.Args[1]
   9124 		v.reset(OpMIPSXOR)
   9125 		v.AddArg(x)
   9126 		v.AddArg(y)
   9127 		return true
   9128 	}
   9129 }
   9130 func rewriteValueMIPS_OpXor32_0(v *Value) bool {
   9131 	// match: (Xor32 x y)
   9132 	// cond:
   9133 	// result: (XOR x y)
   9134 	for {
   9135 		_ = v.Args[1]
   9136 		x := v.Args[0]
   9137 		y := v.Args[1]
   9138 		v.reset(OpMIPSXOR)
   9139 		v.AddArg(x)
   9140 		v.AddArg(y)
   9141 		return true
   9142 	}
   9143 }
   9144 func rewriteValueMIPS_OpXor8_0(v *Value) bool {
   9145 	// match: (Xor8 x y)
   9146 	// cond:
   9147 	// result: (XOR x y)
   9148 	for {
   9149 		_ = v.Args[1]
   9150 		x := v.Args[0]
   9151 		y := v.Args[1]
   9152 		v.reset(OpMIPSXOR)
   9153 		v.AddArg(x)
   9154 		v.AddArg(y)
   9155 		return true
   9156 	}
   9157 }
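// Zero of a small constant size is expanded inline into stores of a zero
// constant, using the widest store the recorded alignment allows (word, then
// halfword, then byte) and chaining the stores through memory. Sizes that do
// not fit this table, or that are not sufficiently aligned, fall through to
// the LoweredZero case in the continuation function below.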
   9158 func rewriteValueMIPS_OpZero_0(v *Value) bool {
   9159 	b := v.Block
   9160 	_ = b
   9161 	typ := &b.Func.Config.Types
   9162 	_ = typ
   9163 	// match: (Zero [0] _ mem)
   9164 	// cond:
   9165 	// result: mem
   9166 	for {
   9167 		if v.AuxInt != 0 {
   9168 			break
   9169 		}
   9170 		_ = v.Args[1]
   9171 		mem := v.Args[1]
   9172 		v.reset(OpCopy)
   9173 		v.Type = mem.Type
   9174 		v.AddArg(mem)
   9175 		return true
   9176 	}
   9177 	// match: (Zero [1] ptr mem)
   9178 	// cond:
   9179 	// result: (MOVBstore ptr (MOVWconst [0]) mem)
   9180 	for {
   9181 		if v.AuxInt != 1 {
   9182 			break
   9183 		}
   9184 		_ = v.Args[1]
   9185 		ptr := v.Args[0]
   9186 		mem := v.Args[1]
   9187 		v.reset(OpMIPSMOVBstore)
   9188 		v.AddArg(ptr)
   9189 		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   9190 		v0.AuxInt = 0
   9191 		v.AddArg(v0)
   9192 		v.AddArg(mem)
   9193 		return true
   9194 	}
   9195 	// match: (Zero [2] {t} ptr mem)
   9196 	// cond: t.(*types.Type).Alignment()%2 == 0
   9197 	// result: (MOVHstore ptr (MOVWconst [0]) mem)
   9198 	for {
   9199 		if v.AuxInt != 2 {
   9200 			break
   9201 		}
   9202 		t := v.Aux
   9203 		_ = v.Args[1]
   9204 		ptr := v.Args[0]
   9205 		mem := v.Args[1]
   9206 		if !(t.(*types.Type).Alignment()%2 == 0) {
   9207 			break
   9208 		}
   9209 		v.reset(OpMIPSMOVHstore)
   9210 		v.AddArg(ptr)
   9211 		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   9212 		v0.AuxInt = 0
   9213 		v.AddArg(v0)
   9214 		v.AddArg(mem)
   9215 		return true
   9216 	}
   9217 	// match: (Zero [2] ptr mem)
   9218 	// cond:
    9219 	// result: (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem))
   9220 	for {
   9221 		if v.AuxInt != 2 {
   9222 			break
   9223 		}
   9224 		_ = v.Args[1]
   9225 		ptr := v.Args[0]
   9226 		mem := v.Args[1]
   9227 		v.reset(OpMIPSMOVBstore)
   9228 		v.AuxInt = 1
   9229 		v.AddArg(ptr)
   9230 		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   9231 		v0.AuxInt = 0
   9232 		v.AddArg(v0)
   9233 		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
   9234 		v1.AuxInt = 0
   9235 		v1.AddArg(ptr)
   9236 		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   9237 		v2.AuxInt = 0
   9238 		v1.AddArg(v2)
   9239 		v1.AddArg(mem)
   9240 		v.AddArg(v1)
   9241 		return true
   9242 	}
   9243 	// match: (Zero [4] {t} ptr mem)
   9244 	// cond: t.(*types.Type).Alignment()%4 == 0
   9245 	// result: (MOVWstore ptr (MOVWconst [0]) mem)
   9246 	for {
   9247 		if v.AuxInt != 4 {
   9248 			break
   9249 		}
   9250 		t := v.Aux
   9251 		_ = v.Args[1]
   9252 		ptr := v.Args[0]
   9253 		mem := v.Args[1]
   9254 		if !(t.(*types.Type).Alignment()%4 == 0) {
   9255 			break
   9256 		}
   9257 		v.reset(OpMIPSMOVWstore)
   9258 		v.AddArg(ptr)
   9259 		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   9260 		v0.AuxInt = 0
   9261 		v.AddArg(v0)
   9262 		v.AddArg(mem)
   9263 		return true
   9264 	}
   9265 	// match: (Zero [4] {t} ptr mem)
   9266 	// cond: t.(*types.Type).Alignment()%2 == 0
    9267 	// result: (MOVHstore [2] ptr (MOVWconst [0]) (MOVHstore [0] ptr (MOVWconst [0]) mem))
   9268 	for {
   9269 		if v.AuxInt != 4 {
   9270 			break
   9271 		}
   9272 		t := v.Aux
   9273 		_ = v.Args[1]
   9274 		ptr := v.Args[0]
   9275 		mem := v.Args[1]
   9276 		if !(t.(*types.Type).Alignment()%2 == 0) {
   9277 			break
   9278 		}
   9279 		v.reset(OpMIPSMOVHstore)
   9280 		v.AuxInt = 2
   9281 		v.AddArg(ptr)
   9282 		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   9283 		v0.AuxInt = 0
   9284 		v.AddArg(v0)
   9285 		v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
   9286 		v1.AuxInt = 0
   9287 		v1.AddArg(ptr)
   9288 		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   9289 		v2.AuxInt = 0
   9290 		v1.AddArg(v2)
   9291 		v1.AddArg(mem)
   9292 		v.AddArg(v1)
   9293 		return true
   9294 	}
   9295 	// match: (Zero [4] ptr mem)
   9296 	// cond:
    9297 	// result: (MOVBstore [3] ptr (MOVWconst [0]) (MOVBstore [2] ptr (MOVWconst [0]) (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem))))
   9298 	for {
   9299 		if v.AuxInt != 4 {
   9300 			break
   9301 		}
   9302 		_ = v.Args[1]
   9303 		ptr := v.Args[0]
   9304 		mem := v.Args[1]
   9305 		v.reset(OpMIPSMOVBstore)
   9306 		v.AuxInt = 3
   9307 		v.AddArg(ptr)
   9308 		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   9309 		v0.AuxInt = 0
   9310 		v.AddArg(v0)
   9311 		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
   9312 		v1.AuxInt = 2
   9313 		v1.AddArg(ptr)
   9314 		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   9315 		v2.AuxInt = 0
   9316 		v1.AddArg(v2)
   9317 		v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
   9318 		v3.AuxInt = 1
   9319 		v3.AddArg(ptr)
   9320 		v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   9321 		v4.AuxInt = 0
   9322 		v3.AddArg(v4)
   9323 		v5 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
   9324 		v5.AuxInt = 0
   9325 		v5.AddArg(ptr)
   9326 		v6 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   9327 		v6.AuxInt = 0
   9328 		v5.AddArg(v6)
   9329 		v5.AddArg(mem)
   9330 		v3.AddArg(v5)
   9331 		v1.AddArg(v3)
   9332 		v.AddArg(v1)
   9333 		return true
   9334 	}
   9335 	// match: (Zero [3] ptr mem)
   9336 	// cond:
    9337 	// result: (MOVBstore [2] ptr (MOVWconst [0]) (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem)))
   9338 	for {
   9339 		if v.AuxInt != 3 {
   9340 			break
   9341 		}
   9342 		_ = v.Args[1]
   9343 		ptr := v.Args[0]
   9344 		mem := v.Args[1]
   9345 		v.reset(OpMIPSMOVBstore)
   9346 		v.AuxInt = 2
   9347 		v.AddArg(ptr)
   9348 		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   9349 		v0.AuxInt = 0
   9350 		v.AddArg(v0)
   9351 		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
   9352 		v1.AuxInt = 1
   9353 		v1.AddArg(ptr)
   9354 		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   9355 		v2.AuxInt = 0
   9356 		v1.AddArg(v2)
   9357 		v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
   9358 		v3.AuxInt = 0
   9359 		v3.AddArg(ptr)
   9360 		v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   9361 		v4.AuxInt = 0
   9362 		v3.AddArg(v4)
   9363 		v3.AddArg(mem)
   9364 		v1.AddArg(v3)
   9365 		v.AddArg(v1)
   9366 		return true
   9367 	}
   9368 	// match: (Zero [6] {t} ptr mem)
   9369 	// cond: t.(*types.Type).Alignment()%2 == 0
    9370 	// result: (MOVHstore [4] ptr (MOVWconst [0]) (MOVHstore [2] ptr (MOVWconst [0]) (MOVHstore [0] ptr (MOVWconst [0]) mem)))
   9371 	for {
   9372 		if v.AuxInt != 6 {
   9373 			break
   9374 		}
   9375 		t := v.Aux
   9376 		_ = v.Args[1]
   9377 		ptr := v.Args[0]
   9378 		mem := v.Args[1]
   9379 		if !(t.(*types.Type).Alignment()%2 == 0) {
   9380 			break
   9381 		}
   9382 		v.reset(OpMIPSMOVHstore)
   9383 		v.AuxInt = 4
   9384 		v.AddArg(ptr)
   9385 		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   9386 		v0.AuxInt = 0
   9387 		v.AddArg(v0)
   9388 		v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
   9389 		v1.AuxInt = 2
   9390 		v1.AddArg(ptr)
   9391 		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   9392 		v2.AuxInt = 0
   9393 		v1.AddArg(v2)
   9394 		v3 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
   9395 		v3.AuxInt = 0
   9396 		v3.AddArg(ptr)
   9397 		v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   9398 		v4.AuxInt = 0
   9399 		v3.AddArg(v4)
   9400 		v3.AddArg(mem)
   9401 		v1.AddArg(v3)
   9402 		v.AddArg(v1)
   9403 		return true
   9404 	}
   9405 	// match: (Zero [8] {t} ptr mem)
   9406 	// cond: t.(*types.Type).Alignment()%4 == 0
    9407 	// result: (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem))
   9408 	for {
   9409 		if v.AuxInt != 8 {
   9410 			break
   9411 		}
   9412 		t := v.Aux
   9413 		_ = v.Args[1]
   9414 		ptr := v.Args[0]
   9415 		mem := v.Args[1]
   9416 		if !(t.(*types.Type).Alignment()%4 == 0) {
   9417 			break
   9418 		}
   9419 		v.reset(OpMIPSMOVWstore)
   9420 		v.AuxInt = 4
   9421 		v.AddArg(ptr)
   9422 		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   9423 		v0.AuxInt = 0
   9424 		v.AddArg(v0)
   9425 		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
   9426 		v1.AuxInt = 0
   9427 		v1.AddArg(ptr)
   9428 		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   9429 		v2.AuxInt = 0
   9430 		v1.AddArg(v2)
   9431 		v1.AddArg(mem)
   9432 		v.AddArg(v1)
   9433 		return true
   9434 	}
   9435 	return false
   9436 }
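// Continuation of the Zero rules: the 12- and 16-byte word-aligned cases are
// still unrolled, and anything larger than 16 bytes (or not 4-byte aligned)
// becomes a LoweredZero whose second argument is the stop address, ptr
// advanced by s-moveSize(alignment, config); the pseudo-op is presumably
// expanded into a store loop later, during code generation.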
   9437 func rewriteValueMIPS_OpZero_10(v *Value) bool {
   9438 	b := v.Block
   9439 	_ = b
   9440 	config := b.Func.Config
   9441 	_ = config
   9442 	typ := &b.Func.Config.Types
   9443 	_ = typ
   9444 	// match: (Zero [12] {t} ptr mem)
   9445 	// cond: t.(*types.Type).Alignment()%4 == 0
    9446 	// result: (MOVWstore [8] ptr (MOVWconst [0]) (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem)))
   9447 	for {
   9448 		if v.AuxInt != 12 {
   9449 			break
   9450 		}
   9451 		t := v.Aux
   9452 		_ = v.Args[1]
   9453 		ptr := v.Args[0]
   9454 		mem := v.Args[1]
   9455 		if !(t.(*types.Type).Alignment()%4 == 0) {
   9456 			break
   9457 		}
   9458 		v.reset(OpMIPSMOVWstore)
   9459 		v.AuxInt = 8
   9460 		v.AddArg(ptr)
   9461 		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   9462 		v0.AuxInt = 0
   9463 		v.AddArg(v0)
   9464 		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
   9465 		v1.AuxInt = 4
   9466 		v1.AddArg(ptr)
   9467 		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   9468 		v2.AuxInt = 0
   9469 		v1.AddArg(v2)
   9470 		v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
   9471 		v3.AuxInt = 0
   9472 		v3.AddArg(ptr)
   9473 		v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   9474 		v4.AuxInt = 0
   9475 		v3.AddArg(v4)
   9476 		v3.AddArg(mem)
   9477 		v1.AddArg(v3)
   9478 		v.AddArg(v1)
   9479 		return true
   9480 	}
   9481 	// match: (Zero [16] {t} ptr mem)
   9482 	// cond: t.(*types.Type).Alignment()%4 == 0
    9483 	// result: (MOVWstore [12] ptr (MOVWconst [0]) (MOVWstore [8] ptr (MOVWconst [0]) (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem))))
   9484 	for {
   9485 		if v.AuxInt != 16 {
   9486 			break
   9487 		}
   9488 		t := v.Aux
   9489 		_ = v.Args[1]
   9490 		ptr := v.Args[0]
   9491 		mem := v.Args[1]
   9492 		if !(t.(*types.Type).Alignment()%4 == 0) {
   9493 			break
   9494 		}
   9495 		v.reset(OpMIPSMOVWstore)
   9496 		v.AuxInt = 12
   9497 		v.AddArg(ptr)
   9498 		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   9499 		v0.AuxInt = 0
   9500 		v.AddArg(v0)
   9501 		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
   9502 		v1.AuxInt = 8
   9503 		v1.AddArg(ptr)
   9504 		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   9505 		v2.AuxInt = 0
   9506 		v1.AddArg(v2)
   9507 		v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
   9508 		v3.AuxInt = 4
   9509 		v3.AddArg(ptr)
   9510 		v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   9511 		v4.AuxInt = 0
   9512 		v3.AddArg(v4)
   9513 		v5 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
   9514 		v5.AuxInt = 0
   9515 		v5.AddArg(ptr)
   9516 		v6 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   9517 		v6.AuxInt = 0
   9518 		v5.AddArg(v6)
   9519 		v5.AddArg(mem)
   9520 		v3.AddArg(v5)
   9521 		v1.AddArg(v3)
   9522 		v.AddArg(v1)
   9523 		return true
   9524 	}
   9525 	// match: (Zero [s] {t} ptr mem)
    9526 	// cond: (s > 16 || t.(*types.Type).Alignment()%4 != 0)
    9527 	// result: (LoweredZero [t.(*types.Type).Alignment()] ptr (ADDconst <ptr.Type> ptr [s-moveSize(t.(*types.Type).Alignment(), config)]) mem)
   9528 	for {
   9529 		s := v.AuxInt
   9530 		t := v.Aux
   9531 		_ = v.Args[1]
   9532 		ptr := v.Args[0]
   9533 		mem := v.Args[1]
   9534 		if !(s > 16 || t.(*types.Type).Alignment()%4 != 0) {
   9535 			break
   9536 		}
   9537 		v.reset(OpMIPSLoweredZero)
   9538 		v.AuxInt = t.(*types.Type).Alignment()
   9539 		v.AddArg(ptr)
   9540 		v0 := b.NewValue0(v.Pos, OpMIPSADDconst, ptr.Type)
   9541 		v0.AuxInt = s - moveSize(t.(*types.Type).Alignment(), config)
   9542 		v0.AddArg(ptr)
   9543 		v.AddArg(v0)
   9544 		v.AddArg(mem)
   9545 		return true
   9546 	}
   9547 	return false
   9548 }
   9549 func rewriteValueMIPS_OpZeroExt16to32_0(v *Value) bool {
   9550 	// match: (ZeroExt16to32 x)
   9551 	// cond:
   9552 	// result: (MOVHUreg x)
   9553 	for {
   9554 		x := v.Args[0]
   9555 		v.reset(OpMIPSMOVHUreg)
   9556 		v.AddArg(x)
   9557 		return true
   9558 	}
   9559 }
   9560 func rewriteValueMIPS_OpZeroExt8to16_0(v *Value) bool {
   9561 	// match: (ZeroExt8to16 x)
   9562 	// cond:
   9563 	// result: (MOVBUreg x)
   9564 	for {
   9565 		x := v.Args[0]
   9566 		v.reset(OpMIPSMOVBUreg)
   9567 		v.AddArg(x)
   9568 		return true
   9569 	}
   9570 }
   9571 func rewriteValueMIPS_OpZeroExt8to32_0(v *Value) bool {
   9572 	// match: (ZeroExt8to32 x)
   9573 	// cond:
   9574 	// result: (MOVBUreg x)
   9575 	for {
   9576 		x := v.Args[0]
   9577 		v.reset(OpMIPSMOVBUreg)
   9578 		v.AddArg(x)
   9579 		return true
   9580 	}
   9581 }
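// Zeromask builds a 0/-1 mask from whether x is non-zero: SGTU x 0 is 1
// exactly when x != 0, and negating that gives -1 for non-zero inputs and 0
// otherwise.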
   9582 func rewriteValueMIPS_OpZeromask_0(v *Value) bool {
   9583 	b := v.Block
   9584 	_ = b
   9585 	typ := &b.Func.Config.Types
   9586 	_ = typ
   9587 	// match: (Zeromask x)
   9588 	// cond:
   9589 	// result: (NEG (SGTU x (MOVWconst [0])))
   9590 	for {
   9591 		x := v.Args[0]
   9592 		v.reset(OpMIPSNEG)
   9593 		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
   9594 		v0.AddArg(x)
   9595 		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   9596 		v1.AuxInt = 0
   9597 		v0.AddArg(v1)
   9598 		v.AddArg(v0)
   9599 		return true
   9600 	}
   9601 }
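// rewriteBlockMIPS retargets control-flow blocks rather than values: a
// generic If becomes NE on its boolean control, an EQ whose control is an
// inverted comparison (XORconst [1] ...) flips to NE, comparisons against
// zero turn into the dedicated GEZ/LEZ style blocks, floating-point flag
// checks select FPF/FPT, and a constant control collapses the block to
// First, swapping the successors when the branch is statically not taken.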
   9602 func rewriteBlockMIPS(b *Block) bool {
   9603 	config := b.Func.Config
   9604 	_ = config
   9605 	fe := b.Func.fe
   9606 	_ = fe
   9607 	typ := &config.Types
   9608 	_ = typ
   9609 	switch b.Kind {
   9610 	case BlockMIPSEQ:
   9611 		// match: (EQ (FPFlagTrue cmp) yes no)
   9612 		// cond:
   9613 		// result: (FPF cmp yes no)
   9614 		for {
   9615 			v := b.Control
   9616 			if v.Op != OpMIPSFPFlagTrue {
   9617 				break
   9618 			}
   9619 			cmp := v.Args[0]
   9620 			b.Kind = BlockMIPSFPF
   9621 			b.SetControl(cmp)
   9622 			b.Aux = nil
   9623 			return true
   9624 		}
   9625 		// match: (EQ (FPFlagFalse cmp) yes no)
   9626 		// cond:
   9627 		// result: (FPT cmp yes no)
   9628 		for {
   9629 			v := b.Control
   9630 			if v.Op != OpMIPSFPFlagFalse {
   9631 				break
   9632 			}
   9633 			cmp := v.Args[0]
   9634 			b.Kind = BlockMIPSFPT
   9635 			b.SetControl(cmp)
   9636 			b.Aux = nil
   9637 			return true
   9638 		}
   9639 		// match: (EQ (XORconst [1] cmp:(SGT _ _)) yes no)
   9640 		// cond:
   9641 		// result: (NE cmp yes no)
   9642 		for {
   9643 			v := b.Control
   9644 			if v.Op != OpMIPSXORconst {
   9645 				break
   9646 			}
   9647 			if v.AuxInt != 1 {
   9648 				break
   9649 			}
   9650 			cmp := v.Args[0]
   9651 			if cmp.Op != OpMIPSSGT {
   9652 				break
   9653 			}
   9654 			_ = cmp.Args[1]
   9655 			b.Kind = BlockMIPSNE
   9656 			b.SetControl(cmp)
   9657 			b.Aux = nil
   9658 			return true
   9659 		}
   9660 		// match: (EQ (XORconst [1] cmp:(SGTU _ _)) yes no)
   9661 		// cond:
   9662 		// result: (NE cmp yes no)
   9663 		for {
   9664 			v := b.Control
   9665 			if v.Op != OpMIPSXORconst {
   9666 				break
   9667 			}
   9668 			if v.AuxInt != 1 {
   9669 				break
   9670 			}
   9671 			cmp := v.Args[0]
   9672 			if cmp.Op != OpMIPSSGTU {
   9673 				break
   9674 			}
   9675 			_ = cmp.Args[1]
   9676 			b.Kind = BlockMIPSNE
   9677 			b.SetControl(cmp)
   9678 			b.Aux = nil
   9679 			return true
   9680 		}
   9681 		// match: (EQ (XORconst [1] cmp:(SGTconst _)) yes no)
   9682 		// cond:
   9683 		// result: (NE cmp yes no)
   9684 		for {
   9685 			v := b.Control
   9686 			if v.Op != OpMIPSXORconst {
   9687 				break
   9688 			}
   9689 			if v.AuxInt != 1 {
   9690 				break
   9691 			}
   9692 			cmp := v.Args[0]
   9693 			if cmp.Op != OpMIPSSGTconst {
   9694 				break
   9695 			}
   9696 			b.Kind = BlockMIPSNE
   9697 			b.SetControl(cmp)
   9698 			b.Aux = nil
   9699 			return true
   9700 		}
   9701 		// match: (EQ (XORconst [1] cmp:(SGTUconst _)) yes no)
   9702 		// cond:
   9703 		// result: (NE cmp yes no)
   9704 		for {
   9705 			v := b.Control
   9706 			if v.Op != OpMIPSXORconst {
   9707 				break
   9708 			}
   9709 			if v.AuxInt != 1 {
   9710 				break
   9711 			}
   9712 			cmp := v.Args[0]
   9713 			if cmp.Op != OpMIPSSGTUconst {
   9714 				break
   9715 			}
   9716 			b.Kind = BlockMIPSNE
   9717 			b.SetControl(cmp)
   9718 			b.Aux = nil
   9719 			return true
   9720 		}
   9721 		// match: (EQ (XORconst [1] cmp:(SGTzero _)) yes no)
   9722 		// cond:
   9723 		// result: (NE cmp yes no)
   9724 		for {
   9725 			v := b.Control
   9726 			if v.Op != OpMIPSXORconst {
   9727 				break
   9728 			}
   9729 			if v.AuxInt != 1 {
   9730 				break
   9731 			}
   9732 			cmp := v.Args[0]
   9733 			if cmp.Op != OpMIPSSGTzero {
   9734 				break
   9735 			}
   9736 			b.Kind = BlockMIPSNE
   9737 			b.SetControl(cmp)
   9738 			b.Aux = nil
   9739 			return true
   9740 		}
   9741 		// match: (EQ (XORconst [1] cmp:(SGTUzero _)) yes no)
   9742 		// cond:
   9743 		// result: (NE cmp yes no)
   9744 		for {
   9745 			v := b.Control
   9746 			if v.Op != OpMIPSXORconst {
   9747 				break
   9748 			}
   9749 			if v.AuxInt != 1 {
   9750 				break
   9751 			}
   9752 			cmp := v.Args[0]
   9753 			if cmp.Op != OpMIPSSGTUzero {
   9754 				break
   9755 			}
   9756 			b.Kind = BlockMIPSNE
   9757 			b.SetControl(cmp)
   9758 			b.Aux = nil
   9759 			return true
   9760 		}
   9761 		// match: (EQ (SGTUconst [1] x) yes no)
   9762 		// cond:
   9763 		// result: (NE x yes no)
   9764 		for {
   9765 			v := b.Control
   9766 			if v.Op != OpMIPSSGTUconst {
   9767 				break
   9768 			}
   9769 			if v.AuxInt != 1 {
   9770 				break
   9771 			}
   9772 			x := v.Args[0]
   9773 			b.Kind = BlockMIPSNE
   9774 			b.SetControl(x)
   9775 			b.Aux = nil
   9776 			return true
   9777 		}
   9778 		// match: (EQ (SGTUzero x) yes no)
   9779 		// cond:
   9780 		// result: (EQ x yes no)
   9781 		for {
   9782 			v := b.Control
   9783 			if v.Op != OpMIPSSGTUzero {
   9784 				break
   9785 			}
   9786 			x := v.Args[0]
   9787 			b.Kind = BlockMIPSEQ
   9788 			b.SetControl(x)
   9789 			b.Aux = nil
   9790 			return true
   9791 		}
   9792 		// match: (EQ (SGTconst [0] x) yes no)
   9793 		// cond:
   9794 		// result: (GEZ x yes no)
   9795 		for {
   9796 			v := b.Control
   9797 			if v.Op != OpMIPSSGTconst {
   9798 				break
   9799 			}
   9800 			if v.AuxInt != 0 {
   9801 				break
   9802 			}
   9803 			x := v.Args[0]
   9804 			b.Kind = BlockMIPSGEZ
   9805 			b.SetControl(x)
   9806 			b.Aux = nil
   9807 			return true
   9808 		}
   9809 		// match: (EQ (SGTzero x) yes no)
   9810 		// cond:
   9811 		// result: (LEZ x yes no)
   9812 		for {
   9813 			v := b.Control
   9814 			if v.Op != OpMIPSSGTzero {
   9815 				break
   9816 			}
   9817 			x := v.Args[0]
   9818 			b.Kind = BlockMIPSLEZ
   9819 			b.SetControl(x)
   9820 			b.Aux = nil
   9821 			return true
   9822 		}
   9823 		// match: (EQ (MOVWconst [0]) yes no)
   9824 		// cond:
   9825 		// result: (First nil yes no)
   9826 		for {
   9827 			v := b.Control
   9828 			if v.Op != OpMIPSMOVWconst {
   9829 				break
   9830 			}
   9831 			if v.AuxInt != 0 {
   9832 				break
   9833 			}
   9834 			b.Kind = BlockFirst
   9835 			b.SetControl(nil)
   9836 			b.Aux = nil
   9837 			return true
   9838 		}
   9839 		// match: (EQ (MOVWconst [c]) yes no)
   9840 		// cond: c != 0
   9841 		// result: (First nil no yes)
   9842 		for {
   9843 			v := b.Control
   9844 			if v.Op != OpMIPSMOVWconst {
   9845 				break
   9846 			}
   9847 			c := v.AuxInt
   9848 			if !(c != 0) {
   9849 				break
   9850 			}
   9851 			b.Kind = BlockFirst
   9852 			b.SetControl(nil)
   9853 			b.Aux = nil
   9854 			b.swapSuccessors()
   9855 			return true
   9856 		}
   9857 	case BlockMIPSGEZ:
   9858 		// match: (GEZ (MOVWconst [c]) yes no)
   9859 		// cond: int32(c) >= 0
   9860 		// result: (First nil yes no)
   9861 		for {
   9862 			v := b.Control
   9863 			if v.Op != OpMIPSMOVWconst {
   9864 				break
   9865 			}
   9866 			c := v.AuxInt
   9867 			if !(int32(c) >= 0) {
   9868 				break
   9869 			}
   9870 			b.Kind = BlockFirst
   9871 			b.SetControl(nil)
   9872 			b.Aux = nil
   9873 			return true
   9874 		}
   9875 		// match: (GEZ (MOVWconst [c]) yes no)
   9876 		// cond: int32(c) <  0
   9877 		// result: (First nil no yes)
   9878 		for {
   9879 			v := b.Control
   9880 			if v.Op != OpMIPSMOVWconst {
   9881 				break
   9882 			}
   9883 			c := v.AuxInt
   9884 			if !(int32(c) < 0) {
   9885 				break
   9886 			}
   9887 			b.Kind = BlockFirst
   9888 			b.SetControl(nil)
   9889 			b.Aux = nil
   9890 			b.swapSuccessors()
   9891 			return true
   9892 		}
   9893 	case BlockMIPSGTZ:
   9894 		// match: (GTZ (MOVWconst [c]) yes no)
   9895 		// cond: int32(c) >  0
   9896 		// result: (First nil yes no)
   9897 		for {
   9898 			v := b.Control
   9899 			if v.Op != OpMIPSMOVWconst {
   9900 				break
   9901 			}
   9902 			c := v.AuxInt
   9903 			if !(int32(c) > 0) {
   9904 				break
   9905 			}
   9906 			b.Kind = BlockFirst
   9907 			b.SetControl(nil)
   9908 			b.Aux = nil
   9909 			return true
   9910 		}
   9911 		// match: (GTZ (MOVWconst [c]) yes no)
   9912 		// cond: int32(c) <= 0
   9913 		// result: (First nil no yes)
   9914 		for {
   9915 			v := b.Control
   9916 			if v.Op != OpMIPSMOVWconst {
   9917 				break
   9918 			}
   9919 			c := v.AuxInt
   9920 			if !(int32(c) <= 0) {
   9921 				break
   9922 			}
   9923 			b.Kind = BlockFirst
   9924 			b.SetControl(nil)
   9925 			b.Aux = nil
   9926 			b.swapSuccessors()
   9927 			return true
   9928 		}
   9929 	case BlockIf:
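         		// The generic If block tests a boolean, which MIPS materializes as 0 or 1,
         		// so it lowers to an NE test against zero on that value.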
   9930 		// match: (If cond yes no)
   9931 		// cond:
   9932 		// result: (NE cond yes no)
   9933 		for {
   9934 			v := b.Control
   9935 			_ = v
   9936 			cond := b.Control
   9937 			b.Kind = BlockMIPSNE
   9938 			b.SetControl(cond)
   9939 			b.Aux = nil
   9940 			return true
   9941 		}
   9942 	case BlockMIPSLEZ:
   9943 		// match: (LEZ (MOVWconst [c]) yes no)
   9944 		// cond: int32(c) <= 0
   9945 		// result: (First nil yes no)
   9946 		for {
   9947 			v := b.Control
   9948 			if v.Op != OpMIPSMOVWconst {
   9949 				break
   9950 			}
   9951 			c := v.AuxInt
   9952 			if !(int32(c) <= 0) {
   9953 				break
   9954 			}
   9955 			b.Kind = BlockFirst
   9956 			b.SetControl(nil)
   9957 			b.Aux = nil
   9958 			return true
   9959 		}
   9960 		// match: (LEZ (MOVWconst [c]) yes no)
   9961 		// cond: int32(c) >  0
   9962 		// result: (First nil no yes)
   9963 		for {
   9964 			v := b.Control
   9965 			if v.Op != OpMIPSMOVWconst {
   9966 				break
   9967 			}
   9968 			c := v.AuxInt
   9969 			if !(int32(c) > 0) {
   9970 				break
   9971 			}
   9972 			b.Kind = BlockFirst
   9973 			b.SetControl(nil)
   9974 			b.Aux = nil
   9975 			b.swapSuccessors()
   9976 			return true
   9977 		}
   9978 	case BlockMIPSLTZ:
   9979 		// match: (LTZ (MOVWconst [c]) yes no)
   9980 		// cond: int32(c) <  0
   9981 		// result: (First nil yes no)
   9982 		for {
   9983 			v := b.Control
   9984 			if v.Op != OpMIPSMOVWconst {
   9985 				break
   9986 			}
   9987 			c := v.AuxInt
   9988 			if !(int32(c) < 0) {
   9989 				break
   9990 			}
   9991 			b.Kind = BlockFirst
   9992 			b.SetControl(nil)
   9993 			b.Aux = nil
   9994 			return true
   9995 		}
   9996 		// match: (LTZ (MOVWconst [c]) yes no)
   9997 		// cond: int32(c) >= 0
   9998 		// result: (First nil no yes)
   9999 		for {
   10000 			v := b.Control
   10001 			if v.Op != OpMIPSMOVWconst {
   10002 				break
   10003 			}
   10004 			c := v.AuxInt
   10005 			if !(int32(c) >= 0) {
   10006 				break
   10007 			}
   10008 			b.Kind = BlockFirst
   10009 			b.SetControl(nil)
   10010 			b.Aux = nil
   10011 			b.swapSuccessors()
   10012 			return true
   10013 		}
   10014 	case BlockMIPSNE:
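         		// NE mirrors the EQ case above with the branch sense inverted: it branches
         		// to yes when the control is nonzero. FPFlagTrue/FPFlagFalse expose the FP
         		// condition flag, so those controls become direct FPT/FPF branches.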
   10015 		// match: (NE (FPFlagTrue cmp) yes no)
   10016 		// cond:
   10017 		// result: (FPT cmp yes no)
   10018 		for {
   10019 			v := b.Control
   10020 			if v.Op != OpMIPSFPFlagTrue {
   10021 				break
   10022 			}
   10023 			cmp := v.Args[0]
   10024 			b.Kind = BlockMIPSFPT
   10025 			b.SetControl(cmp)
   10026 			b.Aux = nil
   10027 			return true
   10028 		}
   10029 		// match: (NE (FPFlagFalse cmp) yes no)
   10030 		// cond:
   10031 		// result: (FPF cmp yes no)
   10032 		for {
   10033 			v := b.Control
   10034 			if v.Op != OpMIPSFPFlagFalse {
   10035 				break
   10036 			}
   10037 			cmp := v.Args[0]
   10038 			b.Kind = BlockMIPSFPF
   10039 			b.SetControl(cmp)
   10040 			b.Aux = nil
   10041 			return true
   10042 		}
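         		// XORconst [1] negates a 0/1 comparison result, so NE of the negation is EQ
         		// of the underlying comparison; the next six rules strip the negation for
         		// each SGT variant.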
   10043 		// match: (NE (XORconst [1] cmp:(SGT _ _)) yes no)
   10044 		// cond:
   10045 		// result: (EQ cmp yes no)
   10046 		for {
   10047 			v := b.Control
   10048 			if v.Op != OpMIPSXORconst {
   10049 				break
   10050 			}
   10051 			if v.AuxInt != 1 {
   10052 				break
   10053 			}
   10054 			cmp := v.Args[0]
   10055 			if cmp.Op != OpMIPSSGT {
   10056 				break
   10057 			}
   10058 			_ = cmp.Args[1]
   10059 			b.Kind = BlockMIPSEQ
   10060 			b.SetControl(cmp)
   10061 			b.Aux = nil
   10062 			return true
   10063 		}
   10064 		// match: (NE (XORconst [1] cmp:(SGTU _ _)) yes no)
   10065 		// cond:
   10066 		// result: (EQ cmp yes no)
   10067 		for {
   10068 			v := b.Control
   10069 			if v.Op != OpMIPSXORconst {
   10070 				break
   10071 			}
   10072 			if v.AuxInt != 1 {
   10073 				break
   10074 			}
   10075 			cmp := v.Args[0]
   10076 			if cmp.Op != OpMIPSSGTU {
   10077 				break
   10078 			}
   10079 			_ = cmp.Args[1]
   10080 			b.Kind = BlockMIPSEQ
   10081 			b.SetControl(cmp)
   10082 			b.Aux = nil
   10083 			return true
   10084 		}
   10085 		// match: (NE (XORconst [1] cmp:(SGTconst _)) yes no)
   10086 		// cond:
   10087 		// result: (EQ cmp yes no)
   10088 		for {
   10089 			v := b.Control
   10090 			if v.Op != OpMIPSXORconst {
   10091 				break
   10092 			}
   10093 			if v.AuxInt != 1 {
   10094 				break
   10095 			}
   10096 			cmp := v.Args[0]
   10097 			if cmp.Op != OpMIPSSGTconst {
   10098 				break
   10099 			}
   10100 			b.Kind = BlockMIPSEQ
   10101 			b.SetControl(cmp)
   10102 			b.Aux = nil
   10103 			return true
   10104 		}
   10105 		// match: (NE (XORconst [1] cmp:(SGTUconst _)) yes no)
   10106 		// cond:
   10107 		// result: (EQ cmp yes no)
   10108 		for {
   10109 			v := b.Control
   10110 			if v.Op != OpMIPSXORconst {
   10111 				break
   10112 			}
   10113 			if v.AuxInt != 1 {
   10114 				break
   10115 			}
   10116 			cmp := v.Args[0]
   10117 			if cmp.Op != OpMIPSSGTUconst {
   10118 				break
   10119 			}
   10120 			b.Kind = BlockMIPSEQ
   10121 			b.SetControl(cmp)
   10122 			b.Aux = nil
   10123 			return true
   10124 		}
   10125 		// match: (NE (XORconst [1] cmp:(SGTzero _)) yes no)
   10126 		// cond:
   10127 		// result: (EQ cmp yes no)
   10128 		for {
   10129 			v := b.Control
   10130 			if v.Op != OpMIPSXORconst {
   10131 				break
   10132 			}
   10133 			if v.AuxInt != 1 {
   10134 				break
   10135 			}
   10136 			cmp := v.Args[0]
   10137 			if cmp.Op != OpMIPSSGTzero {
   10138 				break
   10139 			}
   10140 			b.Kind = BlockMIPSEQ
   10141 			b.SetControl(cmp)
   10142 			b.Aux = nil
   10143 			return true
   10144 		}
   10145 		// match: (NE (XORconst [1] cmp:(SGTUzero _)) yes no)
   10146 		// cond:
   10147 		// result: (EQ cmp yes no)
   10148 		for {
   10149 			v := b.Control
   10150 			if v.Op != OpMIPSXORconst {
   10151 				break
   10152 			}
   10153 			if v.AuxInt != 1 {
   10154 				break
   10155 			}
   10156 			cmp := v.Args[0]
   10157 			if cmp.Op != OpMIPSSGTUzero {
   10158 				break
   10159 			}
   10160 			b.Kind = BlockMIPSEQ
   10161 			b.SetControl(cmp)
   10162 			b.Aux = nil
   10163 			return true
   10164 		}
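         		// As in the EQ case, SGTUconst [1] x is 1 exactly when x == 0, so NE on that
         		// result is an EQ test of x, and SGTUzero/SGTconst/SGTzero fold with the
         		// opposite sense of their EQ counterparts.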
   10165 		// match: (NE (SGTUconst [1] x) yes no)
   10166 		// cond:
   10167 		// result: (EQ x yes no)
   10168 		for {
   10169 			v := b.Control
   10170 			if v.Op != OpMIPSSGTUconst {
   10171 				break
   10172 			}
   10173 			if v.AuxInt != 1 {
   10174 				break
   10175 			}
   10176 			x := v.Args[0]
   10177 			b.Kind = BlockMIPSEQ
   10178 			b.SetControl(x)
   10179 			b.Aux = nil
   10180 			return true
   10181 		}
   10182 		// match: (NE (SGTUzero x) yes no)
   10183 		// cond:
   10184 		// result: (NE x yes no)
   10185 		for {
   10186 			v := b.Control
   10187 			if v.Op != OpMIPSSGTUzero {
   10188 				break
   10189 			}
   10190 			x := v.Args[0]
   10191 			b.Kind = BlockMIPSNE
   10192 			b.SetControl(x)
   10193 			b.Aux = nil
   10194 			return true
   10195 		}
   10196 		// match: (NE (SGTconst [0] x) yes no)
   10197 		// cond:
   10198 		// result: (LTZ x yes no)
   10199 		for {
   10200 			v := b.Control
   10201 			if v.Op != OpMIPSSGTconst {
   10202 				break
   10203 			}
   10204 			if v.AuxInt != 0 {
   10205 				break
   10206 			}
   10207 			x := v.Args[0]
   10208 			b.Kind = BlockMIPSLTZ
   10209 			b.SetControl(x)
   10210 			b.Aux = nil
   10211 			return true
   10212 		}
   10213 		// match: (NE (SGTzero x) yes no)
   10214 		// cond:
   10215 		// result: (GTZ x yes no)
   10216 		for {
   10217 			v := b.Control
   10218 			if v.Op != OpMIPSSGTzero {
   10219 				break
   10220 			}
   10221 			x := v.Args[0]
   10222 			b.Kind = BlockMIPSGTZ
   10223 			b.SetControl(x)
   10224 			b.Aux = nil
   10225 			return true
   10226 		}
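         		// Constant controls again resolve statically, with the successors swapped
         		// relative to the EQ case.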
   10227 		// match: (NE (MOVWconst [0]) yes no)
   10228 		// cond:
   10229 		// result: (First nil no yes)
   10230 		for {
   10231 			v := b.Control
   10232 			if v.Op != OpMIPSMOVWconst {
   10233 				break
   10234 			}
   10235 			if v.AuxInt != 0 {
   10236 				break
   10237 			}
   10238 			b.Kind = BlockFirst
   10239 			b.SetControl(nil)
   10240 			b.Aux = nil
   10241 			b.swapSuccessors()
   10242 			return true
   10243 		}
   10244 		// match: (NE (MOVWconst [c]) yes no)
   10245 		// cond: c != 0
   10246 		// result: (First nil yes no)
   10247 		for {
   10248 			v := b.Control
   10249 			if v.Op != OpMIPSMOVWconst {
   10250 				break
   10251 			}
   10252 			c := v.AuxInt
   10253 			if !(c != 0) {
   10254 				break
   10255 			}
   10256 			b.Kind = BlockFirst
   10257 			b.SetControl(nil)
   10258 			b.Aux = nil
   10259 			return true
   10260 		}
   10261 	}
   10262 	return false
   10263 }
   10264