1 // autogenerated from gen/generic.rules: do not edit! 2 // generated with: cd gen; go run *.go 3 4 package ssa 5 6 import "math" 7 8 var _ = math.MinInt8 // in case not otherwise used 9 func rewriteValuegeneric(v *Value, config *Config) bool { 10 switch v.Op { 11 case OpAdd16: 12 return rewriteValuegeneric_OpAdd16(v, config) 13 case OpAdd32: 14 return rewriteValuegeneric_OpAdd32(v, config) 15 case OpAdd32F: 16 return rewriteValuegeneric_OpAdd32F(v, config) 17 case OpAdd64: 18 return rewriteValuegeneric_OpAdd64(v, config) 19 case OpAdd64F: 20 return rewriteValuegeneric_OpAdd64F(v, config) 21 case OpAdd8: 22 return rewriteValuegeneric_OpAdd8(v, config) 23 case OpAddPtr: 24 return rewriteValuegeneric_OpAddPtr(v, config) 25 case OpAnd16: 26 return rewriteValuegeneric_OpAnd16(v, config) 27 case OpAnd32: 28 return rewriteValuegeneric_OpAnd32(v, config) 29 case OpAnd64: 30 return rewriteValuegeneric_OpAnd64(v, config) 31 case OpAnd8: 32 return rewriteValuegeneric_OpAnd8(v, config) 33 case OpArg: 34 return rewriteValuegeneric_OpArg(v, config) 35 case OpArraySelect: 36 return rewriteValuegeneric_OpArraySelect(v, config) 37 case OpCom16: 38 return rewriteValuegeneric_OpCom16(v, config) 39 case OpCom32: 40 return rewriteValuegeneric_OpCom32(v, config) 41 case OpCom64: 42 return rewriteValuegeneric_OpCom64(v, config) 43 case OpCom8: 44 return rewriteValuegeneric_OpCom8(v, config) 45 case OpConstInterface: 46 return rewriteValuegeneric_OpConstInterface(v, config) 47 case OpConstSlice: 48 return rewriteValuegeneric_OpConstSlice(v, config) 49 case OpConstString: 50 return rewriteValuegeneric_OpConstString(v, config) 51 case OpConvert: 52 return rewriteValuegeneric_OpConvert(v, config) 53 case OpCvt32Fto64F: 54 return rewriteValuegeneric_OpCvt32Fto64F(v, config) 55 case OpCvt64Fto32F: 56 return rewriteValuegeneric_OpCvt64Fto32F(v, config) 57 case OpDiv32F: 58 return rewriteValuegeneric_OpDiv32F(v, config) 59 case OpDiv64: 60 return rewriteValuegeneric_OpDiv64(v, config) 61 case 
OpDiv64F: 62 return rewriteValuegeneric_OpDiv64F(v, config) 63 case OpDiv64u: 64 return rewriteValuegeneric_OpDiv64u(v, config) 65 case OpEq16: 66 return rewriteValuegeneric_OpEq16(v, config) 67 case OpEq32: 68 return rewriteValuegeneric_OpEq32(v, config) 69 case OpEq64: 70 return rewriteValuegeneric_OpEq64(v, config) 71 case OpEq8: 72 return rewriteValuegeneric_OpEq8(v, config) 73 case OpEqB: 74 return rewriteValuegeneric_OpEqB(v, config) 75 case OpEqInter: 76 return rewriteValuegeneric_OpEqInter(v, config) 77 case OpEqPtr: 78 return rewriteValuegeneric_OpEqPtr(v, config) 79 case OpEqSlice: 80 return rewriteValuegeneric_OpEqSlice(v, config) 81 case OpGeq16: 82 return rewriteValuegeneric_OpGeq16(v, config) 83 case OpGeq16U: 84 return rewriteValuegeneric_OpGeq16U(v, config) 85 case OpGeq32: 86 return rewriteValuegeneric_OpGeq32(v, config) 87 case OpGeq32U: 88 return rewriteValuegeneric_OpGeq32U(v, config) 89 case OpGeq64: 90 return rewriteValuegeneric_OpGeq64(v, config) 91 case OpGeq64U: 92 return rewriteValuegeneric_OpGeq64U(v, config) 93 case OpGeq8: 94 return rewriteValuegeneric_OpGeq8(v, config) 95 case OpGeq8U: 96 return rewriteValuegeneric_OpGeq8U(v, config) 97 case OpGreater16: 98 return rewriteValuegeneric_OpGreater16(v, config) 99 case OpGreater16U: 100 return rewriteValuegeneric_OpGreater16U(v, config) 101 case OpGreater32: 102 return rewriteValuegeneric_OpGreater32(v, config) 103 case OpGreater32U: 104 return rewriteValuegeneric_OpGreater32U(v, config) 105 case OpGreater64: 106 return rewriteValuegeneric_OpGreater64(v, config) 107 case OpGreater64U: 108 return rewriteValuegeneric_OpGreater64U(v, config) 109 case OpGreater8: 110 return rewriteValuegeneric_OpGreater8(v, config) 111 case OpGreater8U: 112 return rewriteValuegeneric_OpGreater8U(v, config) 113 case OpIMake: 114 return rewriteValuegeneric_OpIMake(v, config) 115 case OpIsInBounds: 116 return rewriteValuegeneric_OpIsInBounds(v, config) 117 case OpIsSliceInBounds: 118 return 
rewriteValuegeneric_OpIsSliceInBounds(v, config) 119 case OpLeq16: 120 return rewriteValuegeneric_OpLeq16(v, config) 121 case OpLeq16U: 122 return rewriteValuegeneric_OpLeq16U(v, config) 123 case OpLeq32: 124 return rewriteValuegeneric_OpLeq32(v, config) 125 case OpLeq32U: 126 return rewriteValuegeneric_OpLeq32U(v, config) 127 case OpLeq64: 128 return rewriteValuegeneric_OpLeq64(v, config) 129 case OpLeq64U: 130 return rewriteValuegeneric_OpLeq64U(v, config) 131 case OpLeq8: 132 return rewriteValuegeneric_OpLeq8(v, config) 133 case OpLeq8U: 134 return rewriteValuegeneric_OpLeq8U(v, config) 135 case OpLess16: 136 return rewriteValuegeneric_OpLess16(v, config) 137 case OpLess16U: 138 return rewriteValuegeneric_OpLess16U(v, config) 139 case OpLess32: 140 return rewriteValuegeneric_OpLess32(v, config) 141 case OpLess32U: 142 return rewriteValuegeneric_OpLess32U(v, config) 143 case OpLess64: 144 return rewriteValuegeneric_OpLess64(v, config) 145 case OpLess64U: 146 return rewriteValuegeneric_OpLess64U(v, config) 147 case OpLess8: 148 return rewriteValuegeneric_OpLess8(v, config) 149 case OpLess8U: 150 return rewriteValuegeneric_OpLess8U(v, config) 151 case OpLoad: 152 return rewriteValuegeneric_OpLoad(v, config) 153 case OpLsh16x16: 154 return rewriteValuegeneric_OpLsh16x16(v, config) 155 case OpLsh16x32: 156 return rewriteValuegeneric_OpLsh16x32(v, config) 157 case OpLsh16x64: 158 return rewriteValuegeneric_OpLsh16x64(v, config) 159 case OpLsh16x8: 160 return rewriteValuegeneric_OpLsh16x8(v, config) 161 case OpLsh32x16: 162 return rewriteValuegeneric_OpLsh32x16(v, config) 163 case OpLsh32x32: 164 return rewriteValuegeneric_OpLsh32x32(v, config) 165 case OpLsh32x64: 166 return rewriteValuegeneric_OpLsh32x64(v, config) 167 case OpLsh32x8: 168 return rewriteValuegeneric_OpLsh32x8(v, config) 169 case OpLsh64x16: 170 return rewriteValuegeneric_OpLsh64x16(v, config) 171 case OpLsh64x32: 172 return rewriteValuegeneric_OpLsh64x32(v, config) 173 case OpLsh64x64: 174 return 
rewriteValuegeneric_OpLsh64x64(v, config) 175 case OpLsh64x8: 176 return rewriteValuegeneric_OpLsh64x8(v, config) 177 case OpLsh8x16: 178 return rewriteValuegeneric_OpLsh8x16(v, config) 179 case OpLsh8x32: 180 return rewriteValuegeneric_OpLsh8x32(v, config) 181 case OpLsh8x64: 182 return rewriteValuegeneric_OpLsh8x64(v, config) 183 case OpLsh8x8: 184 return rewriteValuegeneric_OpLsh8x8(v, config) 185 case OpMod16: 186 return rewriteValuegeneric_OpMod16(v, config) 187 case OpMod16u: 188 return rewriteValuegeneric_OpMod16u(v, config) 189 case OpMod32: 190 return rewriteValuegeneric_OpMod32(v, config) 191 case OpMod32u: 192 return rewriteValuegeneric_OpMod32u(v, config) 193 case OpMod64: 194 return rewriteValuegeneric_OpMod64(v, config) 195 case OpMod64u: 196 return rewriteValuegeneric_OpMod64u(v, config) 197 case OpMod8: 198 return rewriteValuegeneric_OpMod8(v, config) 199 case OpMod8u: 200 return rewriteValuegeneric_OpMod8u(v, config) 201 case OpMul16: 202 return rewriteValuegeneric_OpMul16(v, config) 203 case OpMul32: 204 return rewriteValuegeneric_OpMul32(v, config) 205 case OpMul32F: 206 return rewriteValuegeneric_OpMul32F(v, config) 207 case OpMul64: 208 return rewriteValuegeneric_OpMul64(v, config) 209 case OpMul64F: 210 return rewriteValuegeneric_OpMul64F(v, config) 211 case OpMul8: 212 return rewriteValuegeneric_OpMul8(v, config) 213 case OpNeg16: 214 return rewriteValuegeneric_OpNeg16(v, config) 215 case OpNeg32: 216 return rewriteValuegeneric_OpNeg32(v, config) 217 case OpNeg64: 218 return rewriteValuegeneric_OpNeg64(v, config) 219 case OpNeg8: 220 return rewriteValuegeneric_OpNeg8(v, config) 221 case OpNeq16: 222 return rewriteValuegeneric_OpNeq16(v, config) 223 case OpNeq32: 224 return rewriteValuegeneric_OpNeq32(v, config) 225 case OpNeq64: 226 return rewriteValuegeneric_OpNeq64(v, config) 227 case OpNeq8: 228 return rewriteValuegeneric_OpNeq8(v, config) 229 case OpNeqB: 230 return rewriteValuegeneric_OpNeqB(v, config) 231 case OpNeqInter: 232 return 
rewriteValuegeneric_OpNeqInter(v, config) 233 case OpNeqPtr: 234 return rewriteValuegeneric_OpNeqPtr(v, config) 235 case OpNeqSlice: 236 return rewriteValuegeneric_OpNeqSlice(v, config) 237 case OpNilCheck: 238 return rewriteValuegeneric_OpNilCheck(v, config) 239 case OpNot: 240 return rewriteValuegeneric_OpNot(v, config) 241 case OpOffPtr: 242 return rewriteValuegeneric_OpOffPtr(v, config) 243 case OpOr16: 244 return rewriteValuegeneric_OpOr16(v, config) 245 case OpOr32: 246 return rewriteValuegeneric_OpOr32(v, config) 247 case OpOr64: 248 return rewriteValuegeneric_OpOr64(v, config) 249 case OpOr8: 250 return rewriteValuegeneric_OpOr8(v, config) 251 case OpPhi: 252 return rewriteValuegeneric_OpPhi(v, config) 253 case OpPtrIndex: 254 return rewriteValuegeneric_OpPtrIndex(v, config) 255 case OpRsh16Ux16: 256 return rewriteValuegeneric_OpRsh16Ux16(v, config) 257 case OpRsh16Ux32: 258 return rewriteValuegeneric_OpRsh16Ux32(v, config) 259 case OpRsh16Ux64: 260 return rewriteValuegeneric_OpRsh16Ux64(v, config) 261 case OpRsh16Ux8: 262 return rewriteValuegeneric_OpRsh16Ux8(v, config) 263 case OpRsh16x16: 264 return rewriteValuegeneric_OpRsh16x16(v, config) 265 case OpRsh16x32: 266 return rewriteValuegeneric_OpRsh16x32(v, config) 267 case OpRsh16x64: 268 return rewriteValuegeneric_OpRsh16x64(v, config) 269 case OpRsh16x8: 270 return rewriteValuegeneric_OpRsh16x8(v, config) 271 case OpRsh32Ux16: 272 return rewriteValuegeneric_OpRsh32Ux16(v, config) 273 case OpRsh32Ux32: 274 return rewriteValuegeneric_OpRsh32Ux32(v, config) 275 case OpRsh32Ux64: 276 return rewriteValuegeneric_OpRsh32Ux64(v, config) 277 case OpRsh32Ux8: 278 return rewriteValuegeneric_OpRsh32Ux8(v, config) 279 case OpRsh32x16: 280 return rewriteValuegeneric_OpRsh32x16(v, config) 281 case OpRsh32x32: 282 return rewriteValuegeneric_OpRsh32x32(v, config) 283 case OpRsh32x64: 284 return rewriteValuegeneric_OpRsh32x64(v, config) 285 case OpRsh32x8: 286 return rewriteValuegeneric_OpRsh32x8(v, config) 287 case 
OpRsh64Ux16: 288 return rewriteValuegeneric_OpRsh64Ux16(v, config) 289 case OpRsh64Ux32: 290 return rewriteValuegeneric_OpRsh64Ux32(v, config) 291 case OpRsh64Ux64: 292 return rewriteValuegeneric_OpRsh64Ux64(v, config) 293 case OpRsh64Ux8: 294 return rewriteValuegeneric_OpRsh64Ux8(v, config) 295 case OpRsh64x16: 296 return rewriteValuegeneric_OpRsh64x16(v, config) 297 case OpRsh64x32: 298 return rewriteValuegeneric_OpRsh64x32(v, config) 299 case OpRsh64x64: 300 return rewriteValuegeneric_OpRsh64x64(v, config) 301 case OpRsh64x8: 302 return rewriteValuegeneric_OpRsh64x8(v, config) 303 case OpRsh8Ux16: 304 return rewriteValuegeneric_OpRsh8Ux16(v, config) 305 case OpRsh8Ux32: 306 return rewriteValuegeneric_OpRsh8Ux32(v, config) 307 case OpRsh8Ux64: 308 return rewriteValuegeneric_OpRsh8Ux64(v, config) 309 case OpRsh8Ux8: 310 return rewriteValuegeneric_OpRsh8Ux8(v, config) 311 case OpRsh8x16: 312 return rewriteValuegeneric_OpRsh8x16(v, config) 313 case OpRsh8x32: 314 return rewriteValuegeneric_OpRsh8x32(v, config) 315 case OpRsh8x64: 316 return rewriteValuegeneric_OpRsh8x64(v, config) 317 case OpRsh8x8: 318 return rewriteValuegeneric_OpRsh8x8(v, config) 319 case OpSignExt16to32: 320 return rewriteValuegeneric_OpSignExt16to32(v, config) 321 case OpSignExt16to64: 322 return rewriteValuegeneric_OpSignExt16to64(v, config) 323 case OpSignExt32to64: 324 return rewriteValuegeneric_OpSignExt32to64(v, config) 325 case OpSignExt8to16: 326 return rewriteValuegeneric_OpSignExt8to16(v, config) 327 case OpSignExt8to32: 328 return rewriteValuegeneric_OpSignExt8to32(v, config) 329 case OpSignExt8to64: 330 return rewriteValuegeneric_OpSignExt8to64(v, config) 331 case OpSliceCap: 332 return rewriteValuegeneric_OpSliceCap(v, config) 333 case OpSliceLen: 334 return rewriteValuegeneric_OpSliceLen(v, config) 335 case OpSlicePtr: 336 return rewriteValuegeneric_OpSlicePtr(v, config) 337 case OpSlicemask: 338 return rewriteValuegeneric_OpSlicemask(v, config) 339 case OpSqrt: 340 return 
rewriteValuegeneric_OpSqrt(v, config) 341 case OpStore: 342 return rewriteValuegeneric_OpStore(v, config) 343 case OpStringLen: 344 return rewriteValuegeneric_OpStringLen(v, config) 345 case OpStringPtr: 346 return rewriteValuegeneric_OpStringPtr(v, config) 347 case OpStructSelect: 348 return rewriteValuegeneric_OpStructSelect(v, config) 349 case OpSub16: 350 return rewriteValuegeneric_OpSub16(v, config) 351 case OpSub32: 352 return rewriteValuegeneric_OpSub32(v, config) 353 case OpSub32F: 354 return rewriteValuegeneric_OpSub32F(v, config) 355 case OpSub64: 356 return rewriteValuegeneric_OpSub64(v, config) 357 case OpSub64F: 358 return rewriteValuegeneric_OpSub64F(v, config) 359 case OpSub8: 360 return rewriteValuegeneric_OpSub8(v, config) 361 case OpTrunc16to8: 362 return rewriteValuegeneric_OpTrunc16to8(v, config) 363 case OpTrunc32to16: 364 return rewriteValuegeneric_OpTrunc32to16(v, config) 365 case OpTrunc32to8: 366 return rewriteValuegeneric_OpTrunc32to8(v, config) 367 case OpTrunc64to16: 368 return rewriteValuegeneric_OpTrunc64to16(v, config) 369 case OpTrunc64to32: 370 return rewriteValuegeneric_OpTrunc64to32(v, config) 371 case OpTrunc64to8: 372 return rewriteValuegeneric_OpTrunc64to8(v, config) 373 case OpXor16: 374 return rewriteValuegeneric_OpXor16(v, config) 375 case OpXor32: 376 return rewriteValuegeneric_OpXor32(v, config) 377 case OpXor64: 378 return rewriteValuegeneric_OpXor64(v, config) 379 case OpXor8: 380 return rewriteValuegeneric_OpXor8(v, config) 381 case OpZero: 382 return rewriteValuegeneric_OpZero(v, config) 383 case OpZeroExt16to32: 384 return rewriteValuegeneric_OpZeroExt16to32(v, config) 385 case OpZeroExt16to64: 386 return rewriteValuegeneric_OpZeroExt16to64(v, config) 387 case OpZeroExt32to64: 388 return rewriteValuegeneric_OpZeroExt32to64(v, config) 389 case OpZeroExt8to16: 390 return rewriteValuegeneric_OpZeroExt8to16(v, config) 391 case OpZeroExt8to32: 392 return rewriteValuegeneric_OpZeroExt8to32(v, config) 393 case 
OpZeroExt8to64: 394 return rewriteValuegeneric_OpZeroExt8to64(v, config) 395 } 396 return false 397 } 398 func rewriteValuegeneric_OpAdd16(v *Value, config *Config) bool { 399 b := v.Block 400 _ = b 401 // match: (Add16 (Const16 [c]) (Const16 [d])) 402 // cond: 403 // result: (Const16 [int64(int16(c+d))]) 404 for { 405 v_0 := v.Args[0] 406 if v_0.Op != OpConst16 { 407 break 408 } 409 c := v_0.AuxInt 410 v_1 := v.Args[1] 411 if v_1.Op != OpConst16 { 412 break 413 } 414 d := v_1.AuxInt 415 v.reset(OpConst16) 416 v.AuxInt = int64(int16(c + d)) 417 return true 418 } 419 // match: (Add16 x (Const16 <t> [c])) 420 // cond: x.Op != OpConst16 421 // result: (Add16 (Const16 <t> [c]) x) 422 for { 423 x := v.Args[0] 424 v_1 := v.Args[1] 425 if v_1.Op != OpConst16 { 426 break 427 } 428 t := v_1.Type 429 c := v_1.AuxInt 430 if !(x.Op != OpConst16) { 431 break 432 } 433 v.reset(OpAdd16) 434 v0 := b.NewValue0(v.Line, OpConst16, t) 435 v0.AuxInt = c 436 v.AddArg(v0) 437 v.AddArg(x) 438 return true 439 } 440 // match: (Add16 (Const16 [0]) x) 441 // cond: 442 // result: x 443 for { 444 v_0 := v.Args[0] 445 if v_0.Op != OpConst16 { 446 break 447 } 448 if v_0.AuxInt != 0 { 449 break 450 } 451 x := v.Args[1] 452 v.reset(OpCopy) 453 v.Type = x.Type 454 v.AddArg(x) 455 return true 456 } 457 return false 458 } 459 func rewriteValuegeneric_OpAdd32(v *Value, config *Config) bool { 460 b := v.Block 461 _ = b 462 // match: (Add32 (Const32 [c]) (Const32 [d])) 463 // cond: 464 // result: (Const32 [int64(int32(c+d))]) 465 for { 466 v_0 := v.Args[0] 467 if v_0.Op != OpConst32 { 468 break 469 } 470 c := v_0.AuxInt 471 v_1 := v.Args[1] 472 if v_1.Op != OpConst32 { 473 break 474 } 475 d := v_1.AuxInt 476 v.reset(OpConst32) 477 v.AuxInt = int64(int32(c + d)) 478 return true 479 } 480 // match: (Add32 x (Const32 <t> [c])) 481 // cond: x.Op != OpConst32 482 // result: (Add32 (Const32 <t> [c]) x) 483 for { 484 x := v.Args[0] 485 v_1 := v.Args[1] 486 if v_1.Op != OpConst32 { 487 break 488 } 489 t := 
v_1.Type 490 c := v_1.AuxInt 491 if !(x.Op != OpConst32) { 492 break 493 } 494 v.reset(OpAdd32) 495 v0 := b.NewValue0(v.Line, OpConst32, t) 496 v0.AuxInt = c 497 v.AddArg(v0) 498 v.AddArg(x) 499 return true 500 } 501 // match: (Add32 (Const32 [0]) x) 502 // cond: 503 // result: x 504 for { 505 v_0 := v.Args[0] 506 if v_0.Op != OpConst32 { 507 break 508 } 509 if v_0.AuxInt != 0 { 510 break 511 } 512 x := v.Args[1] 513 v.reset(OpCopy) 514 v.Type = x.Type 515 v.AddArg(x) 516 return true 517 } 518 return false 519 } 520 func rewriteValuegeneric_OpAdd32F(v *Value, config *Config) bool { 521 b := v.Block 522 _ = b 523 // match: (Add32F (Const32F [c]) (Const32F [d])) 524 // cond: 525 // result: (Const32F [f2i(float64(i2f32(c) + i2f32(d)))]) 526 for { 527 v_0 := v.Args[0] 528 if v_0.Op != OpConst32F { 529 break 530 } 531 c := v_0.AuxInt 532 v_1 := v.Args[1] 533 if v_1.Op != OpConst32F { 534 break 535 } 536 d := v_1.AuxInt 537 v.reset(OpConst32F) 538 v.AuxInt = f2i(float64(i2f32(c) + i2f32(d))) 539 return true 540 } 541 // match: (Add32F x (Const32F [0])) 542 // cond: 543 // result: x 544 for { 545 x := v.Args[0] 546 v_1 := v.Args[1] 547 if v_1.Op != OpConst32F { 548 break 549 } 550 if v_1.AuxInt != 0 { 551 break 552 } 553 v.reset(OpCopy) 554 v.Type = x.Type 555 v.AddArg(x) 556 return true 557 } 558 // match: (Add32F (Const32F [0]) x) 559 // cond: 560 // result: x 561 for { 562 v_0 := v.Args[0] 563 if v_0.Op != OpConst32F { 564 break 565 } 566 if v_0.AuxInt != 0 { 567 break 568 } 569 x := v.Args[1] 570 v.reset(OpCopy) 571 v.Type = x.Type 572 v.AddArg(x) 573 return true 574 } 575 return false 576 } 577 func rewriteValuegeneric_OpAdd64(v *Value, config *Config) bool { 578 b := v.Block 579 _ = b 580 // match: (Add64 (Const64 [c]) (Const64 [d])) 581 // cond: 582 // result: (Const64 [c+d]) 583 for { 584 v_0 := v.Args[0] 585 if v_0.Op != OpConst64 { 586 break 587 } 588 c := v_0.AuxInt 589 v_1 := v.Args[1] 590 if v_1.Op != OpConst64 { 591 break 592 } 593 d := v_1.AuxInt 594 
v.reset(OpConst64) 595 v.AuxInt = c + d 596 return true 597 } 598 // match: (Add64 x (Const64 <t> [c])) 599 // cond: x.Op != OpConst64 600 // result: (Add64 (Const64 <t> [c]) x) 601 for { 602 x := v.Args[0] 603 v_1 := v.Args[1] 604 if v_1.Op != OpConst64 { 605 break 606 } 607 t := v_1.Type 608 c := v_1.AuxInt 609 if !(x.Op != OpConst64) { 610 break 611 } 612 v.reset(OpAdd64) 613 v0 := b.NewValue0(v.Line, OpConst64, t) 614 v0.AuxInt = c 615 v.AddArg(v0) 616 v.AddArg(x) 617 return true 618 } 619 // match: (Add64 (Const64 [0]) x) 620 // cond: 621 // result: x 622 for { 623 v_0 := v.Args[0] 624 if v_0.Op != OpConst64 { 625 break 626 } 627 if v_0.AuxInt != 0 { 628 break 629 } 630 x := v.Args[1] 631 v.reset(OpCopy) 632 v.Type = x.Type 633 v.AddArg(x) 634 return true 635 } 636 return false 637 } 638 func rewriteValuegeneric_OpAdd64F(v *Value, config *Config) bool { 639 b := v.Block 640 _ = b 641 // match: (Add64F (Const64F [c]) (Const64F [d])) 642 // cond: 643 // result: (Const64F [f2i(i2f(c) + i2f(d))]) 644 for { 645 v_0 := v.Args[0] 646 if v_0.Op != OpConst64F { 647 break 648 } 649 c := v_0.AuxInt 650 v_1 := v.Args[1] 651 if v_1.Op != OpConst64F { 652 break 653 } 654 d := v_1.AuxInt 655 v.reset(OpConst64F) 656 v.AuxInt = f2i(i2f(c) + i2f(d)) 657 return true 658 } 659 // match: (Add64F x (Const64F [0])) 660 // cond: 661 // result: x 662 for { 663 x := v.Args[0] 664 v_1 := v.Args[1] 665 if v_1.Op != OpConst64F { 666 break 667 } 668 if v_1.AuxInt != 0 { 669 break 670 } 671 v.reset(OpCopy) 672 v.Type = x.Type 673 v.AddArg(x) 674 return true 675 } 676 // match: (Add64F (Const64F [0]) x) 677 // cond: 678 // result: x 679 for { 680 v_0 := v.Args[0] 681 if v_0.Op != OpConst64F { 682 break 683 } 684 if v_0.AuxInt != 0 { 685 break 686 } 687 x := v.Args[1] 688 v.reset(OpCopy) 689 v.Type = x.Type 690 v.AddArg(x) 691 return true 692 } 693 return false 694 } 695 func rewriteValuegeneric_OpAdd8(v *Value, config *Config) bool { 696 b := v.Block 697 _ = b 698 // match: (Add8 (Const8 
[c]) (Const8 [d])) 699 // cond: 700 // result: (Const8 [int64(int8(c+d))]) 701 for { 702 v_0 := v.Args[0] 703 if v_0.Op != OpConst8 { 704 break 705 } 706 c := v_0.AuxInt 707 v_1 := v.Args[1] 708 if v_1.Op != OpConst8 { 709 break 710 } 711 d := v_1.AuxInt 712 v.reset(OpConst8) 713 v.AuxInt = int64(int8(c + d)) 714 return true 715 } 716 // match: (Add8 x (Const8 <t> [c])) 717 // cond: x.Op != OpConst8 718 // result: (Add8 (Const8 <t> [c]) x) 719 for { 720 x := v.Args[0] 721 v_1 := v.Args[1] 722 if v_1.Op != OpConst8 { 723 break 724 } 725 t := v_1.Type 726 c := v_1.AuxInt 727 if !(x.Op != OpConst8) { 728 break 729 } 730 v.reset(OpAdd8) 731 v0 := b.NewValue0(v.Line, OpConst8, t) 732 v0.AuxInt = c 733 v.AddArg(v0) 734 v.AddArg(x) 735 return true 736 } 737 // match: (Add8 (Const8 [0]) x) 738 // cond: 739 // result: x 740 for { 741 v_0 := v.Args[0] 742 if v_0.Op != OpConst8 { 743 break 744 } 745 if v_0.AuxInt != 0 { 746 break 747 } 748 x := v.Args[1] 749 v.reset(OpCopy) 750 v.Type = x.Type 751 v.AddArg(x) 752 return true 753 } 754 return false 755 } 756 func rewriteValuegeneric_OpAddPtr(v *Value, config *Config) bool { 757 b := v.Block 758 _ = b 759 // match: (AddPtr <t> x (Const64 [c])) 760 // cond: 761 // result: (OffPtr <t> x [c]) 762 for { 763 t := v.Type 764 x := v.Args[0] 765 v_1 := v.Args[1] 766 if v_1.Op != OpConst64 { 767 break 768 } 769 c := v_1.AuxInt 770 v.reset(OpOffPtr) 771 v.Type = t 772 v.AuxInt = c 773 v.AddArg(x) 774 return true 775 } 776 return false 777 } 778 func rewriteValuegeneric_OpAnd16(v *Value, config *Config) bool { 779 b := v.Block 780 _ = b 781 // match: (And16 x (Const16 <t> [c])) 782 // cond: x.Op != OpConst16 783 // result: (And16 (Const16 <t> [c]) x) 784 for { 785 x := v.Args[0] 786 v_1 := v.Args[1] 787 if v_1.Op != OpConst16 { 788 break 789 } 790 t := v_1.Type 791 c := v_1.AuxInt 792 if !(x.Op != OpConst16) { 793 break 794 } 795 v.reset(OpAnd16) 796 v0 := b.NewValue0(v.Line, OpConst16, t) 797 v0.AuxInt = c 798 v.AddArg(v0) 799 
v.AddArg(x) 800 return true 801 } 802 // match: (And16 x x) 803 // cond: 804 // result: x 805 for { 806 x := v.Args[0] 807 if x != v.Args[1] { 808 break 809 } 810 v.reset(OpCopy) 811 v.Type = x.Type 812 v.AddArg(x) 813 return true 814 } 815 // match: (And16 (Const16 [-1]) x) 816 // cond: 817 // result: x 818 for { 819 v_0 := v.Args[0] 820 if v_0.Op != OpConst16 { 821 break 822 } 823 if v_0.AuxInt != -1 { 824 break 825 } 826 x := v.Args[1] 827 v.reset(OpCopy) 828 v.Type = x.Type 829 v.AddArg(x) 830 return true 831 } 832 // match: (And16 (Const16 [0]) _) 833 // cond: 834 // result: (Const16 [0]) 835 for { 836 v_0 := v.Args[0] 837 if v_0.Op != OpConst16 { 838 break 839 } 840 if v_0.AuxInt != 0 { 841 break 842 } 843 v.reset(OpConst16) 844 v.AuxInt = 0 845 return true 846 } 847 // match: (And16 x (And16 x y)) 848 // cond: 849 // result: (And16 x y) 850 for { 851 x := v.Args[0] 852 v_1 := v.Args[1] 853 if v_1.Op != OpAnd16 { 854 break 855 } 856 if x != v_1.Args[0] { 857 break 858 } 859 y := v_1.Args[1] 860 v.reset(OpAnd16) 861 v.AddArg(x) 862 v.AddArg(y) 863 return true 864 } 865 // match: (And16 x (And16 y x)) 866 // cond: 867 // result: (And16 x y) 868 for { 869 x := v.Args[0] 870 v_1 := v.Args[1] 871 if v_1.Op != OpAnd16 { 872 break 873 } 874 y := v_1.Args[0] 875 if x != v_1.Args[1] { 876 break 877 } 878 v.reset(OpAnd16) 879 v.AddArg(x) 880 v.AddArg(y) 881 return true 882 } 883 // match: (And16 (And16 x y) x) 884 // cond: 885 // result: (And16 x y) 886 for { 887 v_0 := v.Args[0] 888 if v_0.Op != OpAnd16 { 889 break 890 } 891 x := v_0.Args[0] 892 y := v_0.Args[1] 893 if x != v.Args[1] { 894 break 895 } 896 v.reset(OpAnd16) 897 v.AddArg(x) 898 v.AddArg(y) 899 return true 900 } 901 // match: (And16 (And16 x y) y) 902 // cond: 903 // result: (And16 x y) 904 for { 905 v_0 := v.Args[0] 906 if v_0.Op != OpAnd16 { 907 break 908 } 909 x := v_0.Args[0] 910 y := v_0.Args[1] 911 if y != v.Args[1] { 912 break 913 } 914 v.reset(OpAnd16) 915 v.AddArg(x) 916 v.AddArg(y) 917 return 
true 918 } 919 return false 920 } 921 func rewriteValuegeneric_OpAnd32(v *Value, config *Config) bool { 922 b := v.Block 923 _ = b 924 // match: (And32 x (Const32 <t> [c])) 925 // cond: x.Op != OpConst32 926 // result: (And32 (Const32 <t> [c]) x) 927 for { 928 x := v.Args[0] 929 v_1 := v.Args[1] 930 if v_1.Op != OpConst32 { 931 break 932 } 933 t := v_1.Type 934 c := v_1.AuxInt 935 if !(x.Op != OpConst32) { 936 break 937 } 938 v.reset(OpAnd32) 939 v0 := b.NewValue0(v.Line, OpConst32, t) 940 v0.AuxInt = c 941 v.AddArg(v0) 942 v.AddArg(x) 943 return true 944 } 945 // match: (And32 x x) 946 // cond: 947 // result: x 948 for { 949 x := v.Args[0] 950 if x != v.Args[1] { 951 break 952 } 953 v.reset(OpCopy) 954 v.Type = x.Type 955 v.AddArg(x) 956 return true 957 } 958 // match: (And32 (Const32 [-1]) x) 959 // cond: 960 // result: x 961 for { 962 v_0 := v.Args[0] 963 if v_0.Op != OpConst32 { 964 break 965 } 966 if v_0.AuxInt != -1 { 967 break 968 } 969 x := v.Args[1] 970 v.reset(OpCopy) 971 v.Type = x.Type 972 v.AddArg(x) 973 return true 974 } 975 // match: (And32 (Const32 [0]) _) 976 // cond: 977 // result: (Const32 [0]) 978 for { 979 v_0 := v.Args[0] 980 if v_0.Op != OpConst32 { 981 break 982 } 983 if v_0.AuxInt != 0 { 984 break 985 } 986 v.reset(OpConst32) 987 v.AuxInt = 0 988 return true 989 } 990 // match: (And32 x (And32 x y)) 991 // cond: 992 // result: (And32 x y) 993 for { 994 x := v.Args[0] 995 v_1 := v.Args[1] 996 if v_1.Op != OpAnd32 { 997 break 998 } 999 if x != v_1.Args[0] { 1000 break 1001 } 1002 y := v_1.Args[1] 1003 v.reset(OpAnd32) 1004 v.AddArg(x) 1005 v.AddArg(y) 1006 return true 1007 } 1008 // match: (And32 x (And32 y x)) 1009 // cond: 1010 // result: (And32 x y) 1011 for { 1012 x := v.Args[0] 1013 v_1 := v.Args[1] 1014 if v_1.Op != OpAnd32 { 1015 break 1016 } 1017 y := v_1.Args[0] 1018 if x != v_1.Args[1] { 1019 break 1020 } 1021 v.reset(OpAnd32) 1022 v.AddArg(x) 1023 v.AddArg(y) 1024 return true 1025 } 1026 // match: (And32 (And32 x y) x) 1027 // 
cond: 1028 // result: (And32 x y) 1029 for { 1030 v_0 := v.Args[0] 1031 if v_0.Op != OpAnd32 { 1032 break 1033 } 1034 x := v_0.Args[0] 1035 y := v_0.Args[1] 1036 if x != v.Args[1] { 1037 break 1038 } 1039 v.reset(OpAnd32) 1040 v.AddArg(x) 1041 v.AddArg(y) 1042 return true 1043 } 1044 // match: (And32 (And32 x y) y) 1045 // cond: 1046 // result: (And32 x y) 1047 for { 1048 v_0 := v.Args[0] 1049 if v_0.Op != OpAnd32 { 1050 break 1051 } 1052 x := v_0.Args[0] 1053 y := v_0.Args[1] 1054 if y != v.Args[1] { 1055 break 1056 } 1057 v.reset(OpAnd32) 1058 v.AddArg(x) 1059 v.AddArg(y) 1060 return true 1061 } 1062 return false 1063 } 1064 func rewriteValuegeneric_OpAnd64(v *Value, config *Config) bool { 1065 b := v.Block 1066 _ = b 1067 // match: (And64 x (Const64 <t> [c])) 1068 // cond: x.Op != OpConst64 1069 // result: (And64 (Const64 <t> [c]) x) 1070 for { 1071 x := v.Args[0] 1072 v_1 := v.Args[1] 1073 if v_1.Op != OpConst64 { 1074 break 1075 } 1076 t := v_1.Type 1077 c := v_1.AuxInt 1078 if !(x.Op != OpConst64) { 1079 break 1080 } 1081 v.reset(OpAnd64) 1082 v0 := b.NewValue0(v.Line, OpConst64, t) 1083 v0.AuxInt = c 1084 v.AddArg(v0) 1085 v.AddArg(x) 1086 return true 1087 } 1088 // match: (And64 x x) 1089 // cond: 1090 // result: x 1091 for { 1092 x := v.Args[0] 1093 if x != v.Args[1] { 1094 break 1095 } 1096 v.reset(OpCopy) 1097 v.Type = x.Type 1098 v.AddArg(x) 1099 return true 1100 } 1101 // match: (And64 (Const64 [-1]) x) 1102 // cond: 1103 // result: x 1104 for { 1105 v_0 := v.Args[0] 1106 if v_0.Op != OpConst64 { 1107 break 1108 } 1109 if v_0.AuxInt != -1 { 1110 break 1111 } 1112 x := v.Args[1] 1113 v.reset(OpCopy) 1114 v.Type = x.Type 1115 v.AddArg(x) 1116 return true 1117 } 1118 // match: (And64 (Const64 [0]) _) 1119 // cond: 1120 // result: (Const64 [0]) 1121 for { 1122 v_0 := v.Args[0] 1123 if v_0.Op != OpConst64 { 1124 break 1125 } 1126 if v_0.AuxInt != 0 { 1127 break 1128 } 1129 v.reset(OpConst64) 1130 v.AuxInt = 0 1131 return true 1132 } 1133 // match: (And64 x 
(And64 x y)) 1134 // cond: 1135 // result: (And64 x y) 1136 for { 1137 x := v.Args[0] 1138 v_1 := v.Args[1] 1139 if v_1.Op != OpAnd64 { 1140 break 1141 } 1142 if x != v_1.Args[0] { 1143 break 1144 } 1145 y := v_1.Args[1] 1146 v.reset(OpAnd64) 1147 v.AddArg(x) 1148 v.AddArg(y) 1149 return true 1150 } 1151 // match: (And64 x (And64 y x)) 1152 // cond: 1153 // result: (And64 x y) 1154 for { 1155 x := v.Args[0] 1156 v_1 := v.Args[1] 1157 if v_1.Op != OpAnd64 { 1158 break 1159 } 1160 y := v_1.Args[0] 1161 if x != v_1.Args[1] { 1162 break 1163 } 1164 v.reset(OpAnd64) 1165 v.AddArg(x) 1166 v.AddArg(y) 1167 return true 1168 } 1169 // match: (And64 (And64 x y) x) 1170 // cond: 1171 // result: (And64 x y) 1172 for { 1173 v_0 := v.Args[0] 1174 if v_0.Op != OpAnd64 { 1175 break 1176 } 1177 x := v_0.Args[0] 1178 y := v_0.Args[1] 1179 if x != v.Args[1] { 1180 break 1181 } 1182 v.reset(OpAnd64) 1183 v.AddArg(x) 1184 v.AddArg(y) 1185 return true 1186 } 1187 // match: (And64 (And64 x y) y) 1188 // cond: 1189 // result: (And64 x y) 1190 for { 1191 v_0 := v.Args[0] 1192 if v_0.Op != OpAnd64 { 1193 break 1194 } 1195 x := v_0.Args[0] 1196 y := v_0.Args[1] 1197 if y != v.Args[1] { 1198 break 1199 } 1200 v.reset(OpAnd64) 1201 v.AddArg(x) 1202 v.AddArg(y) 1203 return true 1204 } 1205 // match: (And64 <t> (Const64 [y]) x) 1206 // cond: nlz(y) + nto(y) == 64 && nto(y) >= 32 1207 // result: (Rsh64Ux64 (Lsh64x64 <t> x (Const64 <t> [nlz(y)])) (Const64 <t> [nlz(y)])) 1208 for { 1209 t := v.Type 1210 v_0 := v.Args[0] 1211 if v_0.Op != OpConst64 { 1212 break 1213 } 1214 y := v_0.AuxInt 1215 x := v.Args[1] 1216 if !(nlz(y)+nto(y) == 64 && nto(y) >= 32) { 1217 break 1218 } 1219 v.reset(OpRsh64Ux64) 1220 v0 := b.NewValue0(v.Line, OpLsh64x64, t) 1221 v0.AddArg(x) 1222 v1 := b.NewValue0(v.Line, OpConst64, t) 1223 v1.AuxInt = nlz(y) 1224 v0.AddArg(v1) 1225 v.AddArg(v0) 1226 v2 := b.NewValue0(v.Line, OpConst64, t) 1227 v2.AuxInt = nlz(y) 1228 v.AddArg(v2) 1229 return true 1230 } 1231 // match: (And64 
<t> (Const64 [y]) x) 1232 // cond: nlo(y) + ntz(y) == 64 && ntz(y) >= 32 1233 // result: (Lsh64x64 (Rsh64Ux64 <t> x (Const64 <t> [ntz(y)])) (Const64 <t> [ntz(y)])) 1234 for { 1235 t := v.Type 1236 v_0 := v.Args[0] 1237 if v_0.Op != OpConst64 { 1238 break 1239 } 1240 y := v_0.AuxInt 1241 x := v.Args[1] 1242 if !(nlo(y)+ntz(y) == 64 && ntz(y) >= 32) { 1243 break 1244 } 1245 v.reset(OpLsh64x64) 1246 v0 := b.NewValue0(v.Line, OpRsh64Ux64, t) 1247 v0.AddArg(x) 1248 v1 := b.NewValue0(v.Line, OpConst64, t) 1249 v1.AuxInt = ntz(y) 1250 v0.AddArg(v1) 1251 v.AddArg(v0) 1252 v2 := b.NewValue0(v.Line, OpConst64, t) 1253 v2.AuxInt = ntz(y) 1254 v.AddArg(v2) 1255 return true 1256 } 1257 return false 1258 } 1259 func rewriteValuegeneric_OpAnd8(v *Value, config *Config) bool { 1260 b := v.Block 1261 _ = b 1262 // match: (And8 x (Const8 <t> [c])) 1263 // cond: x.Op != OpConst8 1264 // result: (And8 (Const8 <t> [c]) x) 1265 for { 1266 x := v.Args[0] 1267 v_1 := v.Args[1] 1268 if v_1.Op != OpConst8 { 1269 break 1270 } 1271 t := v_1.Type 1272 c := v_1.AuxInt 1273 if !(x.Op != OpConst8) { 1274 break 1275 } 1276 v.reset(OpAnd8) 1277 v0 := b.NewValue0(v.Line, OpConst8, t) 1278 v0.AuxInt = c 1279 v.AddArg(v0) 1280 v.AddArg(x) 1281 return true 1282 } 1283 // match: (And8 x x) 1284 // cond: 1285 // result: x 1286 for { 1287 x := v.Args[0] 1288 if x != v.Args[1] { 1289 break 1290 } 1291 v.reset(OpCopy) 1292 v.Type = x.Type 1293 v.AddArg(x) 1294 return true 1295 } 1296 // match: (And8 (Const8 [-1]) x) 1297 // cond: 1298 // result: x 1299 for { 1300 v_0 := v.Args[0] 1301 if v_0.Op != OpConst8 { 1302 break 1303 } 1304 if v_0.AuxInt != -1 { 1305 break 1306 } 1307 x := v.Args[1] 1308 v.reset(OpCopy) 1309 v.Type = x.Type 1310 v.AddArg(x) 1311 return true 1312 } 1313 // match: (And8 (Const8 [0]) _) 1314 // cond: 1315 // result: (Const8 [0]) 1316 for { 1317 v_0 := v.Args[0] 1318 if v_0.Op != OpConst8 { 1319 break 1320 } 1321 if v_0.AuxInt != 0 { 1322 break 1323 } 1324 v.reset(OpConst8) 1325 v.AuxInt 
= 0 1326 return true 1327 } 1328 // match: (And8 x (And8 x y)) 1329 // cond: 1330 // result: (And8 x y) 1331 for { 1332 x := v.Args[0] 1333 v_1 := v.Args[1] 1334 if v_1.Op != OpAnd8 { 1335 break 1336 } 1337 if x != v_1.Args[0] { 1338 break 1339 } 1340 y := v_1.Args[1] 1341 v.reset(OpAnd8) 1342 v.AddArg(x) 1343 v.AddArg(y) 1344 return true 1345 } 1346 // match: (And8 x (And8 y x)) 1347 // cond: 1348 // result: (And8 x y) 1349 for { 1350 x := v.Args[0] 1351 v_1 := v.Args[1] 1352 if v_1.Op != OpAnd8 { 1353 break 1354 } 1355 y := v_1.Args[0] 1356 if x != v_1.Args[1] { 1357 break 1358 } 1359 v.reset(OpAnd8) 1360 v.AddArg(x) 1361 v.AddArg(y) 1362 return true 1363 } 1364 // match: (And8 (And8 x y) x) 1365 // cond: 1366 // result: (And8 x y) 1367 for { 1368 v_0 := v.Args[0] 1369 if v_0.Op != OpAnd8 { 1370 break 1371 } 1372 x := v_0.Args[0] 1373 y := v_0.Args[1] 1374 if x != v.Args[1] { 1375 break 1376 } 1377 v.reset(OpAnd8) 1378 v.AddArg(x) 1379 v.AddArg(y) 1380 return true 1381 } 1382 // match: (And8 (And8 x y) y) 1383 // cond: 1384 // result: (And8 x y) 1385 for { 1386 v_0 := v.Args[0] 1387 if v_0.Op != OpAnd8 { 1388 break 1389 } 1390 x := v_0.Args[0] 1391 y := v_0.Args[1] 1392 if y != v.Args[1] { 1393 break 1394 } 1395 v.reset(OpAnd8) 1396 v.AddArg(x) 1397 v.AddArg(y) 1398 return true 1399 } 1400 return false 1401 } 1402 func rewriteValuegeneric_OpArg(v *Value, config *Config) bool { 1403 b := v.Block 1404 _ = b 1405 // match: (Arg {n} [off]) 1406 // cond: v.Type.IsString() 1407 // result: (StringMake (Arg <config.fe.TypeBytePtr()> {n} [off]) (Arg <config.fe.TypeInt()> {n} [off+config.PtrSize])) 1408 for { 1409 off := v.AuxInt 1410 n := v.Aux 1411 if !(v.Type.IsString()) { 1412 break 1413 } 1414 v.reset(OpStringMake) 1415 v0 := b.NewValue0(v.Line, OpArg, config.fe.TypeBytePtr()) 1416 v0.AuxInt = off 1417 v0.Aux = n 1418 v.AddArg(v0) 1419 v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeInt()) 1420 v1.AuxInt = off + config.PtrSize 1421 v1.Aux = n 1422 v.AddArg(v1) 1423 
return true 1424 } 1425 // match: (Arg {n} [off]) 1426 // cond: v.Type.IsSlice() 1427 // result: (SliceMake (Arg <v.Type.ElemType().PtrTo()> {n} [off]) (Arg <config.fe.TypeInt()> {n} [off+config.PtrSize]) (Arg <config.fe.TypeInt()> {n} [off+2*config.PtrSize])) 1428 for { 1429 off := v.AuxInt 1430 n := v.Aux 1431 if !(v.Type.IsSlice()) { 1432 break 1433 } 1434 v.reset(OpSliceMake) 1435 v0 := b.NewValue0(v.Line, OpArg, v.Type.ElemType().PtrTo()) 1436 v0.AuxInt = off 1437 v0.Aux = n 1438 v.AddArg(v0) 1439 v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeInt()) 1440 v1.AuxInt = off + config.PtrSize 1441 v1.Aux = n 1442 v.AddArg(v1) 1443 v2 := b.NewValue0(v.Line, OpArg, config.fe.TypeInt()) 1444 v2.AuxInt = off + 2*config.PtrSize 1445 v2.Aux = n 1446 v.AddArg(v2) 1447 return true 1448 } 1449 // match: (Arg {n} [off]) 1450 // cond: v.Type.IsInterface() 1451 // result: (IMake (Arg <config.fe.TypeBytePtr()> {n} [off]) (Arg <config.fe.TypeBytePtr()> {n} [off+config.PtrSize])) 1452 for { 1453 off := v.AuxInt 1454 n := v.Aux 1455 if !(v.Type.IsInterface()) { 1456 break 1457 } 1458 v.reset(OpIMake) 1459 v0 := b.NewValue0(v.Line, OpArg, config.fe.TypeBytePtr()) 1460 v0.AuxInt = off 1461 v0.Aux = n 1462 v.AddArg(v0) 1463 v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeBytePtr()) 1464 v1.AuxInt = off + config.PtrSize 1465 v1.Aux = n 1466 v.AddArg(v1) 1467 return true 1468 } 1469 // match: (Arg {n} [off]) 1470 // cond: v.Type.IsComplex() && v.Type.Size() == 16 1471 // result: (ComplexMake (Arg <config.fe.TypeFloat64()> {n} [off]) (Arg <config.fe.TypeFloat64()> {n} [off+8])) 1472 for { 1473 off := v.AuxInt 1474 n := v.Aux 1475 if !(v.Type.IsComplex() && v.Type.Size() == 16) { 1476 break 1477 } 1478 v.reset(OpComplexMake) 1479 v0 := b.NewValue0(v.Line, OpArg, config.fe.TypeFloat64()) 1480 v0.AuxInt = off 1481 v0.Aux = n 1482 v.AddArg(v0) 1483 v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeFloat64()) 1484 v1.AuxInt = off + 8 1485 v1.Aux = n 1486 v.AddArg(v1) 1487 return true 1488 } 1489 
// match: (Arg {n} [off]) 1490 // cond: v.Type.IsComplex() && v.Type.Size() == 8 1491 // result: (ComplexMake (Arg <config.fe.TypeFloat32()> {n} [off]) (Arg <config.fe.TypeFloat32()> {n} [off+4])) 1492 for { 1493 off := v.AuxInt 1494 n := v.Aux 1495 if !(v.Type.IsComplex() && v.Type.Size() == 8) { 1496 break 1497 } 1498 v.reset(OpComplexMake) 1499 v0 := b.NewValue0(v.Line, OpArg, config.fe.TypeFloat32()) 1500 v0.AuxInt = off 1501 v0.Aux = n 1502 v.AddArg(v0) 1503 v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeFloat32()) 1504 v1.AuxInt = off + 4 1505 v1.Aux = n 1506 v.AddArg(v1) 1507 return true 1508 } 1509 // match: (Arg <t>) 1510 // cond: t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t) 1511 // result: (StructMake0) 1512 for { 1513 t := v.Type 1514 if !(t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t)) { 1515 break 1516 } 1517 v.reset(OpStructMake0) 1518 return true 1519 } 1520 // match: (Arg <t> {n} [off]) 1521 // cond: t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t) 1522 // result: (StructMake1 (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)])) 1523 for { 1524 t := v.Type 1525 off := v.AuxInt 1526 n := v.Aux 1527 if !(t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t)) { 1528 break 1529 } 1530 v.reset(OpStructMake1) 1531 v0 := b.NewValue0(v.Line, OpArg, t.FieldType(0)) 1532 v0.AuxInt = off + t.FieldOff(0) 1533 v0.Aux = n 1534 v.AddArg(v0) 1535 return true 1536 } 1537 // match: (Arg <t> {n} [off]) 1538 // cond: t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t) 1539 // result: (StructMake2 (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)]) (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)])) 1540 for { 1541 t := v.Type 1542 off := v.AuxInt 1543 n := v.Aux 1544 if !(t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t)) { 1545 break 1546 } 1547 v.reset(OpStructMake2) 1548 v0 := b.NewValue0(v.Line, OpArg, t.FieldType(0)) 1549 v0.AuxInt = off + t.FieldOff(0) 1550 v0.Aux = n 1551 v.AddArg(v0) 1552 v1 := b.NewValue0(v.Line, OpArg, 
t.FieldType(1)) 1553 v1.AuxInt = off + t.FieldOff(1) 1554 v1.Aux = n 1555 v.AddArg(v1) 1556 return true 1557 } 1558 // match: (Arg <t> {n} [off]) 1559 // cond: t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t) 1560 // result: (StructMake3 (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)]) (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)]) (Arg <t.FieldType(2)> {n} [off+t.FieldOff(2)])) 1561 for { 1562 t := v.Type 1563 off := v.AuxInt 1564 n := v.Aux 1565 if !(t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t)) { 1566 break 1567 } 1568 v.reset(OpStructMake3) 1569 v0 := b.NewValue0(v.Line, OpArg, t.FieldType(0)) 1570 v0.AuxInt = off + t.FieldOff(0) 1571 v0.Aux = n 1572 v.AddArg(v0) 1573 v1 := b.NewValue0(v.Line, OpArg, t.FieldType(1)) 1574 v1.AuxInt = off + t.FieldOff(1) 1575 v1.Aux = n 1576 v.AddArg(v1) 1577 v2 := b.NewValue0(v.Line, OpArg, t.FieldType(2)) 1578 v2.AuxInt = off + t.FieldOff(2) 1579 v2.Aux = n 1580 v.AddArg(v2) 1581 return true 1582 } 1583 // match: (Arg <t> {n} [off]) 1584 // cond: t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t) 1585 // result: (StructMake4 (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)]) (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)]) (Arg <t.FieldType(2)> {n} [off+t.FieldOff(2)]) (Arg <t.FieldType(3)> {n} [off+t.FieldOff(3)])) 1586 for { 1587 t := v.Type 1588 off := v.AuxInt 1589 n := v.Aux 1590 if !(t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t)) { 1591 break 1592 } 1593 v.reset(OpStructMake4) 1594 v0 := b.NewValue0(v.Line, OpArg, t.FieldType(0)) 1595 v0.AuxInt = off + t.FieldOff(0) 1596 v0.Aux = n 1597 v.AddArg(v0) 1598 v1 := b.NewValue0(v.Line, OpArg, t.FieldType(1)) 1599 v1.AuxInt = off + t.FieldOff(1) 1600 v1.Aux = n 1601 v.AddArg(v1) 1602 v2 := b.NewValue0(v.Line, OpArg, t.FieldType(2)) 1603 v2.AuxInt = off + t.FieldOff(2) 1604 v2.Aux = n 1605 v.AddArg(v2) 1606 v3 := b.NewValue0(v.Line, OpArg, t.FieldType(3)) 1607 v3.AuxInt = off + t.FieldOff(3) 1608 v3.Aux = n 1609 v.AddArg(v3) 1610 return true 1611 
} 1612 // match: (Arg <t>) 1613 // cond: t.IsArray() && t.NumElem() == 0 1614 // result: (ArrayMake0) 1615 for { 1616 t := v.Type 1617 if !(t.IsArray() && t.NumElem() == 0) { 1618 break 1619 } 1620 v.reset(OpArrayMake0) 1621 return true 1622 } 1623 // match: (Arg <t> {n} [off]) 1624 // cond: t.IsArray() && t.NumElem() == 1 && config.fe.CanSSA(t) 1625 // result: (ArrayMake1 (Arg <t.ElemType()> {n} [off])) 1626 for { 1627 t := v.Type 1628 off := v.AuxInt 1629 n := v.Aux 1630 if !(t.IsArray() && t.NumElem() == 1 && config.fe.CanSSA(t)) { 1631 break 1632 } 1633 v.reset(OpArrayMake1) 1634 v0 := b.NewValue0(v.Line, OpArg, t.ElemType()) 1635 v0.AuxInt = off 1636 v0.Aux = n 1637 v.AddArg(v0) 1638 return true 1639 } 1640 return false 1641 } 1642 func rewriteValuegeneric_OpArraySelect(v *Value, config *Config) bool { 1643 b := v.Block 1644 _ = b 1645 // match: (ArraySelect (ArrayMake1 x)) 1646 // cond: 1647 // result: x 1648 for { 1649 v_0 := v.Args[0] 1650 if v_0.Op != OpArrayMake1 { 1651 break 1652 } 1653 x := v_0.Args[0] 1654 v.reset(OpCopy) 1655 v.Type = x.Type 1656 v.AddArg(x) 1657 return true 1658 } 1659 // match: (ArraySelect [0] (Load ptr mem)) 1660 // cond: 1661 // result: (Load ptr mem) 1662 for { 1663 if v.AuxInt != 0 { 1664 break 1665 } 1666 v_0 := v.Args[0] 1667 if v_0.Op != OpLoad { 1668 break 1669 } 1670 ptr := v_0.Args[0] 1671 mem := v_0.Args[1] 1672 v.reset(OpLoad) 1673 v.AddArg(ptr) 1674 v.AddArg(mem) 1675 return true 1676 } 1677 // match: (ArraySelect [0] x:(IData _)) 1678 // cond: 1679 // result: x 1680 for { 1681 if v.AuxInt != 0 { 1682 break 1683 } 1684 x := v.Args[0] 1685 if x.Op != OpIData { 1686 break 1687 } 1688 v.reset(OpCopy) 1689 v.Type = x.Type 1690 v.AddArg(x) 1691 return true 1692 } 1693 return false 1694 } 1695 func rewriteValuegeneric_OpCom16(v *Value, config *Config) bool { 1696 b := v.Block 1697 _ = b 1698 // match: (Com16 (Com16 x)) 1699 // cond: 1700 // result: x 1701 for { 1702 v_0 := v.Args[0] 1703 if v_0.Op != OpCom16 { 1704 break 
1705 } 1706 x := v_0.Args[0] 1707 v.reset(OpCopy) 1708 v.Type = x.Type 1709 v.AddArg(x) 1710 return true 1711 } 1712 return false 1713 } 1714 func rewriteValuegeneric_OpCom32(v *Value, config *Config) bool { 1715 b := v.Block 1716 _ = b 1717 // match: (Com32 (Com32 x)) 1718 // cond: 1719 // result: x 1720 for { 1721 v_0 := v.Args[0] 1722 if v_0.Op != OpCom32 { 1723 break 1724 } 1725 x := v_0.Args[0] 1726 v.reset(OpCopy) 1727 v.Type = x.Type 1728 v.AddArg(x) 1729 return true 1730 } 1731 return false 1732 } 1733 func rewriteValuegeneric_OpCom64(v *Value, config *Config) bool { 1734 b := v.Block 1735 _ = b 1736 // match: (Com64 (Com64 x)) 1737 // cond: 1738 // result: x 1739 for { 1740 v_0 := v.Args[0] 1741 if v_0.Op != OpCom64 { 1742 break 1743 } 1744 x := v_0.Args[0] 1745 v.reset(OpCopy) 1746 v.Type = x.Type 1747 v.AddArg(x) 1748 return true 1749 } 1750 return false 1751 } 1752 func rewriteValuegeneric_OpCom8(v *Value, config *Config) bool { 1753 b := v.Block 1754 _ = b 1755 // match: (Com8 (Com8 x)) 1756 // cond: 1757 // result: x 1758 for { 1759 v_0 := v.Args[0] 1760 if v_0.Op != OpCom8 { 1761 break 1762 } 1763 x := v_0.Args[0] 1764 v.reset(OpCopy) 1765 v.Type = x.Type 1766 v.AddArg(x) 1767 return true 1768 } 1769 return false 1770 } 1771 func rewriteValuegeneric_OpConstInterface(v *Value, config *Config) bool { 1772 b := v.Block 1773 _ = b 1774 // match: (ConstInterface) 1775 // cond: 1776 // result: (IMake (ConstNil <config.fe.TypeBytePtr()>) (ConstNil <config.fe.TypeBytePtr()>)) 1777 for { 1778 v.reset(OpIMake) 1779 v0 := b.NewValue0(v.Line, OpConstNil, config.fe.TypeBytePtr()) 1780 v.AddArg(v0) 1781 v1 := b.NewValue0(v.Line, OpConstNil, config.fe.TypeBytePtr()) 1782 v.AddArg(v1) 1783 return true 1784 } 1785 } 1786 func rewriteValuegeneric_OpConstSlice(v *Value, config *Config) bool { 1787 b := v.Block 1788 _ = b 1789 // match: (ConstSlice) 1790 // cond: config.PtrSize == 4 1791 // result: (SliceMake (ConstNil <v.Type.ElemType().PtrTo()>) (Const32 
<config.fe.TypeInt()> [0]) (Const32 <config.fe.TypeInt()> [0])) 1792 for { 1793 if !(config.PtrSize == 4) { 1794 break 1795 } 1796 v.reset(OpSliceMake) 1797 v0 := b.NewValue0(v.Line, OpConstNil, v.Type.ElemType().PtrTo()) 1798 v.AddArg(v0) 1799 v1 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt()) 1800 v1.AuxInt = 0 1801 v.AddArg(v1) 1802 v2 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt()) 1803 v2.AuxInt = 0 1804 v.AddArg(v2) 1805 return true 1806 } 1807 // match: (ConstSlice) 1808 // cond: config.PtrSize == 8 1809 // result: (SliceMake (ConstNil <v.Type.ElemType().PtrTo()>) (Const64 <config.fe.TypeInt()> [0]) (Const64 <config.fe.TypeInt()> [0])) 1810 for { 1811 if !(config.PtrSize == 8) { 1812 break 1813 } 1814 v.reset(OpSliceMake) 1815 v0 := b.NewValue0(v.Line, OpConstNil, v.Type.ElemType().PtrTo()) 1816 v.AddArg(v0) 1817 v1 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt()) 1818 v1.AuxInt = 0 1819 v.AddArg(v1) 1820 v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt()) 1821 v2.AuxInt = 0 1822 v.AddArg(v2) 1823 return true 1824 } 1825 return false 1826 } 1827 func rewriteValuegeneric_OpConstString(v *Value, config *Config) bool { 1828 b := v.Block 1829 _ = b 1830 // match: (ConstString {s}) 1831 // cond: config.PtrSize == 4 && s.(string) == "" 1832 // result: (StringMake (ConstNil) (Const32 <config.fe.TypeInt()> [0])) 1833 for { 1834 s := v.Aux 1835 if !(config.PtrSize == 4 && s.(string) == "") { 1836 break 1837 } 1838 v.reset(OpStringMake) 1839 v0 := b.NewValue0(v.Line, OpConstNil, config.fe.TypeBytePtr()) 1840 v.AddArg(v0) 1841 v1 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt()) 1842 v1.AuxInt = 0 1843 v.AddArg(v1) 1844 return true 1845 } 1846 // match: (ConstString {s}) 1847 // cond: config.PtrSize == 8 && s.(string) == "" 1848 // result: (StringMake (ConstNil) (Const64 <config.fe.TypeInt()> [0])) 1849 for { 1850 s := v.Aux 1851 if !(config.PtrSize == 8 && s.(string) == "") { 1852 break 1853 } 1854 v.reset(OpStringMake) 1855 v0 := 
b.NewValue0(v.Line, OpConstNil, config.fe.TypeBytePtr()) 1856 v.AddArg(v0) 1857 v1 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt()) 1858 v1.AuxInt = 0 1859 v.AddArg(v1) 1860 return true 1861 } 1862 // match: (ConstString {s}) 1863 // cond: config.PtrSize == 4 && s.(string) != "" 1864 // result: (StringMake (Addr <config.fe.TypeBytePtr()> {config.fe.StringData(s.(string))} (SB)) (Const32 <config.fe.TypeInt()> [int64(len(s.(string)))])) 1865 for { 1866 s := v.Aux 1867 if !(config.PtrSize == 4 && s.(string) != "") { 1868 break 1869 } 1870 v.reset(OpStringMake) 1871 v0 := b.NewValue0(v.Line, OpAddr, config.fe.TypeBytePtr()) 1872 v0.Aux = config.fe.StringData(s.(string)) 1873 v1 := b.NewValue0(v.Line, OpSB, config.fe.TypeUintptr()) 1874 v0.AddArg(v1) 1875 v.AddArg(v0) 1876 v2 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt()) 1877 v2.AuxInt = int64(len(s.(string))) 1878 v.AddArg(v2) 1879 return true 1880 } 1881 // match: (ConstString {s}) 1882 // cond: config.PtrSize == 8 && s.(string) != "" 1883 // result: (StringMake (Addr <config.fe.TypeBytePtr()> {config.fe.StringData(s.(string))} (SB)) (Const64 <config.fe.TypeInt()> [int64(len(s.(string)))])) 1884 for { 1885 s := v.Aux 1886 if !(config.PtrSize == 8 && s.(string) != "") { 1887 break 1888 } 1889 v.reset(OpStringMake) 1890 v0 := b.NewValue0(v.Line, OpAddr, config.fe.TypeBytePtr()) 1891 v0.Aux = config.fe.StringData(s.(string)) 1892 v1 := b.NewValue0(v.Line, OpSB, config.fe.TypeUintptr()) 1893 v0.AddArg(v1) 1894 v.AddArg(v0) 1895 v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt()) 1896 v2.AuxInt = int64(len(s.(string))) 1897 v.AddArg(v2) 1898 return true 1899 } 1900 return false 1901 } 1902 func rewriteValuegeneric_OpConvert(v *Value, config *Config) bool { 1903 b := v.Block 1904 _ = b 1905 // match: (Convert (Add64 (Convert ptr mem) off) mem) 1906 // cond: 1907 // result: (Add64 ptr off) 1908 for { 1909 v_0 := v.Args[0] 1910 if v_0.Op != OpAdd64 { 1911 break 1912 } 1913 v_0_0 := v_0.Args[0] 1914 if 
v_0_0.Op != OpConvert { 1915 break 1916 } 1917 ptr := v_0_0.Args[0] 1918 mem := v_0_0.Args[1] 1919 off := v_0.Args[1] 1920 if mem != v.Args[1] { 1921 break 1922 } 1923 v.reset(OpAdd64) 1924 v.AddArg(ptr) 1925 v.AddArg(off) 1926 return true 1927 } 1928 // match: (Convert (Add64 off (Convert ptr mem)) mem) 1929 // cond: 1930 // result: (Add64 ptr off) 1931 for { 1932 v_0 := v.Args[0] 1933 if v_0.Op != OpAdd64 { 1934 break 1935 } 1936 off := v_0.Args[0] 1937 v_0_1 := v_0.Args[1] 1938 if v_0_1.Op != OpConvert { 1939 break 1940 } 1941 ptr := v_0_1.Args[0] 1942 mem := v_0_1.Args[1] 1943 if mem != v.Args[1] { 1944 break 1945 } 1946 v.reset(OpAdd64) 1947 v.AddArg(ptr) 1948 v.AddArg(off) 1949 return true 1950 } 1951 // match: (Convert (Convert ptr mem) mem) 1952 // cond: 1953 // result: ptr 1954 for { 1955 v_0 := v.Args[0] 1956 if v_0.Op != OpConvert { 1957 break 1958 } 1959 ptr := v_0.Args[0] 1960 mem := v_0.Args[1] 1961 if mem != v.Args[1] { 1962 break 1963 } 1964 v.reset(OpCopy) 1965 v.Type = ptr.Type 1966 v.AddArg(ptr) 1967 return true 1968 } 1969 return false 1970 } 1971 func rewriteValuegeneric_OpCvt32Fto64F(v *Value, config *Config) bool { 1972 b := v.Block 1973 _ = b 1974 // match: (Cvt32Fto64F (Const32F [c])) 1975 // cond: 1976 // result: (Const64F [c]) 1977 for { 1978 v_0 := v.Args[0] 1979 if v_0.Op != OpConst32F { 1980 break 1981 } 1982 c := v_0.AuxInt 1983 v.reset(OpConst64F) 1984 v.AuxInt = c 1985 return true 1986 } 1987 return false 1988 } 1989 func rewriteValuegeneric_OpCvt64Fto32F(v *Value, config *Config) bool { 1990 b := v.Block 1991 _ = b 1992 // match: (Cvt64Fto32F (Const64F [c])) 1993 // cond: 1994 // result: (Const32F [f2i(float64(i2f32(c)))]) 1995 for { 1996 v_0 := v.Args[0] 1997 if v_0.Op != OpConst64F { 1998 break 1999 } 2000 c := v_0.AuxInt 2001 v.reset(OpConst32F) 2002 v.AuxInt = f2i(float64(i2f32(c))) 2003 return true 2004 } 2005 return false 2006 } 2007 func rewriteValuegeneric_OpDiv32F(v *Value, config *Config) bool { 2008 b := v.Block 2009 _ = 
b 2010 // match: (Div32F x (Const32F [f2i(1)])) 2011 // cond: 2012 // result: x 2013 for { 2014 x := v.Args[0] 2015 v_1 := v.Args[1] 2016 if v_1.Op != OpConst32F { 2017 break 2018 } 2019 if v_1.AuxInt != f2i(1) { 2020 break 2021 } 2022 v.reset(OpCopy) 2023 v.Type = x.Type 2024 v.AddArg(x) 2025 return true 2026 } 2027 // match: (Div32F x (Const32F [f2i(-1)])) 2028 // cond: 2029 // result: (Neg32F x) 2030 for { 2031 x := v.Args[0] 2032 v_1 := v.Args[1] 2033 if v_1.Op != OpConst32F { 2034 break 2035 } 2036 if v_1.AuxInt != f2i(-1) { 2037 break 2038 } 2039 v.reset(OpNeg32F) 2040 v.AddArg(x) 2041 return true 2042 } 2043 return false 2044 } 2045 func rewriteValuegeneric_OpDiv64(v *Value, config *Config) bool { 2046 b := v.Block 2047 _ = b 2048 // match: (Div64 <t> x (Const64 [c])) 2049 // cond: c > 0 && smagic64ok(c) && smagic64m(c) > 0 2050 // result: (Sub64 <t> (Rsh64x64 <t> (Hmul64 <t> (Const64 <t> [smagic64m(c)]) x) (Const64 <t> [smagic64s(c)])) (Rsh64x64 <t> x (Const64 <t> [63]))) 2051 for { 2052 t := v.Type 2053 x := v.Args[0] 2054 v_1 := v.Args[1] 2055 if v_1.Op != OpConst64 { 2056 break 2057 } 2058 c := v_1.AuxInt 2059 if !(c > 0 && smagic64ok(c) && smagic64m(c) > 0) { 2060 break 2061 } 2062 v.reset(OpSub64) 2063 v.Type = t 2064 v0 := b.NewValue0(v.Line, OpRsh64x64, t) 2065 v1 := b.NewValue0(v.Line, OpHmul64, t) 2066 v2 := b.NewValue0(v.Line, OpConst64, t) 2067 v2.AuxInt = smagic64m(c) 2068 v1.AddArg(v2) 2069 v1.AddArg(x) 2070 v0.AddArg(v1) 2071 v3 := b.NewValue0(v.Line, OpConst64, t) 2072 v3.AuxInt = smagic64s(c) 2073 v0.AddArg(v3) 2074 v.AddArg(v0) 2075 v4 := b.NewValue0(v.Line, OpRsh64x64, t) 2076 v4.AddArg(x) 2077 v5 := b.NewValue0(v.Line, OpConst64, t) 2078 v5.AuxInt = 63 2079 v4.AddArg(v5) 2080 v.AddArg(v4) 2081 return true 2082 } 2083 // match: (Div64 <t> x (Const64 [c])) 2084 // cond: c > 0 && smagic64ok(c) && smagic64m(c) < 0 2085 // result: (Sub64 <t> (Rsh64x64 <t> (Add64 <t> (Hmul64 <t> (Const64 <t> [smagic64m(c)]) x) x) (Const64 <t> [smagic64s(c)])) 
(Rsh64x64 <t> x (Const64 <t> [63]))) 2086 for { 2087 t := v.Type 2088 x := v.Args[0] 2089 v_1 := v.Args[1] 2090 if v_1.Op != OpConst64 { 2091 break 2092 } 2093 c := v_1.AuxInt 2094 if !(c > 0 && smagic64ok(c) && smagic64m(c) < 0) { 2095 break 2096 } 2097 v.reset(OpSub64) 2098 v.Type = t 2099 v0 := b.NewValue0(v.Line, OpRsh64x64, t) 2100 v1 := b.NewValue0(v.Line, OpAdd64, t) 2101 v2 := b.NewValue0(v.Line, OpHmul64, t) 2102 v3 := b.NewValue0(v.Line, OpConst64, t) 2103 v3.AuxInt = smagic64m(c) 2104 v2.AddArg(v3) 2105 v2.AddArg(x) 2106 v1.AddArg(v2) 2107 v1.AddArg(x) 2108 v0.AddArg(v1) 2109 v4 := b.NewValue0(v.Line, OpConst64, t) 2110 v4.AuxInt = smagic64s(c) 2111 v0.AddArg(v4) 2112 v.AddArg(v0) 2113 v5 := b.NewValue0(v.Line, OpRsh64x64, t) 2114 v5.AddArg(x) 2115 v6 := b.NewValue0(v.Line, OpConst64, t) 2116 v6.AuxInt = 63 2117 v5.AddArg(v6) 2118 v.AddArg(v5) 2119 return true 2120 } 2121 // match: (Div64 <t> x (Const64 [c])) 2122 // cond: c < 0 && smagic64ok(c) && smagic64m(c) > 0 2123 // result: (Neg64 <t> (Sub64 <t> (Rsh64x64 <t> (Hmul64 <t> (Const64 <t> [smagic64m(c)]) x) (Const64 <t> [smagic64s(c)])) (Rsh64x64 <t> x (Const64 <t> [63])))) 2124 for { 2125 t := v.Type 2126 x := v.Args[0] 2127 v_1 := v.Args[1] 2128 if v_1.Op != OpConst64 { 2129 break 2130 } 2131 c := v_1.AuxInt 2132 if !(c < 0 && smagic64ok(c) && smagic64m(c) > 0) { 2133 break 2134 } 2135 v.reset(OpNeg64) 2136 v.Type = t 2137 v0 := b.NewValue0(v.Line, OpSub64, t) 2138 v1 := b.NewValue0(v.Line, OpRsh64x64, t) 2139 v2 := b.NewValue0(v.Line, OpHmul64, t) 2140 v3 := b.NewValue0(v.Line, OpConst64, t) 2141 v3.AuxInt = smagic64m(c) 2142 v2.AddArg(v3) 2143 v2.AddArg(x) 2144 v1.AddArg(v2) 2145 v4 := b.NewValue0(v.Line, OpConst64, t) 2146 v4.AuxInt = smagic64s(c) 2147 v1.AddArg(v4) 2148 v0.AddArg(v1) 2149 v5 := b.NewValue0(v.Line, OpRsh64x64, t) 2150 v5.AddArg(x) 2151 v6 := b.NewValue0(v.Line, OpConst64, t) 2152 v6.AuxInt = 63 2153 v5.AddArg(v6) 2154 v0.AddArg(v5) 2155 v.AddArg(v0) 2156 return true 2157 } 2158 // 
match: (Div64 <t> x (Const64 [c])) 2159 // cond: c < 0 && smagic64ok(c) && smagic64m(c) < 0 2160 // result: (Neg64 <t> (Sub64 <t> (Rsh64x64 <t> (Add64 <t> (Hmul64 <t> (Const64 <t> [smagic64m(c)]) x) x) (Const64 <t> [smagic64s(c)])) (Rsh64x64 <t> x (Const64 <t> [63])))) 2161 for { 2162 t := v.Type 2163 x := v.Args[0] 2164 v_1 := v.Args[1] 2165 if v_1.Op != OpConst64 { 2166 break 2167 } 2168 c := v_1.AuxInt 2169 if !(c < 0 && smagic64ok(c) && smagic64m(c) < 0) { 2170 break 2171 } 2172 v.reset(OpNeg64) 2173 v.Type = t 2174 v0 := b.NewValue0(v.Line, OpSub64, t) 2175 v1 := b.NewValue0(v.Line, OpRsh64x64, t) 2176 v2 := b.NewValue0(v.Line, OpAdd64, t) 2177 v3 := b.NewValue0(v.Line, OpHmul64, t) 2178 v4 := b.NewValue0(v.Line, OpConst64, t) 2179 v4.AuxInt = smagic64m(c) 2180 v3.AddArg(v4) 2181 v3.AddArg(x) 2182 v2.AddArg(v3) 2183 v2.AddArg(x) 2184 v1.AddArg(v2) 2185 v5 := b.NewValue0(v.Line, OpConst64, t) 2186 v5.AuxInt = smagic64s(c) 2187 v1.AddArg(v5) 2188 v0.AddArg(v1) 2189 v6 := b.NewValue0(v.Line, OpRsh64x64, t) 2190 v6.AddArg(x) 2191 v7 := b.NewValue0(v.Line, OpConst64, t) 2192 v7.AuxInt = 63 2193 v6.AddArg(v7) 2194 v0.AddArg(v6) 2195 v.AddArg(v0) 2196 return true 2197 } 2198 return false 2199 } 2200 func rewriteValuegeneric_OpDiv64F(v *Value, config *Config) bool { 2201 b := v.Block 2202 _ = b 2203 // match: (Div64F x (Const64F [f2i(1)])) 2204 // cond: 2205 // result: x 2206 for { 2207 x := v.Args[0] 2208 v_1 := v.Args[1] 2209 if v_1.Op != OpConst64F { 2210 break 2211 } 2212 if v_1.AuxInt != f2i(1) { 2213 break 2214 } 2215 v.reset(OpCopy) 2216 v.Type = x.Type 2217 v.AddArg(x) 2218 return true 2219 } 2220 // match: (Div64F x (Const64F [f2i(-1)])) 2221 // cond: 2222 // result: (Neg32F x) 2223 for { 2224 x := v.Args[0] 2225 v_1 := v.Args[1] 2226 if v_1.Op != OpConst64F { 2227 break 2228 } 2229 if v_1.AuxInt != f2i(-1) { 2230 break 2231 } 2232 v.reset(OpNeg32F) 2233 v.AddArg(x) 2234 return true 2235 } 2236 return false 2237 } 2238 func rewriteValuegeneric_OpDiv64u(v 
*Value, config *Config) bool { 2239 b := v.Block 2240 _ = b 2241 // match: (Div64u <t> n (Const64 [c])) 2242 // cond: isPowerOfTwo(c) 2243 // result: (Rsh64Ux64 n (Const64 <t> [log2(c)])) 2244 for { 2245 t := v.Type 2246 n := v.Args[0] 2247 v_1 := v.Args[1] 2248 if v_1.Op != OpConst64 { 2249 break 2250 } 2251 c := v_1.AuxInt 2252 if !(isPowerOfTwo(c)) { 2253 break 2254 } 2255 v.reset(OpRsh64Ux64) 2256 v.AddArg(n) 2257 v0 := b.NewValue0(v.Line, OpConst64, t) 2258 v0.AuxInt = log2(c) 2259 v.AddArg(v0) 2260 return true 2261 } 2262 // match: (Div64u <t> x (Const64 [c])) 2263 // cond: umagic64ok(c) && !umagic64a(c) 2264 // result: (Rsh64Ux64 (Hmul64u <t> (Const64 <t> [umagic64m(c)]) x) (Const64 <t> [umagic64s(c)])) 2265 for { 2266 t := v.Type 2267 x := v.Args[0] 2268 v_1 := v.Args[1] 2269 if v_1.Op != OpConst64 { 2270 break 2271 } 2272 c := v_1.AuxInt 2273 if !(umagic64ok(c) && !umagic64a(c)) { 2274 break 2275 } 2276 v.reset(OpRsh64Ux64) 2277 v0 := b.NewValue0(v.Line, OpHmul64u, t) 2278 v1 := b.NewValue0(v.Line, OpConst64, t) 2279 v1.AuxInt = umagic64m(c) 2280 v0.AddArg(v1) 2281 v0.AddArg(x) 2282 v.AddArg(v0) 2283 v2 := b.NewValue0(v.Line, OpConst64, t) 2284 v2.AuxInt = umagic64s(c) 2285 v.AddArg(v2) 2286 return true 2287 } 2288 // match: (Div64u <t> x (Const64 [c])) 2289 // cond: umagic64ok(c) && umagic64a(c) 2290 // result: (Rsh64Ux64 (Avg64u <t> (Hmul64u <t> x (Const64 <t> [umagic64m(c)])) x) (Const64 <t> [umagic64s(c)-1])) 2291 for { 2292 t := v.Type 2293 x := v.Args[0] 2294 v_1 := v.Args[1] 2295 if v_1.Op != OpConst64 { 2296 break 2297 } 2298 c := v_1.AuxInt 2299 if !(umagic64ok(c) && umagic64a(c)) { 2300 break 2301 } 2302 v.reset(OpRsh64Ux64) 2303 v0 := b.NewValue0(v.Line, OpAvg64u, t) 2304 v1 := b.NewValue0(v.Line, OpHmul64u, t) 2305 v1.AddArg(x) 2306 v2 := b.NewValue0(v.Line, OpConst64, t) 2307 v2.AuxInt = umagic64m(c) 2308 v1.AddArg(v2) 2309 v0.AddArg(v1) 2310 v0.AddArg(x) 2311 v.AddArg(v0) 2312 v3 := b.NewValue0(v.Line, OpConst64, t) 2313 v3.AuxInt = 
umagic64s(c) - 1 2314 v.AddArg(v3) 2315 return true 2316 } 2317 return false 2318 } 2319 func rewriteValuegeneric_OpEq16(v *Value, config *Config) bool { 2320 b := v.Block 2321 _ = b 2322 // match: (Eq16 x x) 2323 // cond: 2324 // result: (ConstBool [1]) 2325 for { 2326 x := v.Args[0] 2327 if x != v.Args[1] { 2328 break 2329 } 2330 v.reset(OpConstBool) 2331 v.AuxInt = 1 2332 return true 2333 } 2334 // match: (Eq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x)) 2335 // cond: 2336 // result: (Eq16 (Const16 <t> [int64(int16(c-d))]) x) 2337 for { 2338 v_0 := v.Args[0] 2339 if v_0.Op != OpConst16 { 2340 break 2341 } 2342 t := v_0.Type 2343 c := v_0.AuxInt 2344 v_1 := v.Args[1] 2345 if v_1.Op != OpAdd16 { 2346 break 2347 } 2348 v_1_0 := v_1.Args[0] 2349 if v_1_0.Op != OpConst16 { 2350 break 2351 } 2352 if v_1_0.Type != t { 2353 break 2354 } 2355 d := v_1_0.AuxInt 2356 x := v_1.Args[1] 2357 v.reset(OpEq16) 2358 v0 := b.NewValue0(v.Line, OpConst16, t) 2359 v0.AuxInt = int64(int16(c - d)) 2360 v.AddArg(v0) 2361 v.AddArg(x) 2362 return true 2363 } 2364 // match: (Eq16 x (Const16 <t> [c])) 2365 // cond: x.Op != OpConst16 2366 // result: (Eq16 (Const16 <t> [c]) x) 2367 for { 2368 x := v.Args[0] 2369 v_1 := v.Args[1] 2370 if v_1.Op != OpConst16 { 2371 break 2372 } 2373 t := v_1.Type 2374 c := v_1.AuxInt 2375 if !(x.Op != OpConst16) { 2376 break 2377 } 2378 v.reset(OpEq16) 2379 v0 := b.NewValue0(v.Line, OpConst16, t) 2380 v0.AuxInt = c 2381 v.AddArg(v0) 2382 v.AddArg(x) 2383 return true 2384 } 2385 // match: (Eq16 (Const16 [c]) (Const16 [d])) 2386 // cond: 2387 // result: (ConstBool [b2i(c == d)]) 2388 for { 2389 v_0 := v.Args[0] 2390 if v_0.Op != OpConst16 { 2391 break 2392 } 2393 c := v_0.AuxInt 2394 v_1 := v.Args[1] 2395 if v_1.Op != OpConst16 { 2396 break 2397 } 2398 d := v_1.AuxInt 2399 v.reset(OpConstBool) 2400 v.AuxInt = b2i(c == d) 2401 return true 2402 } 2403 return false 2404 } 2405 func rewriteValuegeneric_OpEq32(v *Value, config *Config) bool { 2406 b := v.Block 2407 
_ = b 2408 // match: (Eq32 x x) 2409 // cond: 2410 // result: (ConstBool [1]) 2411 for { 2412 x := v.Args[0] 2413 if x != v.Args[1] { 2414 break 2415 } 2416 v.reset(OpConstBool) 2417 v.AuxInt = 1 2418 return true 2419 } 2420 // match: (Eq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x)) 2421 // cond: 2422 // result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x) 2423 for { 2424 v_0 := v.Args[0] 2425 if v_0.Op != OpConst32 { 2426 break 2427 } 2428 t := v_0.Type 2429 c := v_0.AuxInt 2430 v_1 := v.Args[1] 2431 if v_1.Op != OpAdd32 { 2432 break 2433 } 2434 v_1_0 := v_1.Args[0] 2435 if v_1_0.Op != OpConst32 { 2436 break 2437 } 2438 if v_1_0.Type != t { 2439 break 2440 } 2441 d := v_1_0.AuxInt 2442 x := v_1.Args[1] 2443 v.reset(OpEq32) 2444 v0 := b.NewValue0(v.Line, OpConst32, t) 2445 v0.AuxInt = int64(int32(c - d)) 2446 v.AddArg(v0) 2447 v.AddArg(x) 2448 return true 2449 } 2450 // match: (Eq32 x (Const32 <t> [c])) 2451 // cond: x.Op != OpConst32 2452 // result: (Eq32 (Const32 <t> [c]) x) 2453 for { 2454 x := v.Args[0] 2455 v_1 := v.Args[1] 2456 if v_1.Op != OpConst32 { 2457 break 2458 } 2459 t := v_1.Type 2460 c := v_1.AuxInt 2461 if !(x.Op != OpConst32) { 2462 break 2463 } 2464 v.reset(OpEq32) 2465 v0 := b.NewValue0(v.Line, OpConst32, t) 2466 v0.AuxInt = c 2467 v.AddArg(v0) 2468 v.AddArg(x) 2469 return true 2470 } 2471 // match: (Eq32 (Const32 [c]) (Const32 [d])) 2472 // cond: 2473 // result: (ConstBool [b2i(c == d)]) 2474 for { 2475 v_0 := v.Args[0] 2476 if v_0.Op != OpConst32 { 2477 break 2478 } 2479 c := v_0.AuxInt 2480 v_1 := v.Args[1] 2481 if v_1.Op != OpConst32 { 2482 break 2483 } 2484 d := v_1.AuxInt 2485 v.reset(OpConstBool) 2486 v.AuxInt = b2i(c == d) 2487 return true 2488 } 2489 return false 2490 } 2491 func rewriteValuegeneric_OpEq64(v *Value, config *Config) bool { 2492 b := v.Block 2493 _ = b 2494 // match: (Eq64 x x) 2495 // cond: 2496 // result: (ConstBool [1]) 2497 for { 2498 x := v.Args[0] 2499 if x != v.Args[1] { 2500 break 2501 } 2502 
v.reset(OpConstBool) 2503 v.AuxInt = 1 2504 return true 2505 } 2506 // match: (Eq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x)) 2507 // cond: 2508 // result: (Eq64 (Const64 <t> [c-d]) x) 2509 for { 2510 v_0 := v.Args[0] 2511 if v_0.Op != OpConst64 { 2512 break 2513 } 2514 t := v_0.Type 2515 c := v_0.AuxInt 2516 v_1 := v.Args[1] 2517 if v_1.Op != OpAdd64 { 2518 break 2519 } 2520 v_1_0 := v_1.Args[0] 2521 if v_1_0.Op != OpConst64 { 2522 break 2523 } 2524 if v_1_0.Type != t { 2525 break 2526 } 2527 d := v_1_0.AuxInt 2528 x := v_1.Args[1] 2529 v.reset(OpEq64) 2530 v0 := b.NewValue0(v.Line, OpConst64, t) 2531 v0.AuxInt = c - d 2532 v.AddArg(v0) 2533 v.AddArg(x) 2534 return true 2535 } 2536 // match: (Eq64 x (Const64 <t> [c])) 2537 // cond: x.Op != OpConst64 2538 // result: (Eq64 (Const64 <t> [c]) x) 2539 for { 2540 x := v.Args[0] 2541 v_1 := v.Args[1] 2542 if v_1.Op != OpConst64 { 2543 break 2544 } 2545 t := v_1.Type 2546 c := v_1.AuxInt 2547 if !(x.Op != OpConst64) { 2548 break 2549 } 2550 v.reset(OpEq64) 2551 v0 := b.NewValue0(v.Line, OpConst64, t) 2552 v0.AuxInt = c 2553 v.AddArg(v0) 2554 v.AddArg(x) 2555 return true 2556 } 2557 // match: (Eq64 (Const64 [c]) (Const64 [d])) 2558 // cond: 2559 // result: (ConstBool [b2i(c == d)]) 2560 for { 2561 v_0 := v.Args[0] 2562 if v_0.Op != OpConst64 { 2563 break 2564 } 2565 c := v_0.AuxInt 2566 v_1 := v.Args[1] 2567 if v_1.Op != OpConst64 { 2568 break 2569 } 2570 d := v_1.AuxInt 2571 v.reset(OpConstBool) 2572 v.AuxInt = b2i(c == d) 2573 return true 2574 } 2575 return false 2576 } 2577 func rewriteValuegeneric_OpEq8(v *Value, config *Config) bool { 2578 b := v.Block 2579 _ = b 2580 // match: (Eq8 x x) 2581 // cond: 2582 // result: (ConstBool [1]) 2583 for { 2584 x := v.Args[0] 2585 if x != v.Args[1] { 2586 break 2587 } 2588 v.reset(OpConstBool) 2589 v.AuxInt = 1 2590 return true 2591 } 2592 // match: (Eq8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x)) 2593 // cond: 2594 // result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x) 2595 for 
{ 2596 v_0 := v.Args[0] 2597 if v_0.Op != OpConst8 { 2598 break 2599 } 2600 t := v_0.Type 2601 c := v_0.AuxInt 2602 v_1 := v.Args[1] 2603 if v_1.Op != OpAdd8 { 2604 break 2605 } 2606 v_1_0 := v_1.Args[0] 2607 if v_1_0.Op != OpConst8 { 2608 break 2609 } 2610 if v_1_0.Type != t { 2611 break 2612 } 2613 d := v_1_0.AuxInt 2614 x := v_1.Args[1] 2615 v.reset(OpEq8) 2616 v0 := b.NewValue0(v.Line, OpConst8, t) 2617 v0.AuxInt = int64(int8(c - d)) 2618 v.AddArg(v0) 2619 v.AddArg(x) 2620 return true 2621 } 2622 // match: (Eq8 x (Const8 <t> [c])) 2623 // cond: x.Op != OpConst8 2624 // result: (Eq8 (Const8 <t> [c]) x) 2625 for { 2626 x := v.Args[0] 2627 v_1 := v.Args[1] 2628 if v_1.Op != OpConst8 { 2629 break 2630 } 2631 t := v_1.Type 2632 c := v_1.AuxInt 2633 if !(x.Op != OpConst8) { 2634 break 2635 } 2636 v.reset(OpEq8) 2637 v0 := b.NewValue0(v.Line, OpConst8, t) 2638 v0.AuxInt = c 2639 v.AddArg(v0) 2640 v.AddArg(x) 2641 return true 2642 } 2643 // match: (Eq8 (Const8 [c]) (Const8 [d])) 2644 // cond: 2645 // result: (ConstBool [b2i(c == d)]) 2646 for { 2647 v_0 := v.Args[0] 2648 if v_0.Op != OpConst8 { 2649 break 2650 } 2651 c := v_0.AuxInt 2652 v_1 := v.Args[1] 2653 if v_1.Op != OpConst8 { 2654 break 2655 } 2656 d := v_1.AuxInt 2657 v.reset(OpConstBool) 2658 v.AuxInt = b2i(c == d) 2659 return true 2660 } 2661 return false 2662 } 2663 func rewriteValuegeneric_OpEqB(v *Value, config *Config) bool { 2664 b := v.Block 2665 _ = b 2666 // match: (EqB (ConstBool [c]) (ConstBool [d])) 2667 // cond: 2668 // result: (ConstBool [b2i(c == d)]) 2669 for { 2670 v_0 := v.Args[0] 2671 if v_0.Op != OpConstBool { 2672 break 2673 } 2674 c := v_0.AuxInt 2675 v_1 := v.Args[1] 2676 if v_1.Op != OpConstBool { 2677 break 2678 } 2679 d := v_1.AuxInt 2680 v.reset(OpConstBool) 2681 v.AuxInt = b2i(c == d) 2682 return true 2683 } 2684 // match: (EqB (ConstBool [0]) x) 2685 // cond: 2686 // result: (Not x) 2687 for { 2688 v_0 := v.Args[0] 2689 if v_0.Op != OpConstBool { 2690 break 2691 } 2692 if 
v_0.AuxInt != 0 { 2693 break 2694 } 2695 x := v.Args[1] 2696 v.reset(OpNot) 2697 v.AddArg(x) 2698 return true 2699 } 2700 // match: (EqB (ConstBool [1]) x) 2701 // cond: 2702 // result: x 2703 for { 2704 v_0 := v.Args[0] 2705 if v_0.Op != OpConstBool { 2706 break 2707 } 2708 if v_0.AuxInt != 1 { 2709 break 2710 } 2711 x := v.Args[1] 2712 v.reset(OpCopy) 2713 v.Type = x.Type 2714 v.AddArg(x) 2715 return true 2716 } 2717 return false 2718 } 2719 func rewriteValuegeneric_OpEqInter(v *Value, config *Config) bool { 2720 b := v.Block 2721 _ = b 2722 // match: (EqInter x y) 2723 // cond: 2724 // result: (EqPtr (ITab x) (ITab y)) 2725 for { 2726 x := v.Args[0] 2727 y := v.Args[1] 2728 v.reset(OpEqPtr) 2729 v0 := b.NewValue0(v.Line, OpITab, config.fe.TypeBytePtr()) 2730 v0.AddArg(x) 2731 v.AddArg(v0) 2732 v1 := b.NewValue0(v.Line, OpITab, config.fe.TypeBytePtr()) 2733 v1.AddArg(y) 2734 v.AddArg(v1) 2735 return true 2736 } 2737 } 2738 func rewriteValuegeneric_OpEqPtr(v *Value, config *Config) bool { 2739 b := v.Block 2740 _ = b 2741 // match: (EqPtr p (ConstNil)) 2742 // cond: 2743 // result: (Not (IsNonNil p)) 2744 for { 2745 p := v.Args[0] 2746 v_1 := v.Args[1] 2747 if v_1.Op != OpConstNil { 2748 break 2749 } 2750 v.reset(OpNot) 2751 v0 := b.NewValue0(v.Line, OpIsNonNil, config.fe.TypeBool()) 2752 v0.AddArg(p) 2753 v.AddArg(v0) 2754 return true 2755 } 2756 // match: (EqPtr (ConstNil) p) 2757 // cond: 2758 // result: (Not (IsNonNil p)) 2759 for { 2760 v_0 := v.Args[0] 2761 if v_0.Op != OpConstNil { 2762 break 2763 } 2764 p := v.Args[1] 2765 v.reset(OpNot) 2766 v0 := b.NewValue0(v.Line, OpIsNonNil, config.fe.TypeBool()) 2767 v0.AddArg(p) 2768 v.AddArg(v0) 2769 return true 2770 } 2771 return false 2772 } 2773 func rewriteValuegeneric_OpEqSlice(v *Value, config *Config) bool { 2774 b := v.Block 2775 _ = b 2776 // match: (EqSlice x y) 2777 // cond: 2778 // result: (EqPtr (SlicePtr x) (SlicePtr y)) 2779 for { 2780 x := v.Args[0] 2781 y := v.Args[1] 2782 v.reset(OpEqPtr) 2783 v0 
:= b.NewValue0(v.Line, OpSlicePtr, config.fe.TypeBytePtr()) 2784 v0.AddArg(x) 2785 v.AddArg(v0) 2786 v1 := b.NewValue0(v.Line, OpSlicePtr, config.fe.TypeBytePtr()) 2787 v1.AddArg(y) 2788 v.AddArg(v1) 2789 return true 2790 } 2791 } 2792 func rewriteValuegeneric_OpGeq16(v *Value, config *Config) bool { 2793 b := v.Block 2794 _ = b 2795 // match: (Geq16 (Const16 [c]) (Const16 [d])) 2796 // cond: 2797 // result: (ConstBool [b2i(c >= d)]) 2798 for { 2799 v_0 := v.Args[0] 2800 if v_0.Op != OpConst16 { 2801 break 2802 } 2803 c := v_0.AuxInt 2804 v_1 := v.Args[1] 2805 if v_1.Op != OpConst16 { 2806 break 2807 } 2808 d := v_1.AuxInt 2809 v.reset(OpConstBool) 2810 v.AuxInt = b2i(c >= d) 2811 return true 2812 } 2813 return false 2814 } 2815 func rewriteValuegeneric_OpGeq16U(v *Value, config *Config) bool { 2816 b := v.Block 2817 _ = b 2818 // match: (Geq16U (Const16 [c]) (Const16 [d])) 2819 // cond: 2820 // result: (ConstBool [b2i(uint16(c) >= uint16(d))]) 2821 for { 2822 v_0 := v.Args[0] 2823 if v_0.Op != OpConst16 { 2824 break 2825 } 2826 c := v_0.AuxInt 2827 v_1 := v.Args[1] 2828 if v_1.Op != OpConst16 { 2829 break 2830 } 2831 d := v_1.AuxInt 2832 v.reset(OpConstBool) 2833 v.AuxInt = b2i(uint16(c) >= uint16(d)) 2834 return true 2835 } 2836 return false 2837 } 2838 func rewriteValuegeneric_OpGeq32(v *Value, config *Config) bool { 2839 b := v.Block 2840 _ = b 2841 // match: (Geq32 (Const32 [c]) (Const32 [d])) 2842 // cond: 2843 // result: (ConstBool [b2i(c >= d)]) 2844 for { 2845 v_0 := v.Args[0] 2846 if v_0.Op != OpConst32 { 2847 break 2848 } 2849 c := v_0.AuxInt 2850 v_1 := v.Args[1] 2851 if v_1.Op != OpConst32 { 2852 break 2853 } 2854 d := v_1.AuxInt 2855 v.reset(OpConstBool) 2856 v.AuxInt = b2i(c >= d) 2857 return true 2858 } 2859 return false 2860 } 2861 func rewriteValuegeneric_OpGeq32U(v *Value, config *Config) bool { 2862 b := v.Block 2863 _ = b 2864 // match: (Geq32U (Const32 [c]) (Const32 [d])) 2865 // cond: 2866 // result: (ConstBool [b2i(uint32(c) >= uint32(d))]) 
2867 for { 2868 v_0 := v.Args[0] 2869 if v_0.Op != OpConst32 { 2870 break 2871 } 2872 c := v_0.AuxInt 2873 v_1 := v.Args[1] 2874 if v_1.Op != OpConst32 { 2875 break 2876 } 2877 d := v_1.AuxInt 2878 v.reset(OpConstBool) 2879 v.AuxInt = b2i(uint32(c) >= uint32(d)) 2880 return true 2881 } 2882 return false 2883 } 2884 func rewriteValuegeneric_OpGeq64(v *Value, config *Config) bool { 2885 b := v.Block 2886 _ = b 2887 // match: (Geq64 (Const64 [c]) (Const64 [d])) 2888 // cond: 2889 // result: (ConstBool [b2i(c >= d)]) 2890 for { 2891 v_0 := v.Args[0] 2892 if v_0.Op != OpConst64 { 2893 break 2894 } 2895 c := v_0.AuxInt 2896 v_1 := v.Args[1] 2897 if v_1.Op != OpConst64 { 2898 break 2899 } 2900 d := v_1.AuxInt 2901 v.reset(OpConstBool) 2902 v.AuxInt = b2i(c >= d) 2903 return true 2904 } 2905 return false 2906 } 2907 func rewriteValuegeneric_OpGeq64U(v *Value, config *Config) bool { 2908 b := v.Block 2909 _ = b 2910 // match: (Geq64U (Const64 [c]) (Const64 [d])) 2911 // cond: 2912 // result: (ConstBool [b2i(uint64(c) >= uint64(d))]) 2913 for { 2914 v_0 := v.Args[0] 2915 if v_0.Op != OpConst64 { 2916 break 2917 } 2918 c := v_0.AuxInt 2919 v_1 := v.Args[1] 2920 if v_1.Op != OpConst64 { 2921 break 2922 } 2923 d := v_1.AuxInt 2924 v.reset(OpConstBool) 2925 v.AuxInt = b2i(uint64(c) >= uint64(d)) 2926 return true 2927 } 2928 return false 2929 } 2930 func rewriteValuegeneric_OpGeq8(v *Value, config *Config) bool { 2931 b := v.Block 2932 _ = b 2933 // match: (Geq8 (Const8 [c]) (Const8 [d])) 2934 // cond: 2935 // result: (ConstBool [b2i(c >= d)]) 2936 for { 2937 v_0 := v.Args[0] 2938 if v_0.Op != OpConst8 { 2939 break 2940 } 2941 c := v_0.AuxInt 2942 v_1 := v.Args[1] 2943 if v_1.Op != OpConst8 { 2944 break 2945 } 2946 d := v_1.AuxInt 2947 v.reset(OpConstBool) 2948 v.AuxInt = b2i(c >= d) 2949 return true 2950 } 2951 return false 2952 } 2953 func rewriteValuegeneric_OpGeq8U(v *Value, config *Config) bool { 2954 b := v.Block 2955 _ = b 2956 // match: (Geq8U (Const8 [c]) (Const8 [d])) 
2957 // cond: 2958 // result: (ConstBool [b2i(uint8(c) >= uint8(d))]) 2959 for { 2960 v_0 := v.Args[0] 2961 if v_0.Op != OpConst8 { 2962 break 2963 } 2964 c := v_0.AuxInt 2965 v_1 := v.Args[1] 2966 if v_1.Op != OpConst8 { 2967 break 2968 } 2969 d := v_1.AuxInt 2970 v.reset(OpConstBool) 2971 v.AuxInt = b2i(uint8(c) >= uint8(d)) 2972 return true 2973 } 2974 return false 2975 } 2976 func rewriteValuegeneric_OpGreater16(v *Value, config *Config) bool { 2977 b := v.Block 2978 _ = b 2979 // match: (Greater16 (Const16 [c]) (Const16 [d])) 2980 // cond: 2981 // result: (ConstBool [b2i(c > d)]) 2982 for { 2983 v_0 := v.Args[0] 2984 if v_0.Op != OpConst16 { 2985 break 2986 } 2987 c := v_0.AuxInt 2988 v_1 := v.Args[1] 2989 if v_1.Op != OpConst16 { 2990 break 2991 } 2992 d := v_1.AuxInt 2993 v.reset(OpConstBool) 2994 v.AuxInt = b2i(c > d) 2995 return true 2996 } 2997 return false 2998 } 2999 func rewriteValuegeneric_OpGreater16U(v *Value, config *Config) bool { 3000 b := v.Block 3001 _ = b 3002 // match: (Greater16U (Const16 [c]) (Const16 [d])) 3003 // cond: 3004 // result: (ConstBool [b2i(uint16(c) > uint16(d))]) 3005 for { 3006 v_0 := v.Args[0] 3007 if v_0.Op != OpConst16 { 3008 break 3009 } 3010 c := v_0.AuxInt 3011 v_1 := v.Args[1] 3012 if v_1.Op != OpConst16 { 3013 break 3014 } 3015 d := v_1.AuxInt 3016 v.reset(OpConstBool) 3017 v.AuxInt = b2i(uint16(c) > uint16(d)) 3018 return true 3019 } 3020 return false 3021 } 3022 func rewriteValuegeneric_OpGreater32(v *Value, config *Config) bool { 3023 b := v.Block 3024 _ = b 3025 // match: (Greater32 (Const32 [c]) (Const32 [d])) 3026 // cond: 3027 // result: (ConstBool [b2i(c > d)]) 3028 for { 3029 v_0 := v.Args[0] 3030 if v_0.Op != OpConst32 { 3031 break 3032 } 3033 c := v_0.AuxInt 3034 v_1 := v.Args[1] 3035 if v_1.Op != OpConst32 { 3036 break 3037 } 3038 d := v_1.AuxInt 3039 v.reset(OpConstBool) 3040 v.AuxInt = b2i(c > d) 3041 return true 3042 } 3043 return false 3044 } 3045 func rewriteValuegeneric_OpGreater32U(v *Value, config 
*Config) bool { 3046 b := v.Block 3047 _ = b 3048 // match: (Greater32U (Const32 [c]) (Const32 [d])) 3049 // cond: 3050 // result: (ConstBool [b2i(uint32(c) > uint32(d))]) 3051 for { 3052 v_0 := v.Args[0] 3053 if v_0.Op != OpConst32 { 3054 break 3055 } 3056 c := v_0.AuxInt 3057 v_1 := v.Args[1] 3058 if v_1.Op != OpConst32 { 3059 break 3060 } 3061 d := v_1.AuxInt 3062 v.reset(OpConstBool) 3063 v.AuxInt = b2i(uint32(c) > uint32(d)) 3064 return true 3065 } 3066 return false 3067 } 3068 func rewriteValuegeneric_OpGreater64(v *Value, config *Config) bool { 3069 b := v.Block 3070 _ = b 3071 // match: (Greater64 (Const64 [c]) (Const64 [d])) 3072 // cond: 3073 // result: (ConstBool [b2i(c > d)]) 3074 for { 3075 v_0 := v.Args[0] 3076 if v_0.Op != OpConst64 { 3077 break 3078 } 3079 c := v_0.AuxInt 3080 v_1 := v.Args[1] 3081 if v_1.Op != OpConst64 { 3082 break 3083 } 3084 d := v_1.AuxInt 3085 v.reset(OpConstBool) 3086 v.AuxInt = b2i(c > d) 3087 return true 3088 } 3089 return false 3090 } 3091 func rewriteValuegeneric_OpGreater64U(v *Value, config *Config) bool { 3092 b := v.Block 3093 _ = b 3094 // match: (Greater64U (Const64 [c]) (Const64 [d])) 3095 // cond: 3096 // result: (ConstBool [b2i(uint64(c) > uint64(d))]) 3097 for { 3098 v_0 := v.Args[0] 3099 if v_0.Op != OpConst64 { 3100 break 3101 } 3102 c := v_0.AuxInt 3103 v_1 := v.Args[1] 3104 if v_1.Op != OpConst64 { 3105 break 3106 } 3107 d := v_1.AuxInt 3108 v.reset(OpConstBool) 3109 v.AuxInt = b2i(uint64(c) > uint64(d)) 3110 return true 3111 } 3112 return false 3113 } 3114 func rewriteValuegeneric_OpGreater8(v *Value, config *Config) bool { 3115 b := v.Block 3116 _ = b 3117 // match: (Greater8 (Const8 [c]) (Const8 [d])) 3118 // cond: 3119 // result: (ConstBool [b2i(c > d)]) 3120 for { 3121 v_0 := v.Args[0] 3122 if v_0.Op != OpConst8 { 3123 break 3124 } 3125 c := v_0.AuxInt 3126 v_1 := v.Args[1] 3127 if v_1.Op != OpConst8 { 3128 break 3129 } 3130 d := v_1.AuxInt 3131 v.reset(OpConstBool) 3132 v.AuxInt = b2i(c > d) 3133 
return true 3134 } 3135 return false 3136 } 3137 func rewriteValuegeneric_OpGreater8U(v *Value, config *Config) bool { 3138 b := v.Block 3139 _ = b 3140 // match: (Greater8U (Const8 [c]) (Const8 [d])) 3141 // cond: 3142 // result: (ConstBool [b2i(uint8(c) > uint8(d))]) 3143 for { 3144 v_0 := v.Args[0] 3145 if v_0.Op != OpConst8 { 3146 break 3147 } 3148 c := v_0.AuxInt 3149 v_1 := v.Args[1] 3150 if v_1.Op != OpConst8 { 3151 break 3152 } 3153 d := v_1.AuxInt 3154 v.reset(OpConstBool) 3155 v.AuxInt = b2i(uint8(c) > uint8(d)) 3156 return true 3157 } 3158 return false 3159 } 3160 func rewriteValuegeneric_OpIMake(v *Value, config *Config) bool { 3161 b := v.Block 3162 _ = b 3163 // match: (IMake typ (StructMake1 val)) 3164 // cond: 3165 // result: (IMake typ val) 3166 for { 3167 typ := v.Args[0] 3168 v_1 := v.Args[1] 3169 if v_1.Op != OpStructMake1 { 3170 break 3171 } 3172 val := v_1.Args[0] 3173 v.reset(OpIMake) 3174 v.AddArg(typ) 3175 v.AddArg(val) 3176 return true 3177 } 3178 // match: (IMake typ (ArrayMake1 val)) 3179 // cond: 3180 // result: (IMake typ val) 3181 for { 3182 typ := v.Args[0] 3183 v_1 := v.Args[1] 3184 if v_1.Op != OpArrayMake1 { 3185 break 3186 } 3187 val := v_1.Args[0] 3188 v.reset(OpIMake) 3189 v.AddArg(typ) 3190 v.AddArg(val) 3191 return true 3192 } 3193 return false 3194 } 3195 func rewriteValuegeneric_OpIsInBounds(v *Value, config *Config) bool { 3196 b := v.Block 3197 _ = b 3198 // match: (IsInBounds (ZeroExt8to32 _) (Const32 [c])) 3199 // cond: (1 << 8) <= c 3200 // result: (ConstBool [1]) 3201 for { 3202 v_0 := v.Args[0] 3203 if v_0.Op != OpZeroExt8to32 { 3204 break 3205 } 3206 v_1 := v.Args[1] 3207 if v_1.Op != OpConst32 { 3208 break 3209 } 3210 c := v_1.AuxInt 3211 if !((1 << 8) <= c) { 3212 break 3213 } 3214 v.reset(OpConstBool) 3215 v.AuxInt = 1 3216 return true 3217 } 3218 // match: (IsInBounds (ZeroExt8to64 _) (Const64 [c])) 3219 // cond: (1 << 8) <= c 3220 // result: (ConstBool [1]) 3221 for { 3222 v_0 := v.Args[0] 3223 if v_0.Op != 
OpZeroExt8to64 { 3224 break 3225 } 3226 v_1 := v.Args[1] 3227 if v_1.Op != OpConst64 { 3228 break 3229 } 3230 c := v_1.AuxInt 3231 if !((1 << 8) <= c) { 3232 break 3233 } 3234 v.reset(OpConstBool) 3235 v.AuxInt = 1 3236 return true 3237 } 3238 // match: (IsInBounds (ZeroExt16to32 _) (Const32 [c])) 3239 // cond: (1 << 16) <= c 3240 // result: (ConstBool [1]) 3241 for { 3242 v_0 := v.Args[0] 3243 if v_0.Op != OpZeroExt16to32 { 3244 break 3245 } 3246 v_1 := v.Args[1] 3247 if v_1.Op != OpConst32 { 3248 break 3249 } 3250 c := v_1.AuxInt 3251 if !((1 << 16) <= c) { 3252 break 3253 } 3254 v.reset(OpConstBool) 3255 v.AuxInt = 1 3256 return true 3257 } 3258 // match: (IsInBounds (ZeroExt16to64 _) (Const64 [c])) 3259 // cond: (1 << 16) <= c 3260 // result: (ConstBool [1]) 3261 for { 3262 v_0 := v.Args[0] 3263 if v_0.Op != OpZeroExt16to64 { 3264 break 3265 } 3266 v_1 := v.Args[1] 3267 if v_1.Op != OpConst64 { 3268 break 3269 } 3270 c := v_1.AuxInt 3271 if !((1 << 16) <= c) { 3272 break 3273 } 3274 v.reset(OpConstBool) 3275 v.AuxInt = 1 3276 return true 3277 } 3278 // match: (IsInBounds x x) 3279 // cond: 3280 // result: (ConstBool [0]) 3281 for { 3282 x := v.Args[0] 3283 if x != v.Args[1] { 3284 break 3285 } 3286 v.reset(OpConstBool) 3287 v.AuxInt = 0 3288 return true 3289 } 3290 // match: (IsInBounds (And32 (Const32 [c]) _) (Const32 [d])) 3291 // cond: 0 <= c && c < d 3292 // result: (ConstBool [1]) 3293 for { 3294 v_0 := v.Args[0] 3295 if v_0.Op != OpAnd32 { 3296 break 3297 } 3298 v_0_0 := v_0.Args[0] 3299 if v_0_0.Op != OpConst32 { 3300 break 3301 } 3302 c := v_0_0.AuxInt 3303 v_1 := v.Args[1] 3304 if v_1.Op != OpConst32 { 3305 break 3306 } 3307 d := v_1.AuxInt 3308 if !(0 <= c && c < d) { 3309 break 3310 } 3311 v.reset(OpConstBool) 3312 v.AuxInt = 1 3313 return true 3314 } 3315 // match: (IsInBounds (And64 (Const64 [c]) _) (Const64 [d])) 3316 // cond: 0 <= c && c < d 3317 // result: (ConstBool [1]) 3318 for { 3319 v_0 := v.Args[0] 3320 if v_0.Op != OpAnd64 { 3321 break 
3322 } 3323 v_0_0 := v_0.Args[0] 3324 if v_0_0.Op != OpConst64 { 3325 break 3326 } 3327 c := v_0_0.AuxInt 3328 v_1 := v.Args[1] 3329 if v_1.Op != OpConst64 { 3330 break 3331 } 3332 d := v_1.AuxInt 3333 if !(0 <= c && c < d) { 3334 break 3335 } 3336 v.reset(OpConstBool) 3337 v.AuxInt = 1 3338 return true 3339 } 3340 // match: (IsInBounds (Const32 [c]) (Const32 [d])) 3341 // cond: 3342 // result: (ConstBool [b2i(0 <= c && c < d)]) 3343 for { 3344 v_0 := v.Args[0] 3345 if v_0.Op != OpConst32 { 3346 break 3347 } 3348 c := v_0.AuxInt 3349 v_1 := v.Args[1] 3350 if v_1.Op != OpConst32 { 3351 break 3352 } 3353 d := v_1.AuxInt 3354 v.reset(OpConstBool) 3355 v.AuxInt = b2i(0 <= c && c < d) 3356 return true 3357 } 3358 // match: (IsInBounds (Const64 [c]) (Const64 [d])) 3359 // cond: 3360 // result: (ConstBool [b2i(0 <= c && c < d)]) 3361 for { 3362 v_0 := v.Args[0] 3363 if v_0.Op != OpConst64 { 3364 break 3365 } 3366 c := v_0.AuxInt 3367 v_1 := v.Args[1] 3368 if v_1.Op != OpConst64 { 3369 break 3370 } 3371 d := v_1.AuxInt 3372 v.reset(OpConstBool) 3373 v.AuxInt = b2i(0 <= c && c < d) 3374 return true 3375 } 3376 // match: (IsInBounds (Mod32u _ y) y) 3377 // cond: 3378 // result: (ConstBool [1]) 3379 for { 3380 v_0 := v.Args[0] 3381 if v_0.Op != OpMod32u { 3382 break 3383 } 3384 y := v_0.Args[1] 3385 if y != v.Args[1] { 3386 break 3387 } 3388 v.reset(OpConstBool) 3389 v.AuxInt = 1 3390 return true 3391 } 3392 // match: (IsInBounds (Mod64u _ y) y) 3393 // cond: 3394 // result: (ConstBool [1]) 3395 for { 3396 v_0 := v.Args[0] 3397 if v_0.Op != OpMod64u { 3398 break 3399 } 3400 y := v_0.Args[1] 3401 if y != v.Args[1] { 3402 break 3403 } 3404 v.reset(OpConstBool) 3405 v.AuxInt = 1 3406 return true 3407 } 3408 return false 3409 } 3410 func rewriteValuegeneric_OpIsSliceInBounds(v *Value, config *Config) bool { 3411 b := v.Block 3412 _ = b 3413 // match: (IsSliceInBounds x x) 3414 // cond: 3415 // result: (ConstBool [1]) 3416 for { 3417 x := v.Args[0] 3418 if x != v.Args[1] { 3419 
break 3420 } 3421 v.reset(OpConstBool) 3422 v.AuxInt = 1 3423 return true 3424 } 3425 // match: (IsSliceInBounds (And32 (Const32 [c]) _) (Const32 [d])) 3426 // cond: 0 <= c && c <= d 3427 // result: (ConstBool [1]) 3428 for { 3429 v_0 := v.Args[0] 3430 if v_0.Op != OpAnd32 { 3431 break 3432 } 3433 v_0_0 := v_0.Args[0] 3434 if v_0_0.Op != OpConst32 { 3435 break 3436 } 3437 c := v_0_0.AuxInt 3438 v_1 := v.Args[1] 3439 if v_1.Op != OpConst32 { 3440 break 3441 } 3442 d := v_1.AuxInt 3443 if !(0 <= c && c <= d) { 3444 break 3445 } 3446 v.reset(OpConstBool) 3447 v.AuxInt = 1 3448 return true 3449 } 3450 // match: (IsSliceInBounds (And64 (Const64 [c]) _) (Const64 [d])) 3451 // cond: 0 <= c && c <= d 3452 // result: (ConstBool [1]) 3453 for { 3454 v_0 := v.Args[0] 3455 if v_0.Op != OpAnd64 { 3456 break 3457 } 3458 v_0_0 := v_0.Args[0] 3459 if v_0_0.Op != OpConst64 { 3460 break 3461 } 3462 c := v_0_0.AuxInt 3463 v_1 := v.Args[1] 3464 if v_1.Op != OpConst64 { 3465 break 3466 } 3467 d := v_1.AuxInt 3468 if !(0 <= c && c <= d) { 3469 break 3470 } 3471 v.reset(OpConstBool) 3472 v.AuxInt = 1 3473 return true 3474 } 3475 // match: (IsSliceInBounds (Const32 [0]) _) 3476 // cond: 3477 // result: (ConstBool [1]) 3478 for { 3479 v_0 := v.Args[0] 3480 if v_0.Op != OpConst32 { 3481 break 3482 } 3483 if v_0.AuxInt != 0 { 3484 break 3485 } 3486 v.reset(OpConstBool) 3487 v.AuxInt = 1 3488 return true 3489 } 3490 // match: (IsSliceInBounds (Const64 [0]) _) 3491 // cond: 3492 // result: (ConstBool [1]) 3493 for { 3494 v_0 := v.Args[0] 3495 if v_0.Op != OpConst64 { 3496 break 3497 } 3498 if v_0.AuxInt != 0 { 3499 break 3500 } 3501 v.reset(OpConstBool) 3502 v.AuxInt = 1 3503 return true 3504 } 3505 // match: (IsSliceInBounds (Const32 [c]) (Const32 [d])) 3506 // cond: 3507 // result: (ConstBool [b2i(0 <= c && c <= d)]) 3508 for { 3509 v_0 := v.Args[0] 3510 if v_0.Op != OpConst32 { 3511 break 3512 } 3513 c := v_0.AuxInt 3514 v_1 := v.Args[1] 3515 if v_1.Op != OpConst32 { 3516 break 3517 } 3518 
d := v_1.AuxInt 3519 v.reset(OpConstBool) 3520 v.AuxInt = b2i(0 <= c && c <= d) 3521 return true 3522 } 3523 // match: (IsSliceInBounds (Const64 [c]) (Const64 [d])) 3524 // cond: 3525 // result: (ConstBool [b2i(0 <= c && c <= d)]) 3526 for { 3527 v_0 := v.Args[0] 3528 if v_0.Op != OpConst64 { 3529 break 3530 } 3531 c := v_0.AuxInt 3532 v_1 := v.Args[1] 3533 if v_1.Op != OpConst64 { 3534 break 3535 } 3536 d := v_1.AuxInt 3537 v.reset(OpConstBool) 3538 v.AuxInt = b2i(0 <= c && c <= d) 3539 return true 3540 } 3541 // match: (IsSliceInBounds (SliceLen x) (SliceCap x)) 3542 // cond: 3543 // result: (ConstBool [1]) 3544 for { 3545 v_0 := v.Args[0] 3546 if v_0.Op != OpSliceLen { 3547 break 3548 } 3549 x := v_0.Args[0] 3550 v_1 := v.Args[1] 3551 if v_1.Op != OpSliceCap { 3552 break 3553 } 3554 if x != v_1.Args[0] { 3555 break 3556 } 3557 v.reset(OpConstBool) 3558 v.AuxInt = 1 3559 return true 3560 } 3561 return false 3562 } 3563 func rewriteValuegeneric_OpLeq16(v *Value, config *Config) bool { 3564 b := v.Block 3565 _ = b 3566 // match: (Leq16 (Const16 [c]) (Const16 [d])) 3567 // cond: 3568 // result: (ConstBool [b2i(c <= d)]) 3569 for { 3570 v_0 := v.Args[0] 3571 if v_0.Op != OpConst16 { 3572 break 3573 } 3574 c := v_0.AuxInt 3575 v_1 := v.Args[1] 3576 if v_1.Op != OpConst16 { 3577 break 3578 } 3579 d := v_1.AuxInt 3580 v.reset(OpConstBool) 3581 v.AuxInt = b2i(c <= d) 3582 return true 3583 } 3584 return false 3585 } 3586 func rewriteValuegeneric_OpLeq16U(v *Value, config *Config) bool { 3587 b := v.Block 3588 _ = b 3589 // match: (Leq16U (Const16 [c]) (Const16 [d])) 3590 // cond: 3591 // result: (ConstBool [b2i(uint16(c) <= uint16(d))]) 3592 for { 3593 v_0 := v.Args[0] 3594 if v_0.Op != OpConst16 { 3595 break 3596 } 3597 c := v_0.AuxInt 3598 v_1 := v.Args[1] 3599 if v_1.Op != OpConst16 { 3600 break 3601 } 3602 d := v_1.AuxInt 3603 v.reset(OpConstBool) 3604 v.AuxInt = b2i(uint16(c) <= uint16(d)) 3605 return true 3606 } 3607 return false 3608 } 3609 func 
rewriteValuegeneric_OpLeq32(v *Value, config *Config) bool { 3610 b := v.Block 3611 _ = b 3612 // match: (Leq32 (Const32 [c]) (Const32 [d])) 3613 // cond: 3614 // result: (ConstBool [b2i(c <= d)]) 3615 for { 3616 v_0 := v.Args[0] 3617 if v_0.Op != OpConst32 { 3618 break 3619 } 3620 c := v_0.AuxInt 3621 v_1 := v.Args[1] 3622 if v_1.Op != OpConst32 { 3623 break 3624 } 3625 d := v_1.AuxInt 3626 v.reset(OpConstBool) 3627 v.AuxInt = b2i(c <= d) 3628 return true 3629 } 3630 return false 3631 } 3632 func rewriteValuegeneric_OpLeq32U(v *Value, config *Config) bool { 3633 b := v.Block 3634 _ = b 3635 // match: (Leq32U (Const32 [c]) (Const32 [d])) 3636 // cond: 3637 // result: (ConstBool [b2i(uint32(c) <= uint32(d))]) 3638 for { 3639 v_0 := v.Args[0] 3640 if v_0.Op != OpConst32 { 3641 break 3642 } 3643 c := v_0.AuxInt 3644 v_1 := v.Args[1] 3645 if v_1.Op != OpConst32 { 3646 break 3647 } 3648 d := v_1.AuxInt 3649 v.reset(OpConstBool) 3650 v.AuxInt = b2i(uint32(c) <= uint32(d)) 3651 return true 3652 } 3653 return false 3654 } 3655 func rewriteValuegeneric_OpLeq64(v *Value, config *Config) bool { 3656 b := v.Block 3657 _ = b 3658 // match: (Leq64 (Const64 [c]) (Const64 [d])) 3659 // cond: 3660 // result: (ConstBool [b2i(c <= d)]) 3661 for { 3662 v_0 := v.Args[0] 3663 if v_0.Op != OpConst64 { 3664 break 3665 } 3666 c := v_0.AuxInt 3667 v_1 := v.Args[1] 3668 if v_1.Op != OpConst64 { 3669 break 3670 } 3671 d := v_1.AuxInt 3672 v.reset(OpConstBool) 3673 v.AuxInt = b2i(c <= d) 3674 return true 3675 } 3676 return false 3677 } 3678 func rewriteValuegeneric_OpLeq64U(v *Value, config *Config) bool { 3679 b := v.Block 3680 _ = b 3681 // match: (Leq64U (Const64 [c]) (Const64 [d])) 3682 // cond: 3683 // result: (ConstBool [b2i(uint64(c) <= uint64(d))]) 3684 for { 3685 v_0 := v.Args[0] 3686 if v_0.Op != OpConst64 { 3687 break 3688 } 3689 c := v_0.AuxInt 3690 v_1 := v.Args[1] 3691 if v_1.Op != OpConst64 { 3692 break 3693 } 3694 d := v_1.AuxInt 3695 v.reset(OpConstBool) 3696 v.AuxInt = 
b2i(uint64(c) <= uint64(d)) 3697 return true 3698 } 3699 return false 3700 } 3701 func rewriteValuegeneric_OpLeq8(v *Value, config *Config) bool { 3702 b := v.Block 3703 _ = b 3704 // match: (Leq8 (Const8 [c]) (Const8 [d])) 3705 // cond: 3706 // result: (ConstBool [b2i(c <= d)]) 3707 for { 3708 v_0 := v.Args[0] 3709 if v_0.Op != OpConst8 { 3710 break 3711 } 3712 c := v_0.AuxInt 3713 v_1 := v.Args[1] 3714 if v_1.Op != OpConst8 { 3715 break 3716 } 3717 d := v_1.AuxInt 3718 v.reset(OpConstBool) 3719 v.AuxInt = b2i(c <= d) 3720 return true 3721 } 3722 return false 3723 } 3724 func rewriteValuegeneric_OpLeq8U(v *Value, config *Config) bool { 3725 b := v.Block 3726 _ = b 3727 // match: (Leq8U (Const8 [c]) (Const8 [d])) 3728 // cond: 3729 // result: (ConstBool [b2i(uint8(c) <= uint8(d))]) 3730 for { 3731 v_0 := v.Args[0] 3732 if v_0.Op != OpConst8 { 3733 break 3734 } 3735 c := v_0.AuxInt 3736 v_1 := v.Args[1] 3737 if v_1.Op != OpConst8 { 3738 break 3739 } 3740 d := v_1.AuxInt 3741 v.reset(OpConstBool) 3742 v.AuxInt = b2i(uint8(c) <= uint8(d)) 3743 return true 3744 } 3745 return false 3746 } 3747 func rewriteValuegeneric_OpLess16(v *Value, config *Config) bool { 3748 b := v.Block 3749 _ = b 3750 // match: (Less16 (Const16 [c]) (Const16 [d])) 3751 // cond: 3752 // result: (ConstBool [b2i(c < d)]) 3753 for { 3754 v_0 := v.Args[0] 3755 if v_0.Op != OpConst16 { 3756 break 3757 } 3758 c := v_0.AuxInt 3759 v_1 := v.Args[1] 3760 if v_1.Op != OpConst16 { 3761 break 3762 } 3763 d := v_1.AuxInt 3764 v.reset(OpConstBool) 3765 v.AuxInt = b2i(c < d) 3766 return true 3767 } 3768 return false 3769 } 3770 func rewriteValuegeneric_OpLess16U(v *Value, config *Config) bool { 3771 b := v.Block 3772 _ = b 3773 // match: (Less16U (Const16 [c]) (Const16 [d])) 3774 // cond: 3775 // result: (ConstBool [b2i(uint16(c) < uint16(d))]) 3776 for { 3777 v_0 := v.Args[0] 3778 if v_0.Op != OpConst16 { 3779 break 3780 } 3781 c := v_0.AuxInt 3782 v_1 := v.Args[1] 3783 if v_1.Op != OpConst16 { 3784 break 3785 
} 3786 d := v_1.AuxInt 3787 v.reset(OpConstBool) 3788 v.AuxInt = b2i(uint16(c) < uint16(d)) 3789 return true 3790 } 3791 return false 3792 } 3793 func rewriteValuegeneric_OpLess32(v *Value, config *Config) bool { 3794 b := v.Block 3795 _ = b 3796 // match: (Less32 (Const32 [c]) (Const32 [d])) 3797 // cond: 3798 // result: (ConstBool [b2i(c < d)]) 3799 for { 3800 v_0 := v.Args[0] 3801 if v_0.Op != OpConst32 { 3802 break 3803 } 3804 c := v_0.AuxInt 3805 v_1 := v.Args[1] 3806 if v_1.Op != OpConst32 { 3807 break 3808 } 3809 d := v_1.AuxInt 3810 v.reset(OpConstBool) 3811 v.AuxInt = b2i(c < d) 3812 return true 3813 } 3814 return false 3815 } 3816 func rewriteValuegeneric_OpLess32U(v *Value, config *Config) bool { 3817 b := v.Block 3818 _ = b 3819 // match: (Less32U (Const32 [c]) (Const32 [d])) 3820 // cond: 3821 // result: (ConstBool [b2i(uint32(c) < uint32(d))]) 3822 for { 3823 v_0 := v.Args[0] 3824 if v_0.Op != OpConst32 { 3825 break 3826 } 3827 c := v_0.AuxInt 3828 v_1 := v.Args[1] 3829 if v_1.Op != OpConst32 { 3830 break 3831 } 3832 d := v_1.AuxInt 3833 v.reset(OpConstBool) 3834 v.AuxInt = b2i(uint32(c) < uint32(d)) 3835 return true 3836 } 3837 return false 3838 } 3839 func rewriteValuegeneric_OpLess64(v *Value, config *Config) bool { 3840 b := v.Block 3841 _ = b 3842 // match: (Less64 (Const64 [c]) (Const64 [d])) 3843 // cond: 3844 // result: (ConstBool [b2i(c < d)]) 3845 for { 3846 v_0 := v.Args[0] 3847 if v_0.Op != OpConst64 { 3848 break 3849 } 3850 c := v_0.AuxInt 3851 v_1 := v.Args[1] 3852 if v_1.Op != OpConst64 { 3853 break 3854 } 3855 d := v_1.AuxInt 3856 v.reset(OpConstBool) 3857 v.AuxInt = b2i(c < d) 3858 return true 3859 } 3860 return false 3861 } 3862 func rewriteValuegeneric_OpLess64U(v *Value, config *Config) bool { 3863 b := v.Block 3864 _ = b 3865 // match: (Less64U (Const64 [c]) (Const64 [d])) 3866 // cond: 3867 // result: (ConstBool [b2i(uint64(c) < uint64(d))]) 3868 for { 3869 v_0 := v.Args[0] 3870 if v_0.Op != OpConst64 { 3871 break 3872 } 3873 c 
:= v_0.AuxInt 3874 v_1 := v.Args[1] 3875 if v_1.Op != OpConst64 { 3876 break 3877 } 3878 d := v_1.AuxInt 3879 v.reset(OpConstBool) 3880 v.AuxInt = b2i(uint64(c) < uint64(d)) 3881 return true 3882 } 3883 return false 3884 } 3885 func rewriteValuegeneric_OpLess8(v *Value, config *Config) bool { 3886 b := v.Block 3887 _ = b 3888 // match: (Less8 (Const8 [c]) (Const8 [d])) 3889 // cond: 3890 // result: (ConstBool [b2i(c < d)]) 3891 for { 3892 v_0 := v.Args[0] 3893 if v_0.Op != OpConst8 { 3894 break 3895 } 3896 c := v_0.AuxInt 3897 v_1 := v.Args[1] 3898 if v_1.Op != OpConst8 { 3899 break 3900 } 3901 d := v_1.AuxInt 3902 v.reset(OpConstBool) 3903 v.AuxInt = b2i(c < d) 3904 return true 3905 } 3906 return false 3907 } 3908 func rewriteValuegeneric_OpLess8U(v *Value, config *Config) bool { 3909 b := v.Block 3910 _ = b 3911 // match: (Less8U (Const8 [c]) (Const8 [d])) 3912 // cond: 3913 // result: (ConstBool [b2i(uint8(c) < uint8(d))]) 3914 for { 3915 v_0 := v.Args[0] 3916 if v_0.Op != OpConst8 { 3917 break 3918 } 3919 c := v_0.AuxInt 3920 v_1 := v.Args[1] 3921 if v_1.Op != OpConst8 { 3922 break 3923 } 3924 d := v_1.AuxInt 3925 v.reset(OpConstBool) 3926 v.AuxInt = b2i(uint8(c) < uint8(d)) 3927 return true 3928 } 3929 return false 3930 } 3931 func rewriteValuegeneric_OpLoad(v *Value, config *Config) bool { 3932 b := v.Block 3933 _ = b 3934 // match: (Load <t1> p1 (Store [w] p2 x _)) 3935 // cond: isSamePtr(p1,p2) && t1.Compare(x.Type)==CMPeq && w == t1.Size() 3936 // result: x 3937 for { 3938 t1 := v.Type 3939 p1 := v.Args[0] 3940 v_1 := v.Args[1] 3941 if v_1.Op != OpStore { 3942 break 3943 } 3944 w := v_1.AuxInt 3945 p2 := v_1.Args[0] 3946 x := v_1.Args[1] 3947 if !(isSamePtr(p1, p2) && t1.Compare(x.Type) == CMPeq && w == t1.Size()) { 3948 break 3949 } 3950 v.reset(OpCopy) 3951 v.Type = x.Type 3952 v.AddArg(x) 3953 return true 3954 } 3955 // match: (Load <t> _ _) 3956 // cond: t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t) 3957 // result: (StructMake0) 3958 for { 
3959 t := v.Type 3960 if !(t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t)) { 3961 break 3962 } 3963 v.reset(OpStructMake0) 3964 return true 3965 } 3966 // match: (Load <t> ptr mem) 3967 // cond: t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t) 3968 // result: (StructMake1 (Load <t.FieldType(0)> ptr mem)) 3969 for { 3970 t := v.Type 3971 ptr := v.Args[0] 3972 mem := v.Args[1] 3973 if !(t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t)) { 3974 break 3975 } 3976 v.reset(OpStructMake1) 3977 v0 := b.NewValue0(v.Line, OpLoad, t.FieldType(0)) 3978 v0.AddArg(ptr) 3979 v0.AddArg(mem) 3980 v.AddArg(v0) 3981 return true 3982 } 3983 // match: (Load <t> ptr mem) 3984 // cond: t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t) 3985 // result: (StructMake2 (Load <t.FieldType(0)> ptr mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem)) 3986 for { 3987 t := v.Type 3988 ptr := v.Args[0] 3989 mem := v.Args[1] 3990 if !(t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t)) { 3991 break 3992 } 3993 v.reset(OpStructMake2) 3994 v0 := b.NewValue0(v.Line, OpLoad, t.FieldType(0)) 3995 v0.AddArg(ptr) 3996 v0.AddArg(mem) 3997 v.AddArg(v0) 3998 v1 := b.NewValue0(v.Line, OpLoad, t.FieldType(1)) 3999 v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo()) 4000 v2.AuxInt = t.FieldOff(1) 4001 v2.AddArg(ptr) 4002 v1.AddArg(v2) 4003 v1.AddArg(mem) 4004 v.AddArg(v1) 4005 return true 4006 } 4007 // match: (Load <t> ptr mem) 4008 // cond: t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t) 4009 // result: (StructMake3 (Load <t.FieldType(0)> ptr mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem) (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem)) 4010 for { 4011 t := v.Type 4012 ptr := v.Args[0] 4013 mem := v.Args[1] 4014 if !(t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t)) { 4015 break 4016 } 4017 v.reset(OpStructMake3) 4018 v0 := 
b.NewValue0(v.Line, OpLoad, t.FieldType(0)) 4019 v0.AddArg(ptr) 4020 v0.AddArg(mem) 4021 v.AddArg(v0) 4022 v1 := b.NewValue0(v.Line, OpLoad, t.FieldType(1)) 4023 v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo()) 4024 v2.AuxInt = t.FieldOff(1) 4025 v2.AddArg(ptr) 4026 v1.AddArg(v2) 4027 v1.AddArg(mem) 4028 v.AddArg(v1) 4029 v3 := b.NewValue0(v.Line, OpLoad, t.FieldType(2)) 4030 v4 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(2).PtrTo()) 4031 v4.AuxInt = t.FieldOff(2) 4032 v4.AddArg(ptr) 4033 v3.AddArg(v4) 4034 v3.AddArg(mem) 4035 v.AddArg(v3) 4036 return true 4037 } 4038 // match: (Load <t> ptr mem) 4039 // cond: t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t) 4040 // result: (StructMake4 (Load <t.FieldType(0)> ptr mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem) (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem) (Load <t.FieldType(3)> (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] ptr) mem)) 4041 for { 4042 t := v.Type 4043 ptr := v.Args[0] 4044 mem := v.Args[1] 4045 if !(t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t)) { 4046 break 4047 } 4048 v.reset(OpStructMake4) 4049 v0 := b.NewValue0(v.Line, OpLoad, t.FieldType(0)) 4050 v0.AddArg(ptr) 4051 v0.AddArg(mem) 4052 v.AddArg(v0) 4053 v1 := b.NewValue0(v.Line, OpLoad, t.FieldType(1)) 4054 v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo()) 4055 v2.AuxInt = t.FieldOff(1) 4056 v2.AddArg(ptr) 4057 v1.AddArg(v2) 4058 v1.AddArg(mem) 4059 v.AddArg(v1) 4060 v3 := b.NewValue0(v.Line, OpLoad, t.FieldType(2)) 4061 v4 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(2).PtrTo()) 4062 v4.AuxInt = t.FieldOff(2) 4063 v4.AddArg(ptr) 4064 v3.AddArg(v4) 4065 v3.AddArg(mem) 4066 v.AddArg(v3) 4067 v5 := b.NewValue0(v.Line, OpLoad, t.FieldType(3)) 4068 v6 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(3).PtrTo()) 4069 v6.AuxInt = t.FieldOff(3) 4070 v6.AddArg(ptr) 4071 v5.AddArg(v6) 4072 v5.AddArg(mem) 4073 v.AddArg(v5) 4074 return true 
4075 } 4076 // match: (Load <t> _ _) 4077 // cond: t.IsArray() && t.NumElem() == 0 4078 // result: (ArrayMake0) 4079 for { 4080 t := v.Type 4081 if !(t.IsArray() && t.NumElem() == 0) { 4082 break 4083 } 4084 v.reset(OpArrayMake0) 4085 return true 4086 } 4087 // match: (Load <t> ptr mem) 4088 // cond: t.IsArray() && t.NumElem() == 1 && config.fe.CanSSA(t) 4089 // result: (ArrayMake1 (Load <t.ElemType()> ptr mem)) 4090 for { 4091 t := v.Type 4092 ptr := v.Args[0] 4093 mem := v.Args[1] 4094 if !(t.IsArray() && t.NumElem() == 1 && config.fe.CanSSA(t)) { 4095 break 4096 } 4097 v.reset(OpArrayMake1) 4098 v0 := b.NewValue0(v.Line, OpLoad, t.ElemType()) 4099 v0.AddArg(ptr) 4100 v0.AddArg(mem) 4101 v.AddArg(v0) 4102 return true 4103 } 4104 return false 4105 } 4106 func rewriteValuegeneric_OpLsh16x16(v *Value, config *Config) bool { 4107 b := v.Block 4108 _ = b 4109 // match: (Lsh16x16 <t> x (Const16 [c])) 4110 // cond: 4111 // result: (Lsh16x64 x (Const64 <t> [int64(uint16(c))])) 4112 for { 4113 t := v.Type 4114 x := v.Args[0] 4115 v_1 := v.Args[1] 4116 if v_1.Op != OpConst16 { 4117 break 4118 } 4119 c := v_1.AuxInt 4120 v.reset(OpLsh16x64) 4121 v.AddArg(x) 4122 v0 := b.NewValue0(v.Line, OpConst64, t) 4123 v0.AuxInt = int64(uint16(c)) 4124 v.AddArg(v0) 4125 return true 4126 } 4127 // match: (Lsh16x16 (Const16 [0]) _) 4128 // cond: 4129 // result: (Const16 [0]) 4130 for { 4131 v_0 := v.Args[0] 4132 if v_0.Op != OpConst16 { 4133 break 4134 } 4135 if v_0.AuxInt != 0 { 4136 break 4137 } 4138 v.reset(OpConst16) 4139 v.AuxInt = 0 4140 return true 4141 } 4142 return false 4143 } 4144 func rewriteValuegeneric_OpLsh16x32(v *Value, config *Config) bool { 4145 b := v.Block 4146 _ = b 4147 // match: (Lsh16x32 <t> x (Const32 [c])) 4148 // cond: 4149 // result: (Lsh16x64 x (Const64 <t> [int64(uint32(c))])) 4150 for { 4151 t := v.Type 4152 x := v.Args[0] 4153 v_1 := v.Args[1] 4154 if v_1.Op != OpConst32 { 4155 break 4156 } 4157 c := v_1.AuxInt 4158 v.reset(OpLsh16x64) 4159 v.AddArg(x) 
4160 v0 := b.NewValue0(v.Line, OpConst64, t) 4161 v0.AuxInt = int64(uint32(c)) 4162 v.AddArg(v0) 4163 return true 4164 } 4165 // match: (Lsh16x32 (Const16 [0]) _) 4166 // cond: 4167 // result: (Const16 [0]) 4168 for { 4169 v_0 := v.Args[0] 4170 if v_0.Op != OpConst16 { 4171 break 4172 } 4173 if v_0.AuxInt != 0 { 4174 break 4175 } 4176 v.reset(OpConst16) 4177 v.AuxInt = 0 4178 return true 4179 } 4180 return false 4181 } 4182 func rewriteValuegeneric_OpLsh16x64(v *Value, config *Config) bool { 4183 b := v.Block 4184 _ = b 4185 // match: (Lsh16x64 (Const16 [c]) (Const64 [d])) 4186 // cond: 4187 // result: (Const16 [int64(int16(c) << uint64(d))]) 4188 for { 4189 v_0 := v.Args[0] 4190 if v_0.Op != OpConst16 { 4191 break 4192 } 4193 c := v_0.AuxInt 4194 v_1 := v.Args[1] 4195 if v_1.Op != OpConst64 { 4196 break 4197 } 4198 d := v_1.AuxInt 4199 v.reset(OpConst16) 4200 v.AuxInt = int64(int16(c) << uint64(d)) 4201 return true 4202 } 4203 // match: (Lsh16x64 x (Const64 [0])) 4204 // cond: 4205 // result: x 4206 for { 4207 x := v.Args[0] 4208 v_1 := v.Args[1] 4209 if v_1.Op != OpConst64 { 4210 break 4211 } 4212 if v_1.AuxInt != 0 { 4213 break 4214 } 4215 v.reset(OpCopy) 4216 v.Type = x.Type 4217 v.AddArg(x) 4218 return true 4219 } 4220 // match: (Lsh16x64 (Const16 [0]) _) 4221 // cond: 4222 // result: (Const16 [0]) 4223 for { 4224 v_0 := v.Args[0] 4225 if v_0.Op != OpConst16 { 4226 break 4227 } 4228 if v_0.AuxInt != 0 { 4229 break 4230 } 4231 v.reset(OpConst16) 4232 v.AuxInt = 0 4233 return true 4234 } 4235 // match: (Lsh16x64 _ (Const64 [c])) 4236 // cond: uint64(c) >= 16 4237 // result: (Const16 [0]) 4238 for { 4239 v_1 := v.Args[1] 4240 if v_1.Op != OpConst64 { 4241 break 4242 } 4243 c := v_1.AuxInt 4244 if !(uint64(c) >= 16) { 4245 break 4246 } 4247 v.reset(OpConst16) 4248 v.AuxInt = 0 4249 return true 4250 } 4251 // match: (Lsh16x64 <t> (Lsh16x64 x (Const64 [c])) (Const64 [d])) 4252 // cond: !uaddOvf(c,d) 4253 // result: (Lsh16x64 x (Const64 <t> [c+d])) 4254 for { 4255 t 
:= v.Type 4256 v_0 := v.Args[0] 4257 if v_0.Op != OpLsh16x64 { 4258 break 4259 } 4260 x := v_0.Args[0] 4261 v_0_1 := v_0.Args[1] 4262 if v_0_1.Op != OpConst64 { 4263 break 4264 } 4265 c := v_0_1.AuxInt 4266 v_1 := v.Args[1] 4267 if v_1.Op != OpConst64 { 4268 break 4269 } 4270 d := v_1.AuxInt 4271 if !(!uaddOvf(c, d)) { 4272 break 4273 } 4274 v.reset(OpLsh16x64) 4275 v.AddArg(x) 4276 v0 := b.NewValue0(v.Line, OpConst64, t) 4277 v0.AuxInt = c + d 4278 v.AddArg(v0) 4279 return true 4280 } 4281 // match: (Lsh16x64 (Rsh16Ux64 (Lsh16x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) 4282 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) 4283 // result: (Lsh16x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3])) 4284 for { 4285 v_0 := v.Args[0] 4286 if v_0.Op != OpRsh16Ux64 { 4287 break 4288 } 4289 v_0_0 := v_0.Args[0] 4290 if v_0_0.Op != OpLsh16x64 { 4291 break 4292 } 4293 x := v_0_0.Args[0] 4294 v_0_0_1 := v_0_0.Args[1] 4295 if v_0_0_1.Op != OpConst64 { 4296 break 4297 } 4298 c1 := v_0_0_1.AuxInt 4299 v_0_1 := v_0.Args[1] 4300 if v_0_1.Op != OpConst64 { 4301 break 4302 } 4303 c2 := v_0_1.AuxInt 4304 v_1 := v.Args[1] 4305 if v_1.Op != OpConst64 { 4306 break 4307 } 4308 c3 := v_1.AuxInt 4309 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { 4310 break 4311 } 4312 v.reset(OpLsh16x64) 4313 v.AddArg(x) 4314 v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64()) 4315 v0.AuxInt = c1 - c2 + c3 4316 v.AddArg(v0) 4317 return true 4318 } 4319 return false 4320 } 4321 func rewriteValuegeneric_OpLsh16x8(v *Value, config *Config) bool { 4322 b := v.Block 4323 _ = b 4324 // match: (Lsh16x8 <t> x (Const8 [c])) 4325 // cond: 4326 // result: (Lsh16x64 x (Const64 <t> [int64(uint8(c))])) 4327 for { 4328 t := v.Type 4329 x := v.Args[0] 4330 v_1 := v.Args[1] 4331 if v_1.Op != OpConst8 { 4332 break 4333 } 4334 c := v_1.AuxInt 4335 v.reset(OpLsh16x64) 4336 v.AddArg(x) 4337 v0 := b.NewValue0(v.Line, OpConst64, t) 4338 
v0.AuxInt = int64(uint8(c)) 4339 v.AddArg(v0) 4340 return true 4341 } 4342 // match: (Lsh16x8 (Const16 [0]) _) 4343 // cond: 4344 // result: (Const16 [0]) 4345 for { 4346 v_0 := v.Args[0] 4347 if v_0.Op != OpConst16 { 4348 break 4349 } 4350 if v_0.AuxInt != 0 { 4351 break 4352 } 4353 v.reset(OpConst16) 4354 v.AuxInt = 0 4355 return true 4356 } 4357 return false 4358 } 4359 func rewriteValuegeneric_OpLsh32x16(v *Value, config *Config) bool { 4360 b := v.Block 4361 _ = b 4362 // match: (Lsh32x16 <t> x (Const16 [c])) 4363 // cond: 4364 // result: (Lsh32x64 x (Const64 <t> [int64(uint16(c))])) 4365 for { 4366 t := v.Type 4367 x := v.Args[0] 4368 v_1 := v.Args[1] 4369 if v_1.Op != OpConst16 { 4370 break 4371 } 4372 c := v_1.AuxInt 4373 v.reset(OpLsh32x64) 4374 v.AddArg(x) 4375 v0 := b.NewValue0(v.Line, OpConst64, t) 4376 v0.AuxInt = int64(uint16(c)) 4377 v.AddArg(v0) 4378 return true 4379 } 4380 // match: (Lsh32x16 (Const32 [0]) _) 4381 // cond: 4382 // result: (Const32 [0]) 4383 for { 4384 v_0 := v.Args[0] 4385 if v_0.Op != OpConst32 { 4386 break 4387 } 4388 if v_0.AuxInt != 0 { 4389 break 4390 } 4391 v.reset(OpConst32) 4392 v.AuxInt = 0 4393 return true 4394 } 4395 return false 4396 } 4397 func rewriteValuegeneric_OpLsh32x32(v *Value, config *Config) bool { 4398 b := v.Block 4399 _ = b 4400 // match: (Lsh32x32 <t> x (Const32 [c])) 4401 // cond: 4402 // result: (Lsh32x64 x (Const64 <t> [int64(uint32(c))])) 4403 for { 4404 t := v.Type 4405 x := v.Args[0] 4406 v_1 := v.Args[1] 4407 if v_1.Op != OpConst32 { 4408 break 4409 } 4410 c := v_1.AuxInt 4411 v.reset(OpLsh32x64) 4412 v.AddArg(x) 4413 v0 := b.NewValue0(v.Line, OpConst64, t) 4414 v0.AuxInt = int64(uint32(c)) 4415 v.AddArg(v0) 4416 return true 4417 } 4418 // match: (Lsh32x32 (Const32 [0]) _) 4419 // cond: 4420 // result: (Const32 [0]) 4421 for { 4422 v_0 := v.Args[0] 4423 if v_0.Op != OpConst32 { 4424 break 4425 } 4426 if v_0.AuxInt != 0 { 4427 break 4428 } 4429 v.reset(OpConst32) 4430 v.AuxInt = 0 4431 return true 
4432 } 4433 return false 4434 } 4435 func rewriteValuegeneric_OpLsh32x64(v *Value, config *Config) bool { 4436 b := v.Block 4437 _ = b 4438 // match: (Lsh32x64 (Const32 [c]) (Const64 [d])) 4439 // cond: 4440 // result: (Const32 [int64(int32(c) << uint64(d))]) 4441 for { 4442 v_0 := v.Args[0] 4443 if v_0.Op != OpConst32 { 4444 break 4445 } 4446 c := v_0.AuxInt 4447 v_1 := v.Args[1] 4448 if v_1.Op != OpConst64 { 4449 break 4450 } 4451 d := v_1.AuxInt 4452 v.reset(OpConst32) 4453 v.AuxInt = int64(int32(c) << uint64(d)) 4454 return true 4455 } 4456 // match: (Lsh32x64 x (Const64 [0])) 4457 // cond: 4458 // result: x 4459 for { 4460 x := v.Args[0] 4461 v_1 := v.Args[1] 4462 if v_1.Op != OpConst64 { 4463 break 4464 } 4465 if v_1.AuxInt != 0 { 4466 break 4467 } 4468 v.reset(OpCopy) 4469 v.Type = x.Type 4470 v.AddArg(x) 4471 return true 4472 } 4473 // match: (Lsh32x64 (Const32 [0]) _) 4474 // cond: 4475 // result: (Const32 [0]) 4476 for { 4477 v_0 := v.Args[0] 4478 if v_0.Op != OpConst32 { 4479 break 4480 } 4481 if v_0.AuxInt != 0 { 4482 break 4483 } 4484 v.reset(OpConst32) 4485 v.AuxInt = 0 4486 return true 4487 } 4488 // match: (Lsh32x64 _ (Const64 [c])) 4489 // cond: uint64(c) >= 32 4490 // result: (Const32 [0]) 4491 for { 4492 v_1 := v.Args[1] 4493 if v_1.Op != OpConst64 { 4494 break 4495 } 4496 c := v_1.AuxInt 4497 if !(uint64(c) >= 32) { 4498 break 4499 } 4500 v.reset(OpConst32) 4501 v.AuxInt = 0 4502 return true 4503 } 4504 // match: (Lsh32x64 <t> (Lsh32x64 x (Const64 [c])) (Const64 [d])) 4505 // cond: !uaddOvf(c,d) 4506 // result: (Lsh32x64 x (Const64 <t> [c+d])) 4507 for { 4508 t := v.Type 4509 v_0 := v.Args[0] 4510 if v_0.Op != OpLsh32x64 { 4511 break 4512 } 4513 x := v_0.Args[0] 4514 v_0_1 := v_0.Args[1] 4515 if v_0_1.Op != OpConst64 { 4516 break 4517 } 4518 c := v_0_1.AuxInt 4519 v_1 := v.Args[1] 4520 if v_1.Op != OpConst64 { 4521 break 4522 } 4523 d := v_1.AuxInt 4524 if !(!uaddOvf(c, d)) { 4525 break 4526 } 4527 v.reset(OpLsh32x64) 4528 v.AddArg(x) 4529 v0 := 
b.NewValue0(v.Line, OpConst64, t) 4530 v0.AuxInt = c + d 4531 v.AddArg(v0) 4532 return true 4533 } 4534 // match: (Lsh32x64 (Rsh32Ux64 (Lsh32x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) 4535 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) 4536 // result: (Lsh32x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3])) 4537 for { 4538 v_0 := v.Args[0] 4539 if v_0.Op != OpRsh32Ux64 { 4540 break 4541 } 4542 v_0_0 := v_0.Args[0] 4543 if v_0_0.Op != OpLsh32x64 { 4544 break 4545 } 4546 x := v_0_0.Args[0] 4547 v_0_0_1 := v_0_0.Args[1] 4548 if v_0_0_1.Op != OpConst64 { 4549 break 4550 } 4551 c1 := v_0_0_1.AuxInt 4552 v_0_1 := v_0.Args[1] 4553 if v_0_1.Op != OpConst64 { 4554 break 4555 } 4556 c2 := v_0_1.AuxInt 4557 v_1 := v.Args[1] 4558 if v_1.Op != OpConst64 { 4559 break 4560 } 4561 c3 := v_1.AuxInt 4562 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { 4563 break 4564 } 4565 v.reset(OpLsh32x64) 4566 v.AddArg(x) 4567 v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64()) 4568 v0.AuxInt = c1 - c2 + c3 4569 v.AddArg(v0) 4570 return true 4571 } 4572 return false 4573 } 4574 func rewriteValuegeneric_OpLsh32x8(v *Value, config *Config) bool { 4575 b := v.Block 4576 _ = b 4577 // match: (Lsh32x8 <t> x (Const8 [c])) 4578 // cond: 4579 // result: (Lsh32x64 x (Const64 <t> [int64(uint8(c))])) 4580 for { 4581 t := v.Type 4582 x := v.Args[0] 4583 v_1 := v.Args[1] 4584 if v_1.Op != OpConst8 { 4585 break 4586 } 4587 c := v_1.AuxInt 4588 v.reset(OpLsh32x64) 4589 v.AddArg(x) 4590 v0 := b.NewValue0(v.Line, OpConst64, t) 4591 v0.AuxInt = int64(uint8(c)) 4592 v.AddArg(v0) 4593 return true 4594 } 4595 // match: (Lsh32x8 (Const32 [0]) _) 4596 // cond: 4597 // result: (Const32 [0]) 4598 for { 4599 v_0 := v.Args[0] 4600 if v_0.Op != OpConst32 { 4601 break 4602 } 4603 if v_0.AuxInt != 0 { 4604 break 4605 } 4606 v.reset(OpConst32) 4607 v.AuxInt = 0 4608 return true 4609 } 4610 return false 4611 } 4612 func 
rewriteValuegeneric_OpLsh64x16(v *Value, config *Config) bool { 4613 b := v.Block 4614 _ = b 4615 // match: (Lsh64x16 <t> x (Const16 [c])) 4616 // cond: 4617 // result: (Lsh64x64 x (Const64 <t> [int64(uint16(c))])) 4618 for { 4619 t := v.Type 4620 x := v.Args[0] 4621 v_1 := v.Args[1] 4622 if v_1.Op != OpConst16 { 4623 break 4624 } 4625 c := v_1.AuxInt 4626 v.reset(OpLsh64x64) 4627 v.AddArg(x) 4628 v0 := b.NewValue0(v.Line, OpConst64, t) 4629 v0.AuxInt = int64(uint16(c)) 4630 v.AddArg(v0) 4631 return true 4632 } 4633 // match: (Lsh64x16 (Const64 [0]) _) 4634 // cond: 4635 // result: (Const64 [0]) 4636 for { 4637 v_0 := v.Args[0] 4638 if v_0.Op != OpConst64 { 4639 break 4640 } 4641 if v_0.AuxInt != 0 { 4642 break 4643 } 4644 v.reset(OpConst64) 4645 v.AuxInt = 0 4646 return true 4647 } 4648 return false 4649 } 4650 func rewriteValuegeneric_OpLsh64x32(v *Value, config *Config) bool { 4651 b := v.Block 4652 _ = b 4653 // match: (Lsh64x32 <t> x (Const32 [c])) 4654 // cond: 4655 // result: (Lsh64x64 x (Const64 <t> [int64(uint32(c))])) 4656 for { 4657 t := v.Type 4658 x := v.Args[0] 4659 v_1 := v.Args[1] 4660 if v_1.Op != OpConst32 { 4661 break 4662 } 4663 c := v_1.AuxInt 4664 v.reset(OpLsh64x64) 4665 v.AddArg(x) 4666 v0 := b.NewValue0(v.Line, OpConst64, t) 4667 v0.AuxInt = int64(uint32(c)) 4668 v.AddArg(v0) 4669 return true 4670 } 4671 // match: (Lsh64x32 (Const64 [0]) _) 4672 // cond: 4673 // result: (Const64 [0]) 4674 for { 4675 v_0 := v.Args[0] 4676 if v_0.Op != OpConst64 { 4677 break 4678 } 4679 if v_0.AuxInt != 0 { 4680 break 4681 } 4682 v.reset(OpConst64) 4683 v.AuxInt = 0 4684 return true 4685 } 4686 return false 4687 } 4688 func rewriteValuegeneric_OpLsh64x64(v *Value, config *Config) bool { 4689 b := v.Block 4690 _ = b 4691 // match: (Lsh64x64 (Const64 [c]) (Const64 [d])) 4692 // cond: 4693 // result: (Const64 [c << uint64(d)]) 4694 for { 4695 v_0 := v.Args[0] 4696 if v_0.Op != OpConst64 { 4697 break 4698 } 4699 c := v_0.AuxInt 4700 v_1 := v.Args[1] 4701 if 
v_1.Op != OpConst64 { 4702 break 4703 } 4704 d := v_1.AuxInt 4705 v.reset(OpConst64) 4706 v.AuxInt = c << uint64(d) 4707 return true 4708 } 4709 // match: (Lsh64x64 x (Const64 [0])) 4710 // cond: 4711 // result: x 4712 for { 4713 x := v.Args[0] 4714 v_1 := v.Args[1] 4715 if v_1.Op != OpConst64 { 4716 break 4717 } 4718 if v_1.AuxInt != 0 { 4719 break 4720 } 4721 v.reset(OpCopy) 4722 v.Type = x.Type 4723 v.AddArg(x) 4724 return true 4725 } 4726 // match: (Lsh64x64 (Const64 [0]) _) 4727 // cond: 4728 // result: (Const64 [0]) 4729 for { 4730 v_0 := v.Args[0] 4731 if v_0.Op != OpConst64 { 4732 break 4733 } 4734 if v_0.AuxInt != 0 { 4735 break 4736 } 4737 v.reset(OpConst64) 4738 v.AuxInt = 0 4739 return true 4740 } 4741 // match: (Lsh64x64 _ (Const64 [c])) 4742 // cond: uint64(c) >= 64 4743 // result: (Const64 [0]) 4744 for { 4745 v_1 := v.Args[1] 4746 if v_1.Op != OpConst64 { 4747 break 4748 } 4749 c := v_1.AuxInt 4750 if !(uint64(c) >= 64) { 4751 break 4752 } 4753 v.reset(OpConst64) 4754 v.AuxInt = 0 4755 return true 4756 } 4757 // match: (Lsh64x64 <t> (Lsh64x64 x (Const64 [c])) (Const64 [d])) 4758 // cond: !uaddOvf(c,d) 4759 // result: (Lsh64x64 x (Const64 <t> [c+d])) 4760 for { 4761 t := v.Type 4762 v_0 := v.Args[0] 4763 if v_0.Op != OpLsh64x64 { 4764 break 4765 } 4766 x := v_0.Args[0] 4767 v_0_1 := v_0.Args[1] 4768 if v_0_1.Op != OpConst64 { 4769 break 4770 } 4771 c := v_0_1.AuxInt 4772 v_1 := v.Args[1] 4773 if v_1.Op != OpConst64 { 4774 break 4775 } 4776 d := v_1.AuxInt 4777 if !(!uaddOvf(c, d)) { 4778 break 4779 } 4780 v.reset(OpLsh64x64) 4781 v.AddArg(x) 4782 v0 := b.NewValue0(v.Line, OpConst64, t) 4783 v0.AuxInt = c + d 4784 v.AddArg(v0) 4785 return true 4786 } 4787 // match: (Lsh64x64 (Rsh64Ux64 (Lsh64x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) 4788 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) 4789 // result: (Lsh64x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3])) 4790 for { 4791 v_0 := v.Args[0] 4792 if 
v_0.Op != OpRsh64Ux64 { 4793 break 4794 } 4795 v_0_0 := v_0.Args[0] 4796 if v_0_0.Op != OpLsh64x64 { 4797 break 4798 } 4799 x := v_0_0.Args[0] 4800 v_0_0_1 := v_0_0.Args[1] 4801 if v_0_0_1.Op != OpConst64 { 4802 break 4803 } 4804 c1 := v_0_0_1.AuxInt 4805 v_0_1 := v_0.Args[1] 4806 if v_0_1.Op != OpConst64 { 4807 break 4808 } 4809 c2 := v_0_1.AuxInt 4810 v_1 := v.Args[1] 4811 if v_1.Op != OpConst64 { 4812 break 4813 } 4814 c3 := v_1.AuxInt 4815 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { 4816 break 4817 } 4818 v.reset(OpLsh64x64) 4819 v.AddArg(x) 4820 v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64()) 4821 v0.AuxInt = c1 - c2 + c3 4822 v.AddArg(v0) 4823 return true 4824 } 4825 return false 4826 } 4827 func rewriteValuegeneric_OpLsh64x8(v *Value, config *Config) bool { 4828 b := v.Block 4829 _ = b 4830 // match: (Lsh64x8 <t> x (Const8 [c])) 4831 // cond: 4832 // result: (Lsh64x64 x (Const64 <t> [int64(uint8(c))])) 4833 for { 4834 t := v.Type 4835 x := v.Args[0] 4836 v_1 := v.Args[1] 4837 if v_1.Op != OpConst8 { 4838 break 4839 } 4840 c := v_1.AuxInt 4841 v.reset(OpLsh64x64) 4842 v.AddArg(x) 4843 v0 := b.NewValue0(v.Line, OpConst64, t) 4844 v0.AuxInt = int64(uint8(c)) 4845 v.AddArg(v0) 4846 return true 4847 } 4848 // match: (Lsh64x8 (Const64 [0]) _) 4849 // cond: 4850 // result: (Const64 [0]) 4851 for { 4852 v_0 := v.Args[0] 4853 if v_0.Op != OpConst64 { 4854 break 4855 } 4856 if v_0.AuxInt != 0 { 4857 break 4858 } 4859 v.reset(OpConst64) 4860 v.AuxInt = 0 4861 return true 4862 } 4863 return false 4864 } 4865 func rewriteValuegeneric_OpLsh8x16(v *Value, config *Config) bool { 4866 b := v.Block 4867 _ = b 4868 // match: (Lsh8x16 <t> x (Const16 [c])) 4869 // cond: 4870 // result: (Lsh8x64 x (Const64 <t> [int64(uint16(c))])) 4871 for { 4872 t := v.Type 4873 x := v.Args[0] 4874 v_1 := v.Args[1] 4875 if v_1.Op != OpConst16 { 4876 break 4877 } 4878 c := v_1.AuxInt 4879 v.reset(OpLsh8x64) 4880 v.AddArg(x) 4881 v0 := 
b.NewValue0(v.Line, OpConst64, t) 4882 v0.AuxInt = int64(uint16(c)) 4883 v.AddArg(v0) 4884 return true 4885 } 4886 // match: (Lsh8x16 (Const8 [0]) _) 4887 // cond: 4888 // result: (Const8 [0]) 4889 for { 4890 v_0 := v.Args[0] 4891 if v_0.Op != OpConst8 { 4892 break 4893 } 4894 if v_0.AuxInt != 0 { 4895 break 4896 } 4897 v.reset(OpConst8) 4898 v.AuxInt = 0 4899 return true 4900 } 4901 return false 4902 } 4903 func rewriteValuegeneric_OpLsh8x32(v *Value, config *Config) bool { 4904 b := v.Block 4905 _ = b 4906 // match: (Lsh8x32 <t> x (Const32 [c])) 4907 // cond: 4908 // result: (Lsh8x64 x (Const64 <t> [int64(uint32(c))])) 4909 for { 4910 t := v.Type 4911 x := v.Args[0] 4912 v_1 := v.Args[1] 4913 if v_1.Op != OpConst32 { 4914 break 4915 } 4916 c := v_1.AuxInt 4917 v.reset(OpLsh8x64) 4918 v.AddArg(x) 4919 v0 := b.NewValue0(v.Line, OpConst64, t) 4920 v0.AuxInt = int64(uint32(c)) 4921 v.AddArg(v0) 4922 return true 4923 } 4924 // match: (Lsh8x32 (Const8 [0]) _) 4925 // cond: 4926 // result: (Const8 [0]) 4927 for { 4928 v_0 := v.Args[0] 4929 if v_0.Op != OpConst8 { 4930 break 4931 } 4932 if v_0.AuxInt != 0 { 4933 break 4934 } 4935 v.reset(OpConst8) 4936 v.AuxInt = 0 4937 return true 4938 } 4939 return false 4940 } 4941 func rewriteValuegeneric_OpLsh8x64(v *Value, config *Config) bool { 4942 b := v.Block 4943 _ = b 4944 // match: (Lsh8x64 (Const8 [c]) (Const64 [d])) 4945 // cond: 4946 // result: (Const8 [int64(int8(c) << uint64(d))]) 4947 for { 4948 v_0 := v.Args[0] 4949 if v_0.Op != OpConst8 { 4950 break 4951 } 4952 c := v_0.AuxInt 4953 v_1 := v.Args[1] 4954 if v_1.Op != OpConst64 { 4955 break 4956 } 4957 d := v_1.AuxInt 4958 v.reset(OpConst8) 4959 v.AuxInt = int64(int8(c) << uint64(d)) 4960 return true 4961 } 4962 // match: (Lsh8x64 x (Const64 [0])) 4963 // cond: 4964 // result: x 4965 for { 4966 x := v.Args[0] 4967 v_1 := v.Args[1] 4968 if v_1.Op != OpConst64 { 4969 break 4970 } 4971 if v_1.AuxInt != 0 { 4972 break 4973 } 4974 v.reset(OpCopy) 4975 v.Type = x.Type 4976 
v.AddArg(x) 4977 return true 4978 } 4979 // match: (Lsh8x64 (Const8 [0]) _) 4980 // cond: 4981 // result: (Const8 [0]) 4982 for { 4983 v_0 := v.Args[0] 4984 if v_0.Op != OpConst8 { 4985 break 4986 } 4987 if v_0.AuxInt != 0 { 4988 break 4989 } 4990 v.reset(OpConst8) 4991 v.AuxInt = 0 4992 return true 4993 } 4994 // match: (Lsh8x64 _ (Const64 [c])) 4995 // cond: uint64(c) >= 8 4996 // result: (Const8 [0]) 4997 for { 4998 v_1 := v.Args[1] 4999 if v_1.Op != OpConst64 { 5000 break 5001 } 5002 c := v_1.AuxInt 5003 if !(uint64(c) >= 8) { 5004 break 5005 } 5006 v.reset(OpConst8) 5007 v.AuxInt = 0 5008 return true 5009 } 5010 // match: (Lsh8x64 <t> (Lsh8x64 x (Const64 [c])) (Const64 [d])) 5011 // cond: !uaddOvf(c,d) 5012 // result: (Lsh8x64 x (Const64 <t> [c+d])) 5013 for { 5014 t := v.Type 5015 v_0 := v.Args[0] 5016 if v_0.Op != OpLsh8x64 { 5017 break 5018 } 5019 x := v_0.Args[0] 5020 v_0_1 := v_0.Args[1] 5021 if v_0_1.Op != OpConst64 { 5022 break 5023 } 5024 c := v_0_1.AuxInt 5025 v_1 := v.Args[1] 5026 if v_1.Op != OpConst64 { 5027 break 5028 } 5029 d := v_1.AuxInt 5030 if !(!uaddOvf(c, d)) { 5031 break 5032 } 5033 v.reset(OpLsh8x64) 5034 v.AddArg(x) 5035 v0 := b.NewValue0(v.Line, OpConst64, t) 5036 v0.AuxInt = c + d 5037 v.AddArg(v0) 5038 return true 5039 } 5040 // match: (Lsh8x64 (Rsh8Ux64 (Lsh8x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) 5041 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) 5042 // result: (Lsh8x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3])) 5043 for { 5044 v_0 := v.Args[0] 5045 if v_0.Op != OpRsh8Ux64 { 5046 break 5047 } 5048 v_0_0 := v_0.Args[0] 5049 if v_0_0.Op != OpLsh8x64 { 5050 break 5051 } 5052 x := v_0_0.Args[0] 5053 v_0_0_1 := v_0_0.Args[1] 5054 if v_0_0_1.Op != OpConst64 { 5055 break 5056 } 5057 c1 := v_0_0_1.AuxInt 5058 v_0_1 := v_0.Args[1] 5059 if v_0_1.Op != OpConst64 { 5060 break 5061 } 5062 c2 := v_0_1.AuxInt 5063 v_1 := v.Args[1] 5064 if v_1.Op != OpConst64 { 5065 break 5066 } 5067 c3 := 
v_1.AuxInt
		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
			break
		}
		v.reset(OpLsh8x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v0.AuxInt = c1 - c2 + c3
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpLsh8x8 normalizes the shift count of an 8-bit-count
// left shift to a Const64 and folds shifts of a zero constant.
// It reports whether a rewrite fired.
func rewriteValuegeneric_OpLsh8x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh8x8 <t> x (Const8 [c]))
	// cond:
	// result: (Lsh8x64 x (Const64 <t> [int64(uint8(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 { break }
		c := v_1.AuxInt
		v.reset(OpLsh8x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint8(c))
		v.AddArg(v0)
		return true
	}
	// match: (Lsh8x8 (Const8 [0]) _)
	// cond:
	// result: (Const8 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 { break }
		if v_0.AuxInt != 0 { break }
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpMod16 constant-folds signed 16-bit remainder when the
// divisor is a nonzero constant. It reports whether a rewrite fired.
func rewriteValuegeneric_OpMod16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mod16 (Const16 [c]) (Const16 [d]))
	// cond: d != 0
	// result: (Const16 [int64(int16(c % d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 { break }
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 { break }
		d := v_1.AuxInt
		if !(d != 0) { break }
		v.reset(OpConst16)
		v.AuxInt = int64(int16(c % d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpMod16u constant-folds unsigned 16-bit remainder when
// the divisor is a nonzero constant. It reports whether a rewrite fired.
func rewriteValuegeneric_OpMod16u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mod16u (Const16 [c]) (Const16 [d]))
	// cond: d != 0
	// result: (Const16 [int64(uint16(c) % uint16(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 { break }
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 { break }
		d := v_1.AuxInt
		if !(d != 0) { break }
		v.reset(OpConst16)
		v.AuxInt = int64(uint16(c) % uint16(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpMod32 constant-folds signed 32-bit remainder when the
// divisor is a nonzero constant. It reports whether a rewrite fired.
func rewriteValuegeneric_OpMod32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mod32 (Const32 [c]) (Const32 [d]))
	// cond: d != 0
	// result: (Const32 [int64(int32(c % d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 { break }
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 { break }
		d := v_1.AuxInt
		if !(d != 0) { break }
		v.reset(OpConst32)
		v.AuxInt = int64(int32(c % d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpMod32u constant-folds unsigned 32-bit remainder when
// the divisor is a nonzero constant. It reports whether a rewrite fired.
func rewriteValuegeneric_OpMod32u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mod32u (Const32 [c]) (Const32 [d]))
	// cond: d != 0
	// result: (Const32 [int64(uint32(c) % uint32(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 { break }
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 { break }
		d := v_1.AuxInt
		if !(d != 0) { break }
		v.reset(OpConst32)
		v.AuxInt = int64(uint32(c) % uint32(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpMod64 constant-folds signed 64-bit remainder and
// strength-reduces remainder by a "magic" divisor into sub/mul/div.
// It reports whether a rewrite fired.
func rewriteValuegeneric_OpMod64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mod64 (Const64 [c]) (Const64 [d]))
	// cond: d != 0
	// result: (Const64 [c % d])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 { break }
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 { break }
		d := v_1.AuxInt
		if !(d != 0) { break }
		v.reset(OpConst64)
		v.AuxInt = c % d
		return true
	}
	// match: (Mod64 <t> x (Const64 [c]))
	// cond: x.Op != OpConst64 &&
smagic64ok(c) 5248 // result: (Sub64 x (Mul64 <t> (Div64 <t> x (Const64 <t> [c])) (Const64 <t> [c]))) 5249 for { 5250 t := v.Type 5251 x := v.Args[0] 5252 v_1 := v.Args[1] 5253 if v_1.Op != OpConst64 { 5254 break 5255 } 5256 c := v_1.AuxInt 5257 if !(x.Op != OpConst64 && smagic64ok(c)) { 5258 break 5259 } 5260 v.reset(OpSub64) 5261 v.AddArg(x) 5262 v0 := b.NewValue0(v.Line, OpMul64, t) 5263 v1 := b.NewValue0(v.Line, OpDiv64, t) 5264 v1.AddArg(x) 5265 v2 := b.NewValue0(v.Line, OpConst64, t) 5266 v2.AuxInt = c 5267 v1.AddArg(v2) 5268 v0.AddArg(v1) 5269 v3 := b.NewValue0(v.Line, OpConst64, t) 5270 v3.AuxInt = c 5271 v0.AddArg(v3) 5272 v.AddArg(v0) 5273 return true 5274 } 5275 return false 5276 } 5277 func rewriteValuegeneric_OpMod64u(v *Value, config *Config) bool { 5278 b := v.Block 5279 _ = b 5280 // match: (Mod64u (Const64 [c]) (Const64 [d])) 5281 // cond: d != 0 5282 // result: (Const64 [int64(uint64(c) % uint64(d))]) 5283 for { 5284 v_0 := v.Args[0] 5285 if v_0.Op != OpConst64 { 5286 break 5287 } 5288 c := v_0.AuxInt 5289 v_1 := v.Args[1] 5290 if v_1.Op != OpConst64 { 5291 break 5292 } 5293 d := v_1.AuxInt 5294 if !(d != 0) { 5295 break 5296 } 5297 v.reset(OpConst64) 5298 v.AuxInt = int64(uint64(c) % uint64(d)) 5299 return true 5300 } 5301 // match: (Mod64u <t> n (Const64 [c])) 5302 // cond: isPowerOfTwo(c) 5303 // result: (And64 n (Const64 <t> [c-1])) 5304 for { 5305 t := v.Type 5306 n := v.Args[0] 5307 v_1 := v.Args[1] 5308 if v_1.Op != OpConst64 { 5309 break 5310 } 5311 c := v_1.AuxInt 5312 if !(isPowerOfTwo(c)) { 5313 break 5314 } 5315 v.reset(OpAnd64) 5316 v.AddArg(n) 5317 v0 := b.NewValue0(v.Line, OpConst64, t) 5318 v0.AuxInt = c - 1 5319 v.AddArg(v0) 5320 return true 5321 } 5322 // match: (Mod64u <t> x (Const64 [c])) 5323 // cond: x.Op != OpConst64 && umagic64ok(c) 5324 // result: (Sub64 x (Mul64 <t> (Div64u <t> x (Const64 <t> [c])) (Const64 <t> [c]))) 5325 for { 5326 t := v.Type 5327 x := v.Args[0] 5328 v_1 := v.Args[1] 5329 if v_1.Op != OpConst64 { 5330 
break 5331 } 5332 c := v_1.AuxInt 5333 if !(x.Op != OpConst64 && umagic64ok(c)) { 5334 break 5335 } 5336 v.reset(OpSub64) 5337 v.AddArg(x) 5338 v0 := b.NewValue0(v.Line, OpMul64, t) 5339 v1 := b.NewValue0(v.Line, OpDiv64u, t) 5340 v1.AddArg(x) 5341 v2 := b.NewValue0(v.Line, OpConst64, t) 5342 v2.AuxInt = c 5343 v1.AddArg(v2) 5344 v0.AddArg(v1) 5345 v3 := b.NewValue0(v.Line, OpConst64, t) 5346 v3.AuxInt = c 5347 v0.AddArg(v3) 5348 v.AddArg(v0) 5349 return true 5350 } 5351 return false 5352 } 5353 func rewriteValuegeneric_OpMod8(v *Value, config *Config) bool { 5354 b := v.Block 5355 _ = b 5356 // match: (Mod8 (Const8 [c]) (Const8 [d])) 5357 // cond: d != 0 5358 // result: (Const8 [int64(int8(c % d))]) 5359 for { 5360 v_0 := v.Args[0] 5361 if v_0.Op != OpConst8 { 5362 break 5363 } 5364 c := v_0.AuxInt 5365 v_1 := v.Args[1] 5366 if v_1.Op != OpConst8 { 5367 break 5368 } 5369 d := v_1.AuxInt 5370 if !(d != 0) { 5371 break 5372 } 5373 v.reset(OpConst8) 5374 v.AuxInt = int64(int8(c % d)) 5375 return true 5376 } 5377 return false 5378 } 5379 func rewriteValuegeneric_OpMod8u(v *Value, config *Config) bool { 5380 b := v.Block 5381 _ = b 5382 // match: (Mod8u (Const8 [c]) (Const8 [d])) 5383 // cond: d != 0 5384 // result: (Const8 [int64(uint8(c) % uint8(d))]) 5385 for { 5386 v_0 := v.Args[0] 5387 if v_0.Op != OpConst8 { 5388 break 5389 } 5390 c := v_0.AuxInt 5391 v_1 := v.Args[1] 5392 if v_1.Op != OpConst8 { 5393 break 5394 } 5395 d := v_1.AuxInt 5396 if !(d != 0) { 5397 break 5398 } 5399 v.reset(OpConst8) 5400 v.AuxInt = int64(uint8(c) % uint8(d)) 5401 return true 5402 } 5403 return false 5404 } 5405 func rewriteValuegeneric_OpMul16(v *Value, config *Config) bool { 5406 b := v.Block 5407 _ = b 5408 // match: (Mul16 (Const16 [c]) (Const16 [d])) 5409 // cond: 5410 // result: (Const16 [int64(int16(c*d))]) 5411 for { 5412 v_0 := v.Args[0] 5413 if v_0.Op != OpConst16 { 5414 break 5415 } 5416 c := v_0.AuxInt 5417 v_1 := v.Args[1] 5418 if v_1.Op != OpConst16 { 5419 break 5420 } 
5421 d := v_1.AuxInt 5422 v.reset(OpConst16) 5423 v.AuxInt = int64(int16(c * d)) 5424 return true 5425 } 5426 // match: (Mul16 (Const16 [-1]) x) 5427 // cond: 5428 // result: (Neg16 x) 5429 for { 5430 v_0 := v.Args[0] 5431 if v_0.Op != OpConst16 { 5432 break 5433 } 5434 if v_0.AuxInt != -1 { 5435 break 5436 } 5437 x := v.Args[1] 5438 v.reset(OpNeg16) 5439 v.AddArg(x) 5440 return true 5441 } 5442 // match: (Mul16 x (Const16 <t> [c])) 5443 // cond: x.Op != OpConst16 5444 // result: (Mul16 (Const16 <t> [c]) x) 5445 for { 5446 x := v.Args[0] 5447 v_1 := v.Args[1] 5448 if v_1.Op != OpConst16 { 5449 break 5450 } 5451 t := v_1.Type 5452 c := v_1.AuxInt 5453 if !(x.Op != OpConst16) { 5454 break 5455 } 5456 v.reset(OpMul16) 5457 v0 := b.NewValue0(v.Line, OpConst16, t) 5458 v0.AuxInt = c 5459 v.AddArg(v0) 5460 v.AddArg(x) 5461 return true 5462 } 5463 // match: (Mul16 (Const16 [0]) _) 5464 // cond: 5465 // result: (Const16 [0]) 5466 for { 5467 v_0 := v.Args[0] 5468 if v_0.Op != OpConst16 { 5469 break 5470 } 5471 if v_0.AuxInt != 0 { 5472 break 5473 } 5474 v.reset(OpConst16) 5475 v.AuxInt = 0 5476 return true 5477 } 5478 return false 5479 } 5480 func rewriteValuegeneric_OpMul32(v *Value, config *Config) bool { 5481 b := v.Block 5482 _ = b 5483 // match: (Mul32 (Const32 [c]) (Const32 [d])) 5484 // cond: 5485 // result: (Const32 [int64(int32(c*d))]) 5486 for { 5487 v_0 := v.Args[0] 5488 if v_0.Op != OpConst32 { 5489 break 5490 } 5491 c := v_0.AuxInt 5492 v_1 := v.Args[1] 5493 if v_1.Op != OpConst32 { 5494 break 5495 } 5496 d := v_1.AuxInt 5497 v.reset(OpConst32) 5498 v.AuxInt = int64(int32(c * d)) 5499 return true 5500 } 5501 // match: (Mul32 (Const32 [-1]) x) 5502 // cond: 5503 // result: (Neg32 x) 5504 for { 5505 v_0 := v.Args[0] 5506 if v_0.Op != OpConst32 { 5507 break 5508 } 5509 if v_0.AuxInt != -1 { 5510 break 5511 } 5512 x := v.Args[1] 5513 v.reset(OpNeg32) 5514 v.AddArg(x) 5515 return true 5516 } 5517 // match: (Mul32 x (Const32 <t> [c])) 5518 // cond: x.Op != OpConst32 
5519 // result: (Mul32 (Const32 <t> [c]) x) 5520 for { 5521 x := v.Args[0] 5522 v_1 := v.Args[1] 5523 if v_1.Op != OpConst32 { 5524 break 5525 } 5526 t := v_1.Type 5527 c := v_1.AuxInt 5528 if !(x.Op != OpConst32) { 5529 break 5530 } 5531 v.reset(OpMul32) 5532 v0 := b.NewValue0(v.Line, OpConst32, t) 5533 v0.AuxInt = c 5534 v.AddArg(v0) 5535 v.AddArg(x) 5536 return true 5537 } 5538 // match: (Mul32 (Const32 <t> [c]) (Add32 <t> (Const32 <t> [d]) x)) 5539 // cond: 5540 // result: (Add32 (Const32 <t> [int64(int32(c*d))]) (Mul32 <t> (Const32 <t> [c]) x)) 5541 for { 5542 v_0 := v.Args[0] 5543 if v_0.Op != OpConst32 { 5544 break 5545 } 5546 t := v_0.Type 5547 c := v_0.AuxInt 5548 v_1 := v.Args[1] 5549 if v_1.Op != OpAdd32 { 5550 break 5551 } 5552 if v_1.Type != t { 5553 break 5554 } 5555 v_1_0 := v_1.Args[0] 5556 if v_1_0.Op != OpConst32 { 5557 break 5558 } 5559 if v_1_0.Type != t { 5560 break 5561 } 5562 d := v_1_0.AuxInt 5563 x := v_1.Args[1] 5564 v.reset(OpAdd32) 5565 v0 := b.NewValue0(v.Line, OpConst32, t) 5566 v0.AuxInt = int64(int32(c * d)) 5567 v.AddArg(v0) 5568 v1 := b.NewValue0(v.Line, OpMul32, t) 5569 v2 := b.NewValue0(v.Line, OpConst32, t) 5570 v2.AuxInt = c 5571 v1.AddArg(v2) 5572 v1.AddArg(x) 5573 v.AddArg(v1) 5574 return true 5575 } 5576 // match: (Mul32 (Const32 [0]) _) 5577 // cond: 5578 // result: (Const32 [0]) 5579 for { 5580 v_0 := v.Args[0] 5581 if v_0.Op != OpConst32 { 5582 break 5583 } 5584 if v_0.AuxInt != 0 { 5585 break 5586 } 5587 v.reset(OpConst32) 5588 v.AuxInt = 0 5589 return true 5590 } 5591 return false 5592 } 5593 func rewriteValuegeneric_OpMul32F(v *Value, config *Config) bool { 5594 b := v.Block 5595 _ = b 5596 // match: (Mul32F (Const32F [c]) (Const32F [d])) 5597 // cond: 5598 // result: (Const32F [f2i(float64(i2f32(c) * i2f32(d)))]) 5599 for { 5600 v_0 := v.Args[0] 5601 if v_0.Op != OpConst32F { 5602 break 5603 } 5604 c := v_0.AuxInt 5605 v_1 := v.Args[1] 5606 if v_1.Op != OpConst32F { 5607 break 5608 } 5609 d := v_1.AuxInt 5610 
v.reset(OpConst32F)
		v.AuxInt = f2i(float64(i2f32(c) * i2f32(d)))
		return true
	}
	// match: (Mul32F x (Const32F [f2i(1)]))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32F { break }
		if v_1.AuxInt != f2i(1) { break }
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Mul32F (Const32F [f2i(1)]) x)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32F { break }
		if v_0.AuxInt != f2i(1) { break }
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Mul32F x (Const32F [f2i(-1)]))
	// cond:
	// result: (Neg32F x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32F { break }
		if v_1.AuxInt != f2i(-1) { break }
		v.reset(OpNeg32F)
		v.AddArg(x)
		return true
	}
	// match: (Mul32F (Const32F [f2i(-1)]) x)
	// cond:
	// result: (Neg32F x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32F { break }
		if v_0.AuxInt != f2i(-1) { break }
		x := v.Args[1]
		v.reset(OpNeg32F)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpMul64 folds and canonicalizes 64-bit multiplies:
// constant folding, (-1)*x -> Neg64, constants moved to the first operand,
// distribution of a constant over (Add64 (Const64) x), and 0*x -> 0.
// It reports whether a rewrite fired.
func rewriteValuegeneric_OpMul64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mul64 (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (Const64 [c*d])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 { break }
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 { break }
		d := v_1.AuxInt
		v.reset(OpConst64)
		v.AuxInt = c * d
		return true
	}
	// match: (Mul64 (Const64 [-1]) x)
	// cond:
	// result: (Neg64 x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 { break }
		if v_0.AuxInt != -1 { break }
		x := v.Args[1]
		v.reset(OpNeg64)
		v.AddArg(x)
		return true
	}
	// match: (Mul64 x (Const64 <t> [c]))
	// cond: x.Op != OpConst64
	// result: (Mul64 (Const64 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 { break }
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst64) { break }
		v.reset(OpMul64)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Mul64 (Const64 <t> [c]) (Add64 <t> (Const64 <t> [d]) x))
	// cond:
	// result: (Add64 (Const64 <t> [c*d]) (Mul64 <t> (Const64 <t> [c]) x))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 { break }
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAdd64 { break }
		if v_1.Type != t { break }
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst64 { break }
		if v_1_0.Type != t { break }
		d := v_1_0.AuxInt
		x := v_1.Args[1]
		v.reset(OpAdd64)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c * d
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMul64, t)
		v2 := b.NewValue0(v.Line, OpConst64, t)
		v2.AuxInt = c
		v1.AddArg(v2)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
	// match: (Mul64 (Const64 [0]) _)
	// cond:
	// result: (Const64 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 { break }
		if v_0.AuxInt != 0 { break }
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpMul64F folds 64-bit float multiplies of constants and
// simplifies multiplication by +1/-1. It reports whether a rewrite fired.
func rewriteValuegeneric_OpMul64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mul64F (Const64F [c]) (Const64F [d]))
	// cond:
	// result: (Const64F [f2i(i2f(c) * i2f(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64F {
break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64F { break }
		d := v_1.AuxInt
		v.reset(OpConst64F)
		v.AuxInt = f2i(i2f(c) * i2f(d))
		return true
	}
	// match: (Mul64F x (Const64F [f2i(1)]))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64F { break }
		if v_1.AuxInt != f2i(1) { break }
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Mul64F (Const64F [f2i(1)]) x)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64F { break }
		if v_0.AuxInt != f2i(1) { break }
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Mul64F x (Const64F [f2i(-1)]))
	// cond:
	// result: (Neg64F x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64F { break }
		if v_1.AuxInt != f2i(-1) { break }
		v.reset(OpNeg64F)
		v.AddArg(x)
		return true
	}
	// match: (Mul64F (Const64F [f2i(-1)]) x)
	// cond:
	// result: (Neg64F x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64F { break }
		if v_0.AuxInt != f2i(-1) { break }
		x := v.Args[1]
		v.reset(OpNeg64F)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpMul8 folds and canonicalizes 8-bit multiplies:
// constant folding, (-1)*x -> Neg8, constants moved to the first operand,
// and 0*x -> 0. It reports whether a rewrite fired.
func rewriteValuegeneric_OpMul8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mul8 (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (Const8 [int64(int8(c*d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 { break }
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 { break }
		d := v_1.AuxInt
		v.reset(OpConst8)
		v.AuxInt = int64(int8(c * d))
		return true
	}
	// match: (Mul8 (Const8 [-1]) x)
	// cond:
	// result: (Neg8 x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 { break }
		if v_0.AuxInt != -1 { break }
		x := v.Args[1]
		v.reset(OpNeg8)
		v.AddArg(x)
		return true
	}
	// match: (Mul8 x (Const8 <t> [c]))
	// cond: x.Op != OpConst8
	// result: (Mul8 (Const8 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 { break }
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst8) { break }
		v.reset(OpMul8)
		v0 := b.NewValue0(v.Line, OpConst8, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Mul8 (Const8 [0]) _)
	// cond:
	// result: (Const8 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 { break }
		if v_0.AuxInt != 0 { break }
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpNeg16 turns -(x-y) into (y-x) for 16-bit values.
// It reports whether a rewrite fired.
func rewriteValuegeneric_OpNeg16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neg16 (Sub16 x y))
	// cond:
	// result: (Sub16 y x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSub16 { break }
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpSub16)
		v.AddArg(y)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpNeg32 turns -(x-y) into (y-x) for 32-bit values.
// It reports whether a rewrite fired.
func rewriteValuegeneric_OpNeg32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neg32 (Sub32 x y))
	// cond:
	// result: (Sub32 y x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSub32 { break }
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpSub32)
		v.AddArg(y)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpNeg64 turns -(x-y) into (y-x) for 64-bit values.
// It reports whether a rewrite fired.
// NOTE(review): this definition continues past the visible chunk; only the
// head seen here is reproduced.
func rewriteValuegeneric_OpNeg64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neg64 (Sub64 x y))
	// cond:
	// result: (Sub64 y x)
	for {
		v_0 := v.Args[0]
		if
v_0.Op != OpSub64 { 6008 break 6009 } 6010 x := v_0.Args[0] 6011 y := v_0.Args[1] 6012 v.reset(OpSub64) 6013 v.AddArg(y) 6014 v.AddArg(x) 6015 return true 6016 } 6017 return false 6018 } 6019 func rewriteValuegeneric_OpNeg8(v *Value, config *Config) bool { 6020 b := v.Block 6021 _ = b 6022 // match: (Neg8 (Sub8 x y)) 6023 // cond: 6024 // result: (Sub8 y x) 6025 for { 6026 v_0 := v.Args[0] 6027 if v_0.Op != OpSub8 { 6028 break 6029 } 6030 x := v_0.Args[0] 6031 y := v_0.Args[1] 6032 v.reset(OpSub8) 6033 v.AddArg(y) 6034 v.AddArg(x) 6035 return true 6036 } 6037 return false 6038 } 6039 func rewriteValuegeneric_OpNeq16(v *Value, config *Config) bool { 6040 b := v.Block 6041 _ = b 6042 // match: (Neq16 x x) 6043 // cond: 6044 // result: (ConstBool [0]) 6045 for { 6046 x := v.Args[0] 6047 if x != v.Args[1] { 6048 break 6049 } 6050 v.reset(OpConstBool) 6051 v.AuxInt = 0 6052 return true 6053 } 6054 // match: (Neq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x)) 6055 // cond: 6056 // result: (Neq16 (Const16 <t> [int64(int16(c-d))]) x) 6057 for { 6058 v_0 := v.Args[0] 6059 if v_0.Op != OpConst16 { 6060 break 6061 } 6062 t := v_0.Type 6063 c := v_0.AuxInt 6064 v_1 := v.Args[1] 6065 if v_1.Op != OpAdd16 { 6066 break 6067 } 6068 v_1_0 := v_1.Args[0] 6069 if v_1_0.Op != OpConst16 { 6070 break 6071 } 6072 if v_1_0.Type != t { 6073 break 6074 } 6075 d := v_1_0.AuxInt 6076 x := v_1.Args[1] 6077 v.reset(OpNeq16) 6078 v0 := b.NewValue0(v.Line, OpConst16, t) 6079 v0.AuxInt = int64(int16(c - d)) 6080 v.AddArg(v0) 6081 v.AddArg(x) 6082 return true 6083 } 6084 // match: (Neq16 x (Const16 <t> [c])) 6085 // cond: x.Op != OpConst16 6086 // result: (Neq16 (Const16 <t> [c]) x) 6087 for { 6088 x := v.Args[0] 6089 v_1 := v.Args[1] 6090 if v_1.Op != OpConst16 { 6091 break 6092 } 6093 t := v_1.Type 6094 c := v_1.AuxInt 6095 if !(x.Op != OpConst16) { 6096 break 6097 } 6098 v.reset(OpNeq16) 6099 v0 := b.NewValue0(v.Line, OpConst16, t) 6100 v0.AuxInt = c 6101 v.AddArg(v0) 6102 v.AddArg(x) 6103 
return true 6104 } 6105 // match: (Neq16 (Const16 [c]) (Const16 [d])) 6106 // cond: 6107 // result: (ConstBool [b2i(c != d)]) 6108 for { 6109 v_0 := v.Args[0] 6110 if v_0.Op != OpConst16 { 6111 break 6112 } 6113 c := v_0.AuxInt 6114 v_1 := v.Args[1] 6115 if v_1.Op != OpConst16 { 6116 break 6117 } 6118 d := v_1.AuxInt 6119 v.reset(OpConstBool) 6120 v.AuxInt = b2i(c != d) 6121 return true 6122 } 6123 return false 6124 } 6125 func rewriteValuegeneric_OpNeq32(v *Value, config *Config) bool { 6126 b := v.Block 6127 _ = b 6128 // match: (Neq32 x x) 6129 // cond: 6130 // result: (ConstBool [0]) 6131 for { 6132 x := v.Args[0] 6133 if x != v.Args[1] { 6134 break 6135 } 6136 v.reset(OpConstBool) 6137 v.AuxInt = 0 6138 return true 6139 } 6140 // match: (Neq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x)) 6141 // cond: 6142 // result: (Neq32 (Const32 <t> [int64(int32(c-d))]) x) 6143 for { 6144 v_0 := v.Args[0] 6145 if v_0.Op != OpConst32 { 6146 break 6147 } 6148 t := v_0.Type 6149 c := v_0.AuxInt 6150 v_1 := v.Args[1] 6151 if v_1.Op != OpAdd32 { 6152 break 6153 } 6154 v_1_0 := v_1.Args[0] 6155 if v_1_0.Op != OpConst32 { 6156 break 6157 } 6158 if v_1_0.Type != t { 6159 break 6160 } 6161 d := v_1_0.AuxInt 6162 x := v_1.Args[1] 6163 v.reset(OpNeq32) 6164 v0 := b.NewValue0(v.Line, OpConst32, t) 6165 v0.AuxInt = int64(int32(c - d)) 6166 v.AddArg(v0) 6167 v.AddArg(x) 6168 return true 6169 } 6170 // match: (Neq32 x (Const32 <t> [c])) 6171 // cond: x.Op != OpConst32 6172 // result: (Neq32 (Const32 <t> [c]) x) 6173 for { 6174 x := v.Args[0] 6175 v_1 := v.Args[1] 6176 if v_1.Op != OpConst32 { 6177 break 6178 } 6179 t := v_1.Type 6180 c := v_1.AuxInt 6181 if !(x.Op != OpConst32) { 6182 break 6183 } 6184 v.reset(OpNeq32) 6185 v0 := b.NewValue0(v.Line, OpConst32, t) 6186 v0.AuxInt = c 6187 v.AddArg(v0) 6188 v.AddArg(x) 6189 return true 6190 } 6191 // match: (Neq32 (Const32 [c]) (Const32 [d])) 6192 // cond: 6193 // result: (ConstBool [b2i(c != d)]) 6194 for { 6195 v_0 := v.Args[0] 6196 if 
v_0.Op != OpConst32 { 6197 break 6198 } 6199 c := v_0.AuxInt 6200 v_1 := v.Args[1] 6201 if v_1.Op != OpConst32 { 6202 break 6203 } 6204 d := v_1.AuxInt 6205 v.reset(OpConstBool) 6206 v.AuxInt = b2i(c != d) 6207 return true 6208 } 6209 return false 6210 } 6211 func rewriteValuegeneric_OpNeq64(v *Value, config *Config) bool { 6212 b := v.Block 6213 _ = b 6214 // match: (Neq64 x x) 6215 // cond: 6216 // result: (ConstBool [0]) 6217 for { 6218 x := v.Args[0] 6219 if x != v.Args[1] { 6220 break 6221 } 6222 v.reset(OpConstBool) 6223 v.AuxInt = 0 6224 return true 6225 } 6226 // match: (Neq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x)) 6227 // cond: 6228 // result: (Neq64 (Const64 <t> [c-d]) x) 6229 for { 6230 v_0 := v.Args[0] 6231 if v_0.Op != OpConst64 { 6232 break 6233 } 6234 t := v_0.Type 6235 c := v_0.AuxInt 6236 v_1 := v.Args[1] 6237 if v_1.Op != OpAdd64 { 6238 break 6239 } 6240 v_1_0 := v_1.Args[0] 6241 if v_1_0.Op != OpConst64 { 6242 break 6243 } 6244 if v_1_0.Type != t { 6245 break 6246 } 6247 d := v_1_0.AuxInt 6248 x := v_1.Args[1] 6249 v.reset(OpNeq64) 6250 v0 := b.NewValue0(v.Line, OpConst64, t) 6251 v0.AuxInt = c - d 6252 v.AddArg(v0) 6253 v.AddArg(x) 6254 return true 6255 } 6256 // match: (Neq64 x (Const64 <t> [c])) 6257 // cond: x.Op != OpConst64 6258 // result: (Neq64 (Const64 <t> [c]) x) 6259 for { 6260 x := v.Args[0] 6261 v_1 := v.Args[1] 6262 if v_1.Op != OpConst64 { 6263 break 6264 } 6265 t := v_1.Type 6266 c := v_1.AuxInt 6267 if !(x.Op != OpConst64) { 6268 break 6269 } 6270 v.reset(OpNeq64) 6271 v0 := b.NewValue0(v.Line, OpConst64, t) 6272 v0.AuxInt = c 6273 v.AddArg(v0) 6274 v.AddArg(x) 6275 return true 6276 } 6277 // match: (Neq64 (Const64 [c]) (Const64 [d])) 6278 // cond: 6279 // result: (ConstBool [b2i(c != d)]) 6280 for { 6281 v_0 := v.Args[0] 6282 if v_0.Op != OpConst64 { 6283 break 6284 } 6285 c := v_0.AuxInt 6286 v_1 := v.Args[1] 6287 if v_1.Op != OpConst64 { 6288 break 6289 } 6290 d := v_1.AuxInt 6291 v.reset(OpConstBool) 6292 v.AuxInt = 
b2i(c != d) 6293 return true 6294 } 6295 return false 6296 } 6297 func rewriteValuegeneric_OpNeq8(v *Value, config *Config) bool { 6298 b := v.Block 6299 _ = b 6300 // match: (Neq8 x x) 6301 // cond: 6302 // result: (ConstBool [0]) 6303 for { 6304 x := v.Args[0] 6305 if x != v.Args[1] { 6306 break 6307 } 6308 v.reset(OpConstBool) 6309 v.AuxInt = 0 6310 return true 6311 } 6312 // match: (Neq8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x)) 6313 // cond: 6314 // result: (Neq8 (Const8 <t> [int64(int8(c-d))]) x) 6315 for { 6316 v_0 := v.Args[0] 6317 if v_0.Op != OpConst8 { 6318 break 6319 } 6320 t := v_0.Type 6321 c := v_0.AuxInt 6322 v_1 := v.Args[1] 6323 if v_1.Op != OpAdd8 { 6324 break 6325 } 6326 v_1_0 := v_1.Args[0] 6327 if v_1_0.Op != OpConst8 { 6328 break 6329 } 6330 if v_1_0.Type != t { 6331 break 6332 } 6333 d := v_1_0.AuxInt 6334 x := v_1.Args[1] 6335 v.reset(OpNeq8) 6336 v0 := b.NewValue0(v.Line, OpConst8, t) 6337 v0.AuxInt = int64(int8(c - d)) 6338 v.AddArg(v0) 6339 v.AddArg(x) 6340 return true 6341 } 6342 // match: (Neq8 x (Const8 <t> [c])) 6343 // cond: x.Op != OpConst8 6344 // result: (Neq8 (Const8 <t> [c]) x) 6345 for { 6346 x := v.Args[0] 6347 v_1 := v.Args[1] 6348 if v_1.Op != OpConst8 { 6349 break 6350 } 6351 t := v_1.Type 6352 c := v_1.AuxInt 6353 if !(x.Op != OpConst8) { 6354 break 6355 } 6356 v.reset(OpNeq8) 6357 v0 := b.NewValue0(v.Line, OpConst8, t) 6358 v0.AuxInt = c 6359 v.AddArg(v0) 6360 v.AddArg(x) 6361 return true 6362 } 6363 // match: (Neq8 (Const8 [c]) (Const8 [d])) 6364 // cond: 6365 // result: (ConstBool [b2i(c != d)]) 6366 for { 6367 v_0 := v.Args[0] 6368 if v_0.Op != OpConst8 { 6369 break 6370 } 6371 c := v_0.AuxInt 6372 v_1 := v.Args[1] 6373 if v_1.Op != OpConst8 { 6374 break 6375 } 6376 d := v_1.AuxInt 6377 v.reset(OpConstBool) 6378 v.AuxInt = b2i(c != d) 6379 return true 6380 } 6381 return false 6382 } 6383 func rewriteValuegeneric_OpNeqB(v *Value, config *Config) bool { 6384 b := v.Block 6385 _ = b 6386 // match: (NeqB (ConstBool [c]) 
(ConstBool [d])) 6387 // cond: 6388 // result: (ConstBool [b2i(c != d)]) 6389 for { 6390 v_0 := v.Args[0] 6391 if v_0.Op != OpConstBool { 6392 break 6393 } 6394 c := v_0.AuxInt 6395 v_1 := v.Args[1] 6396 if v_1.Op != OpConstBool { 6397 break 6398 } 6399 d := v_1.AuxInt 6400 v.reset(OpConstBool) 6401 v.AuxInt = b2i(c != d) 6402 return true 6403 } 6404 // match: (NeqB (ConstBool [0]) x) 6405 // cond: 6406 // result: x 6407 for { 6408 v_0 := v.Args[0] 6409 if v_0.Op != OpConstBool { 6410 break 6411 } 6412 if v_0.AuxInt != 0 { 6413 break 6414 } 6415 x := v.Args[1] 6416 v.reset(OpCopy) 6417 v.Type = x.Type 6418 v.AddArg(x) 6419 return true 6420 } 6421 // match: (NeqB (ConstBool [1]) x) 6422 // cond: 6423 // result: (Not x) 6424 for { 6425 v_0 := v.Args[0] 6426 if v_0.Op != OpConstBool { 6427 break 6428 } 6429 if v_0.AuxInt != 1 { 6430 break 6431 } 6432 x := v.Args[1] 6433 v.reset(OpNot) 6434 v.AddArg(x) 6435 return true 6436 } 6437 return false 6438 } 6439 func rewriteValuegeneric_OpNeqInter(v *Value, config *Config) bool { 6440 b := v.Block 6441 _ = b 6442 // match: (NeqInter x y) 6443 // cond: 6444 // result: (NeqPtr (ITab x) (ITab y)) 6445 for { 6446 x := v.Args[0] 6447 y := v.Args[1] 6448 v.reset(OpNeqPtr) 6449 v0 := b.NewValue0(v.Line, OpITab, config.fe.TypeBytePtr()) 6450 v0.AddArg(x) 6451 v.AddArg(v0) 6452 v1 := b.NewValue0(v.Line, OpITab, config.fe.TypeBytePtr()) 6453 v1.AddArg(y) 6454 v.AddArg(v1) 6455 return true 6456 } 6457 } 6458 func rewriteValuegeneric_OpNeqPtr(v *Value, config *Config) bool { 6459 b := v.Block 6460 _ = b 6461 // match: (NeqPtr p (ConstNil)) 6462 // cond: 6463 // result: (IsNonNil p) 6464 for { 6465 p := v.Args[0] 6466 v_1 := v.Args[1] 6467 if v_1.Op != OpConstNil { 6468 break 6469 } 6470 v.reset(OpIsNonNil) 6471 v.AddArg(p) 6472 return true 6473 } 6474 // match: (NeqPtr (ConstNil) p) 6475 // cond: 6476 // result: (IsNonNil p) 6477 for { 6478 v_0 := v.Args[0] 6479 if v_0.Op != OpConstNil { 6480 break 6481 } 6482 p := v.Args[1] 6483 
v.reset(OpIsNonNil) 6484 v.AddArg(p) 6485 return true 6486 } 6487 return false 6488 } 6489 func rewriteValuegeneric_OpNeqSlice(v *Value, config *Config) bool { 6490 b := v.Block 6491 _ = b 6492 // match: (NeqSlice x y) 6493 // cond: 6494 // result: (NeqPtr (SlicePtr x) (SlicePtr y)) 6495 for { 6496 x := v.Args[0] 6497 y := v.Args[1] 6498 v.reset(OpNeqPtr) 6499 v0 := b.NewValue0(v.Line, OpSlicePtr, config.fe.TypeBytePtr()) 6500 v0.AddArg(x) 6501 v.AddArg(v0) 6502 v1 := b.NewValue0(v.Line, OpSlicePtr, config.fe.TypeBytePtr()) 6503 v1.AddArg(y) 6504 v.AddArg(v1) 6505 return true 6506 } 6507 } 6508 func rewriteValuegeneric_OpNilCheck(v *Value, config *Config) bool { 6509 b := v.Block 6510 _ = b 6511 // match: (NilCheck (GetG mem) mem) 6512 // cond: 6513 // result: mem 6514 for { 6515 v_0 := v.Args[0] 6516 if v_0.Op != OpGetG { 6517 break 6518 } 6519 mem := v_0.Args[0] 6520 if mem != v.Args[1] { 6521 break 6522 } 6523 v.reset(OpCopy) 6524 v.Type = mem.Type 6525 v.AddArg(mem) 6526 return true 6527 } 6528 // match: (NilCheck (Load (OffPtr [c] (SP)) mem) mem) 6529 // cond: mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize() + config.RegSize && warnRule(config.Debug_checknil() && int(v.Line) > 1, v, "removed nil check") 6530 // result: (Invalid) 6531 for { 6532 v_0 := v.Args[0] 6533 if v_0.Op != OpLoad { 6534 break 6535 } 6536 v_0_0 := v_0.Args[0] 6537 if v_0_0.Op != OpOffPtr { 6538 break 6539 } 6540 c := v_0_0.AuxInt 6541 v_0_0_0 := v_0_0.Args[0] 6542 if v_0_0_0.Op != OpSP { 6543 break 6544 } 6545 mem := v_0.Args[1] 6546 if mem != v.Args[1] { 6547 break 6548 } 6549 if !(mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize()+config.RegSize && warnRule(config.Debug_checknil() && int(v.Line) > 1, v, "removed nil check")) { 6550 break 6551 } 6552 v.reset(OpInvalid) 6553 return true 6554 } 6555 // match: (NilCheck (OffPtr (Load (OffPtr [c] (SP)) mem)) mem) 6556 // cond: mem.Op == 
OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize() + config.RegSize && warnRule(config.Debug_checknil() && int(v.Line) > 1, v, "removed nil check") 6557 // result: (Invalid) 6558 for { 6559 v_0 := v.Args[0] 6560 if v_0.Op != OpOffPtr { 6561 break 6562 } 6563 v_0_0 := v_0.Args[0] 6564 if v_0_0.Op != OpLoad { 6565 break 6566 } 6567 v_0_0_0 := v_0_0.Args[0] 6568 if v_0_0_0.Op != OpOffPtr { 6569 break 6570 } 6571 c := v_0_0_0.AuxInt 6572 v_0_0_0_0 := v_0_0_0.Args[0] 6573 if v_0_0_0_0.Op != OpSP { 6574 break 6575 } 6576 mem := v_0_0.Args[1] 6577 if mem != v.Args[1] { 6578 break 6579 } 6580 if !(mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize()+config.RegSize && warnRule(config.Debug_checknil() && int(v.Line) > 1, v, "removed nil check")) { 6581 break 6582 } 6583 v.reset(OpInvalid) 6584 return true 6585 } 6586 return false 6587 } 6588 func rewriteValuegeneric_OpNot(v *Value, config *Config) bool { 6589 b := v.Block 6590 _ = b 6591 // match: (Not (Eq64 x y)) 6592 // cond: 6593 // result: (Neq64 x y) 6594 for { 6595 v_0 := v.Args[0] 6596 if v_0.Op != OpEq64 { 6597 break 6598 } 6599 x := v_0.Args[0] 6600 y := v_0.Args[1] 6601 v.reset(OpNeq64) 6602 v.AddArg(x) 6603 v.AddArg(y) 6604 return true 6605 } 6606 // match: (Not (Eq32 x y)) 6607 // cond: 6608 // result: (Neq32 x y) 6609 for { 6610 v_0 := v.Args[0] 6611 if v_0.Op != OpEq32 { 6612 break 6613 } 6614 x := v_0.Args[0] 6615 y := v_0.Args[1] 6616 v.reset(OpNeq32) 6617 v.AddArg(x) 6618 v.AddArg(y) 6619 return true 6620 } 6621 // match: (Not (Eq16 x y)) 6622 // cond: 6623 // result: (Neq16 x y) 6624 for { 6625 v_0 := v.Args[0] 6626 if v_0.Op != OpEq16 { 6627 break 6628 } 6629 x := v_0.Args[0] 6630 y := v_0.Args[1] 6631 v.reset(OpNeq16) 6632 v.AddArg(x) 6633 v.AddArg(y) 6634 return true 6635 } 6636 // match: (Not (Eq8 x y)) 6637 // cond: 6638 // result: (Neq8 x y) 6639 for { 6640 v_0 := v.Args[0] 6641 if v_0.Op != OpEq8 { 6642 break 
6643 } 6644 x := v_0.Args[0] 6645 y := v_0.Args[1] 6646 v.reset(OpNeq8) 6647 v.AddArg(x) 6648 v.AddArg(y) 6649 return true 6650 } 6651 // match: (Not (EqB x y)) 6652 // cond: 6653 // result: (NeqB x y) 6654 for { 6655 v_0 := v.Args[0] 6656 if v_0.Op != OpEqB { 6657 break 6658 } 6659 x := v_0.Args[0] 6660 y := v_0.Args[1] 6661 v.reset(OpNeqB) 6662 v.AddArg(x) 6663 v.AddArg(y) 6664 return true 6665 } 6666 // match: (Not (Neq64 x y)) 6667 // cond: 6668 // result: (Eq64 x y) 6669 for { 6670 v_0 := v.Args[0] 6671 if v_0.Op != OpNeq64 { 6672 break 6673 } 6674 x := v_0.Args[0] 6675 y := v_0.Args[1] 6676 v.reset(OpEq64) 6677 v.AddArg(x) 6678 v.AddArg(y) 6679 return true 6680 } 6681 // match: (Not (Neq32 x y)) 6682 // cond: 6683 // result: (Eq32 x y) 6684 for { 6685 v_0 := v.Args[0] 6686 if v_0.Op != OpNeq32 { 6687 break 6688 } 6689 x := v_0.Args[0] 6690 y := v_0.Args[1] 6691 v.reset(OpEq32) 6692 v.AddArg(x) 6693 v.AddArg(y) 6694 return true 6695 } 6696 // match: (Not (Neq16 x y)) 6697 // cond: 6698 // result: (Eq16 x y) 6699 for { 6700 v_0 := v.Args[0] 6701 if v_0.Op != OpNeq16 { 6702 break 6703 } 6704 x := v_0.Args[0] 6705 y := v_0.Args[1] 6706 v.reset(OpEq16) 6707 v.AddArg(x) 6708 v.AddArg(y) 6709 return true 6710 } 6711 // match: (Not (Neq8 x y)) 6712 // cond: 6713 // result: (Eq8 x y) 6714 for { 6715 v_0 := v.Args[0] 6716 if v_0.Op != OpNeq8 { 6717 break 6718 } 6719 x := v_0.Args[0] 6720 y := v_0.Args[1] 6721 v.reset(OpEq8) 6722 v.AddArg(x) 6723 v.AddArg(y) 6724 return true 6725 } 6726 // match: (Not (NeqB x y)) 6727 // cond: 6728 // result: (EqB x y) 6729 for { 6730 v_0 := v.Args[0] 6731 if v_0.Op != OpNeqB { 6732 break 6733 } 6734 x := v_0.Args[0] 6735 y := v_0.Args[1] 6736 v.reset(OpEqB) 6737 v.AddArg(x) 6738 v.AddArg(y) 6739 return true 6740 } 6741 // match: (Not (Greater64 x y)) 6742 // cond: 6743 // result: (Leq64 x y) 6744 for { 6745 v_0 := v.Args[0] 6746 if v_0.Op != OpGreater64 { 6747 break 6748 } 6749 x := v_0.Args[0] 6750 y := v_0.Args[1] 6751 
v.reset(OpLeq64) 6752 v.AddArg(x) 6753 v.AddArg(y) 6754 return true 6755 } 6756 // match: (Not (Greater32 x y)) 6757 // cond: 6758 // result: (Leq32 x y) 6759 for { 6760 v_0 := v.Args[0] 6761 if v_0.Op != OpGreater32 { 6762 break 6763 } 6764 x := v_0.Args[0] 6765 y := v_0.Args[1] 6766 v.reset(OpLeq32) 6767 v.AddArg(x) 6768 v.AddArg(y) 6769 return true 6770 } 6771 // match: (Not (Greater16 x y)) 6772 // cond: 6773 // result: (Leq16 x y) 6774 for { 6775 v_0 := v.Args[0] 6776 if v_0.Op != OpGreater16 { 6777 break 6778 } 6779 x := v_0.Args[0] 6780 y := v_0.Args[1] 6781 v.reset(OpLeq16) 6782 v.AddArg(x) 6783 v.AddArg(y) 6784 return true 6785 } 6786 // match: (Not (Greater8 x y)) 6787 // cond: 6788 // result: (Leq8 x y) 6789 for { 6790 v_0 := v.Args[0] 6791 if v_0.Op != OpGreater8 { 6792 break 6793 } 6794 x := v_0.Args[0] 6795 y := v_0.Args[1] 6796 v.reset(OpLeq8) 6797 v.AddArg(x) 6798 v.AddArg(y) 6799 return true 6800 } 6801 // match: (Not (Greater64U x y)) 6802 // cond: 6803 // result: (Leq64U x y) 6804 for { 6805 v_0 := v.Args[0] 6806 if v_0.Op != OpGreater64U { 6807 break 6808 } 6809 x := v_0.Args[0] 6810 y := v_0.Args[1] 6811 v.reset(OpLeq64U) 6812 v.AddArg(x) 6813 v.AddArg(y) 6814 return true 6815 } 6816 // match: (Not (Greater32U x y)) 6817 // cond: 6818 // result: (Leq32U x y) 6819 for { 6820 v_0 := v.Args[0] 6821 if v_0.Op != OpGreater32U { 6822 break 6823 } 6824 x := v_0.Args[0] 6825 y := v_0.Args[1] 6826 v.reset(OpLeq32U) 6827 v.AddArg(x) 6828 v.AddArg(y) 6829 return true 6830 } 6831 // match: (Not (Greater16U x y)) 6832 // cond: 6833 // result: (Leq16U x y) 6834 for { 6835 v_0 := v.Args[0] 6836 if v_0.Op != OpGreater16U { 6837 break 6838 } 6839 x := v_0.Args[0] 6840 y := v_0.Args[1] 6841 v.reset(OpLeq16U) 6842 v.AddArg(x) 6843 v.AddArg(y) 6844 return true 6845 } 6846 // match: (Not (Greater8U x y)) 6847 // cond: 6848 // result: (Leq8U x y) 6849 for { 6850 v_0 := v.Args[0] 6851 if v_0.Op != OpGreater8U { 6852 break 6853 } 6854 x := v_0.Args[0] 6855 y := 
v_0.Args[1] 6856 v.reset(OpLeq8U) 6857 v.AddArg(x) 6858 v.AddArg(y) 6859 return true 6860 } 6861 // match: (Not (Geq64 x y)) 6862 // cond: 6863 // result: (Less64 x y) 6864 for { 6865 v_0 := v.Args[0] 6866 if v_0.Op != OpGeq64 { 6867 break 6868 } 6869 x := v_0.Args[0] 6870 y := v_0.Args[1] 6871 v.reset(OpLess64) 6872 v.AddArg(x) 6873 v.AddArg(y) 6874 return true 6875 } 6876 // match: (Not (Geq32 x y)) 6877 // cond: 6878 // result: (Less32 x y) 6879 for { 6880 v_0 := v.Args[0] 6881 if v_0.Op != OpGeq32 { 6882 break 6883 } 6884 x := v_0.Args[0] 6885 y := v_0.Args[1] 6886 v.reset(OpLess32) 6887 v.AddArg(x) 6888 v.AddArg(y) 6889 return true 6890 } 6891 // match: (Not (Geq16 x y)) 6892 // cond: 6893 // result: (Less16 x y) 6894 for { 6895 v_0 := v.Args[0] 6896 if v_0.Op != OpGeq16 { 6897 break 6898 } 6899 x := v_0.Args[0] 6900 y := v_0.Args[1] 6901 v.reset(OpLess16) 6902 v.AddArg(x) 6903 v.AddArg(y) 6904 return true 6905 } 6906 // match: (Not (Geq8 x y)) 6907 // cond: 6908 // result: (Less8 x y) 6909 for { 6910 v_0 := v.Args[0] 6911 if v_0.Op != OpGeq8 { 6912 break 6913 } 6914 x := v_0.Args[0] 6915 y := v_0.Args[1] 6916 v.reset(OpLess8) 6917 v.AddArg(x) 6918 v.AddArg(y) 6919 return true 6920 } 6921 // match: (Not (Geq64U x y)) 6922 // cond: 6923 // result: (Less64U x y) 6924 for { 6925 v_0 := v.Args[0] 6926 if v_0.Op != OpGeq64U { 6927 break 6928 } 6929 x := v_0.Args[0] 6930 y := v_0.Args[1] 6931 v.reset(OpLess64U) 6932 v.AddArg(x) 6933 v.AddArg(y) 6934 return true 6935 } 6936 // match: (Not (Geq32U x y)) 6937 // cond: 6938 // result: (Less32U x y) 6939 for { 6940 v_0 := v.Args[0] 6941 if v_0.Op != OpGeq32U { 6942 break 6943 } 6944 x := v_0.Args[0] 6945 y := v_0.Args[1] 6946 v.reset(OpLess32U) 6947 v.AddArg(x) 6948 v.AddArg(y) 6949 return true 6950 } 6951 // match: (Not (Geq16U x y)) 6952 // cond: 6953 // result: (Less16U x y) 6954 for { 6955 v_0 := v.Args[0] 6956 if v_0.Op != OpGeq16U { 6957 break 6958 } 6959 x := v_0.Args[0] 6960 y := v_0.Args[1] 6961 
v.reset(OpLess16U) 6962 v.AddArg(x) 6963 v.AddArg(y) 6964 return true 6965 } 6966 // match: (Not (Geq8U x y)) 6967 // cond: 6968 // result: (Less8U x y) 6969 for { 6970 v_0 := v.Args[0] 6971 if v_0.Op != OpGeq8U { 6972 break 6973 } 6974 x := v_0.Args[0] 6975 y := v_0.Args[1] 6976 v.reset(OpLess8U) 6977 v.AddArg(x) 6978 v.AddArg(y) 6979 return true 6980 } 6981 // match: (Not (Less64 x y)) 6982 // cond: 6983 // result: (Geq64 x y) 6984 for { 6985 v_0 := v.Args[0] 6986 if v_0.Op != OpLess64 { 6987 break 6988 } 6989 x := v_0.Args[0] 6990 y := v_0.Args[1] 6991 v.reset(OpGeq64) 6992 v.AddArg(x) 6993 v.AddArg(y) 6994 return true 6995 } 6996 // match: (Not (Less32 x y)) 6997 // cond: 6998 // result: (Geq32 x y) 6999 for { 7000 v_0 := v.Args[0] 7001 if v_0.Op != OpLess32 { 7002 break 7003 } 7004 x := v_0.Args[0] 7005 y := v_0.Args[1] 7006 v.reset(OpGeq32) 7007 v.AddArg(x) 7008 v.AddArg(y) 7009 return true 7010 } 7011 // match: (Not (Less16 x y)) 7012 // cond: 7013 // result: (Geq16 x y) 7014 for { 7015 v_0 := v.Args[0] 7016 if v_0.Op != OpLess16 { 7017 break 7018 } 7019 x := v_0.Args[0] 7020 y := v_0.Args[1] 7021 v.reset(OpGeq16) 7022 v.AddArg(x) 7023 v.AddArg(y) 7024 return true 7025 } 7026 // match: (Not (Less8 x y)) 7027 // cond: 7028 // result: (Geq8 x y) 7029 for { 7030 v_0 := v.Args[0] 7031 if v_0.Op != OpLess8 { 7032 break 7033 } 7034 x := v_0.Args[0] 7035 y := v_0.Args[1] 7036 v.reset(OpGeq8) 7037 v.AddArg(x) 7038 v.AddArg(y) 7039 return true 7040 } 7041 // match: (Not (Less64U x y)) 7042 // cond: 7043 // result: (Geq64U x y) 7044 for { 7045 v_0 := v.Args[0] 7046 if v_0.Op != OpLess64U { 7047 break 7048 } 7049 x := v_0.Args[0] 7050 y := v_0.Args[1] 7051 v.reset(OpGeq64U) 7052 v.AddArg(x) 7053 v.AddArg(y) 7054 return true 7055 } 7056 // match: (Not (Less32U x y)) 7057 // cond: 7058 // result: (Geq32U x y) 7059 for { 7060 v_0 := v.Args[0] 7061 if v_0.Op != OpLess32U { 7062 break 7063 } 7064 x := v_0.Args[0] 7065 y := v_0.Args[1] 7066 v.reset(OpGeq32U) 7067 v.AddArg(x) 
7068 v.AddArg(y) 7069 return true 7070 } 7071 // match: (Not (Less16U x y)) 7072 // cond: 7073 // result: (Geq16U x y) 7074 for { 7075 v_0 := v.Args[0] 7076 if v_0.Op != OpLess16U { 7077 break 7078 } 7079 x := v_0.Args[0] 7080 y := v_0.Args[1] 7081 v.reset(OpGeq16U) 7082 v.AddArg(x) 7083 v.AddArg(y) 7084 return true 7085 } 7086 // match: (Not (Less8U x y)) 7087 // cond: 7088 // result: (Geq8U x y) 7089 for { 7090 v_0 := v.Args[0] 7091 if v_0.Op != OpLess8U { 7092 break 7093 } 7094 x := v_0.Args[0] 7095 y := v_0.Args[1] 7096 v.reset(OpGeq8U) 7097 v.AddArg(x) 7098 v.AddArg(y) 7099 return true 7100 } 7101 // match: (Not (Leq64 x y)) 7102 // cond: 7103 // result: (Greater64 x y) 7104 for { 7105 v_0 := v.Args[0] 7106 if v_0.Op != OpLeq64 { 7107 break 7108 } 7109 x := v_0.Args[0] 7110 y := v_0.Args[1] 7111 v.reset(OpGreater64) 7112 v.AddArg(x) 7113 v.AddArg(y) 7114 return true 7115 } 7116 // match: (Not (Leq32 x y)) 7117 // cond: 7118 // result: (Greater32 x y) 7119 for { 7120 v_0 := v.Args[0] 7121 if v_0.Op != OpLeq32 { 7122 break 7123 } 7124 x := v_0.Args[0] 7125 y := v_0.Args[1] 7126 v.reset(OpGreater32) 7127 v.AddArg(x) 7128 v.AddArg(y) 7129 return true 7130 } 7131 // match: (Not (Leq16 x y)) 7132 // cond: 7133 // result: (Greater16 x y) 7134 for { 7135 v_0 := v.Args[0] 7136 if v_0.Op != OpLeq16 { 7137 break 7138 } 7139 x := v_0.Args[0] 7140 y := v_0.Args[1] 7141 v.reset(OpGreater16) 7142 v.AddArg(x) 7143 v.AddArg(y) 7144 return true 7145 } 7146 // match: (Not (Leq8 x y)) 7147 // cond: 7148 // result: (Greater8 x y) 7149 for { 7150 v_0 := v.Args[0] 7151 if v_0.Op != OpLeq8 { 7152 break 7153 } 7154 x := v_0.Args[0] 7155 y := v_0.Args[1] 7156 v.reset(OpGreater8) 7157 v.AddArg(x) 7158 v.AddArg(y) 7159 return true 7160 } 7161 // match: (Not (Leq64U x y)) 7162 // cond: 7163 // result: (Greater64U x y) 7164 for { 7165 v_0 := v.Args[0] 7166 if v_0.Op != OpLeq64U { 7167 break 7168 } 7169 x := v_0.Args[0] 7170 y := v_0.Args[1] 7171 v.reset(OpGreater64U) 7172 v.AddArg(x) 7173 
v.AddArg(y) 7174 return true 7175 } 7176 // match: (Not (Leq32U x y)) 7177 // cond: 7178 // result: (Greater32U x y) 7179 for { 7180 v_0 := v.Args[0] 7181 if v_0.Op != OpLeq32U { 7182 break 7183 } 7184 x := v_0.Args[0] 7185 y := v_0.Args[1] 7186 v.reset(OpGreater32U) 7187 v.AddArg(x) 7188 v.AddArg(y) 7189 return true 7190 } 7191 // match: (Not (Leq16U x y)) 7192 // cond: 7193 // result: (Greater16U x y) 7194 for { 7195 v_0 := v.Args[0] 7196 if v_0.Op != OpLeq16U { 7197 break 7198 } 7199 x := v_0.Args[0] 7200 y := v_0.Args[1] 7201 v.reset(OpGreater16U) 7202 v.AddArg(x) 7203 v.AddArg(y) 7204 return true 7205 } 7206 // match: (Not (Leq8U x y)) 7207 // cond: 7208 // result: (Greater8U x y) 7209 for { 7210 v_0 := v.Args[0] 7211 if v_0.Op != OpLeq8U { 7212 break 7213 } 7214 x := v_0.Args[0] 7215 y := v_0.Args[1] 7216 v.reset(OpGreater8U) 7217 v.AddArg(x) 7218 v.AddArg(y) 7219 return true 7220 } 7221 return false 7222 } 7223 func rewriteValuegeneric_OpOffPtr(v *Value, config *Config) bool { 7224 b := v.Block 7225 _ = b 7226 // match: (OffPtr (OffPtr p [b]) [a]) 7227 // cond: 7228 // result: (OffPtr p [a+b]) 7229 for { 7230 a := v.AuxInt 7231 v_0 := v.Args[0] 7232 if v_0.Op != OpOffPtr { 7233 break 7234 } 7235 b := v_0.AuxInt 7236 p := v_0.Args[0] 7237 v.reset(OpOffPtr) 7238 v.AuxInt = a + b 7239 v.AddArg(p) 7240 return true 7241 } 7242 // match: (OffPtr p [0]) 7243 // cond: v.Type.Compare(p.Type) == CMPeq 7244 // result: p 7245 for { 7246 if v.AuxInt != 0 { 7247 break 7248 } 7249 p := v.Args[0] 7250 if !(v.Type.Compare(p.Type) == CMPeq) { 7251 break 7252 } 7253 v.reset(OpCopy) 7254 v.Type = p.Type 7255 v.AddArg(p) 7256 return true 7257 } 7258 return false 7259 } 7260 func rewriteValuegeneric_OpOr16(v *Value, config *Config) bool { 7261 b := v.Block 7262 _ = b 7263 // match: (Or16 x (Const16 <t> [c])) 7264 // cond: x.Op != OpConst16 7265 // result: (Or16 (Const16 <t> [c]) x) 7266 for { 7267 x := v.Args[0] 7268 v_1 := v.Args[1] 7269 if v_1.Op != OpConst16 { 7270 break 7271 
} 7272 t := v_1.Type 7273 c := v_1.AuxInt 7274 if !(x.Op != OpConst16) { 7275 break 7276 } 7277 v.reset(OpOr16) 7278 v0 := b.NewValue0(v.Line, OpConst16, t) 7279 v0.AuxInt = c 7280 v.AddArg(v0) 7281 v.AddArg(x) 7282 return true 7283 } 7284 // match: (Or16 x x) 7285 // cond: 7286 // result: x 7287 for { 7288 x := v.Args[0] 7289 if x != v.Args[1] { 7290 break 7291 } 7292 v.reset(OpCopy) 7293 v.Type = x.Type 7294 v.AddArg(x) 7295 return true 7296 } 7297 // match: (Or16 (Const16 [0]) x) 7298 // cond: 7299 // result: x 7300 for { 7301 v_0 := v.Args[0] 7302 if v_0.Op != OpConst16 { 7303 break 7304 } 7305 if v_0.AuxInt != 0 { 7306 break 7307 } 7308 x := v.Args[1] 7309 v.reset(OpCopy) 7310 v.Type = x.Type 7311 v.AddArg(x) 7312 return true 7313 } 7314 // match: (Or16 (Const16 [-1]) _) 7315 // cond: 7316 // result: (Const16 [-1]) 7317 for { 7318 v_0 := v.Args[0] 7319 if v_0.Op != OpConst16 { 7320 break 7321 } 7322 if v_0.AuxInt != -1 { 7323 break 7324 } 7325 v.reset(OpConst16) 7326 v.AuxInt = -1 7327 return true 7328 } 7329 // match: (Or16 x (Or16 x y)) 7330 // cond: 7331 // result: (Or16 x y) 7332 for { 7333 x := v.Args[0] 7334 v_1 := v.Args[1] 7335 if v_1.Op != OpOr16 { 7336 break 7337 } 7338 if x != v_1.Args[0] { 7339 break 7340 } 7341 y := v_1.Args[1] 7342 v.reset(OpOr16) 7343 v.AddArg(x) 7344 v.AddArg(y) 7345 return true 7346 } 7347 // match: (Or16 x (Or16 y x)) 7348 // cond: 7349 // result: (Or16 x y) 7350 for { 7351 x := v.Args[0] 7352 v_1 := v.Args[1] 7353 if v_1.Op != OpOr16 { 7354 break 7355 } 7356 y := v_1.Args[0] 7357 if x != v_1.Args[1] { 7358 break 7359 } 7360 v.reset(OpOr16) 7361 v.AddArg(x) 7362 v.AddArg(y) 7363 return true 7364 } 7365 // match: (Or16 (Or16 x y) x) 7366 // cond: 7367 // result: (Or16 x y) 7368 for { 7369 v_0 := v.Args[0] 7370 if v_0.Op != OpOr16 { 7371 break 7372 } 7373 x := v_0.Args[0] 7374 y := v_0.Args[1] 7375 if x != v.Args[1] { 7376 break 7377 } 7378 v.reset(OpOr16) 7379 v.AddArg(x) 7380 v.AddArg(y) 7381 return true 7382 } 7383 // match: 
(Or16 (Or16 x y) y) 7384 // cond: 7385 // result: (Or16 x y) 7386 for { 7387 v_0 := v.Args[0] 7388 if v_0.Op != OpOr16 { 7389 break 7390 } 7391 x := v_0.Args[0] 7392 y := v_0.Args[1] 7393 if y != v.Args[1] { 7394 break 7395 } 7396 v.reset(OpOr16) 7397 v.AddArg(x) 7398 v.AddArg(y) 7399 return true 7400 } 7401 return false 7402 } 7403 func rewriteValuegeneric_OpOr32(v *Value, config *Config) bool { 7404 b := v.Block 7405 _ = b 7406 // match: (Or32 x (Const32 <t> [c])) 7407 // cond: x.Op != OpConst32 7408 // result: (Or32 (Const32 <t> [c]) x) 7409 for { 7410 x := v.Args[0] 7411 v_1 := v.Args[1] 7412 if v_1.Op != OpConst32 { 7413 break 7414 } 7415 t := v_1.Type 7416 c := v_1.AuxInt 7417 if !(x.Op != OpConst32) { 7418 break 7419 } 7420 v.reset(OpOr32) 7421 v0 := b.NewValue0(v.Line, OpConst32, t) 7422 v0.AuxInt = c 7423 v.AddArg(v0) 7424 v.AddArg(x) 7425 return true 7426 } 7427 // match: (Or32 x x) 7428 // cond: 7429 // result: x 7430 for { 7431 x := v.Args[0] 7432 if x != v.Args[1] { 7433 break 7434 } 7435 v.reset(OpCopy) 7436 v.Type = x.Type 7437 v.AddArg(x) 7438 return true 7439 } 7440 // match: (Or32 (Const32 [0]) x) 7441 // cond: 7442 // result: x 7443 for { 7444 v_0 := v.Args[0] 7445 if v_0.Op != OpConst32 { 7446 break 7447 } 7448 if v_0.AuxInt != 0 { 7449 break 7450 } 7451 x := v.Args[1] 7452 v.reset(OpCopy) 7453 v.Type = x.Type 7454 v.AddArg(x) 7455 return true 7456 } 7457 // match: (Or32 (Const32 [-1]) _) 7458 // cond: 7459 // result: (Const32 [-1]) 7460 for { 7461 v_0 := v.Args[0] 7462 if v_0.Op != OpConst32 { 7463 break 7464 } 7465 if v_0.AuxInt != -1 { 7466 break 7467 } 7468 v.reset(OpConst32) 7469 v.AuxInt = -1 7470 return true 7471 } 7472 // match: (Or32 x (Or32 x y)) 7473 // cond: 7474 // result: (Or32 x y) 7475 for { 7476 x := v.Args[0] 7477 v_1 := v.Args[1] 7478 if v_1.Op != OpOr32 { 7479 break 7480 } 7481 if x != v_1.Args[0] { 7482 break 7483 } 7484 y := v_1.Args[1] 7485 v.reset(OpOr32) 7486 v.AddArg(x) 7487 v.AddArg(y) 7488 return true 7489 } 7490 // 
match: (Or32 x (Or32 y x)) 7491 // cond: 7492 // result: (Or32 x y) 7493 for { 7494 x := v.Args[0] 7495 v_1 := v.Args[1] 7496 if v_1.Op != OpOr32 { 7497 break 7498 } 7499 y := v_1.Args[0] 7500 if x != v_1.Args[1] { 7501 break 7502 } 7503 v.reset(OpOr32) 7504 v.AddArg(x) 7505 v.AddArg(y) 7506 return true 7507 } 7508 // match: (Or32 (Or32 x y) x) 7509 // cond: 7510 // result: (Or32 x y) 7511 for { 7512 v_0 := v.Args[0] 7513 if v_0.Op != OpOr32 { 7514 break 7515 } 7516 x := v_0.Args[0] 7517 y := v_0.Args[1] 7518 if x != v.Args[1] { 7519 break 7520 } 7521 v.reset(OpOr32) 7522 v.AddArg(x) 7523 v.AddArg(y) 7524 return true 7525 } 7526 // match: (Or32 (Or32 x y) y) 7527 // cond: 7528 // result: (Or32 x y) 7529 for { 7530 v_0 := v.Args[0] 7531 if v_0.Op != OpOr32 { 7532 break 7533 } 7534 x := v_0.Args[0] 7535 y := v_0.Args[1] 7536 if y != v.Args[1] { 7537 break 7538 } 7539 v.reset(OpOr32) 7540 v.AddArg(x) 7541 v.AddArg(y) 7542 return true 7543 } 7544 return false 7545 } 7546 func rewriteValuegeneric_OpOr64(v *Value, config *Config) bool { 7547 b := v.Block 7548 _ = b 7549 // match: (Or64 x (Const64 <t> [c])) 7550 // cond: x.Op != OpConst64 7551 // result: (Or64 (Const64 <t> [c]) x) 7552 for { 7553 x := v.Args[0] 7554 v_1 := v.Args[1] 7555 if v_1.Op != OpConst64 { 7556 break 7557 } 7558 t := v_1.Type 7559 c := v_1.AuxInt 7560 if !(x.Op != OpConst64) { 7561 break 7562 } 7563 v.reset(OpOr64) 7564 v0 := b.NewValue0(v.Line, OpConst64, t) 7565 v0.AuxInt = c 7566 v.AddArg(v0) 7567 v.AddArg(x) 7568 return true 7569 } 7570 // match: (Or64 x x) 7571 // cond: 7572 // result: x 7573 for { 7574 x := v.Args[0] 7575 if x != v.Args[1] { 7576 break 7577 } 7578 v.reset(OpCopy) 7579 v.Type = x.Type 7580 v.AddArg(x) 7581 return true 7582 } 7583 // match: (Or64 (Const64 [0]) x) 7584 // cond: 7585 // result: x 7586 for { 7587 v_0 := v.Args[0] 7588 if v_0.Op != OpConst64 { 7589 break 7590 } 7591 if v_0.AuxInt != 0 { 7592 break 7593 } 7594 x := v.Args[1] 7595 v.reset(OpCopy) 7596 v.Type = x.Type 
7597 v.AddArg(x) 7598 return true 7599 } 7600 // match: (Or64 (Const64 [-1]) _) 7601 // cond: 7602 // result: (Const64 [-1]) 7603 for { 7604 v_0 := v.Args[0] 7605 if v_0.Op != OpConst64 { 7606 break 7607 } 7608 if v_0.AuxInt != -1 { 7609 break 7610 } 7611 v.reset(OpConst64) 7612 v.AuxInt = -1 7613 return true 7614 } 7615 // match: (Or64 x (Or64 x y)) 7616 // cond: 7617 // result: (Or64 x y) 7618 for { 7619 x := v.Args[0] 7620 v_1 := v.Args[1] 7621 if v_1.Op != OpOr64 { 7622 break 7623 } 7624 if x != v_1.Args[0] { 7625 break 7626 } 7627 y := v_1.Args[1] 7628 v.reset(OpOr64) 7629 v.AddArg(x) 7630 v.AddArg(y) 7631 return true 7632 } 7633 // match: (Or64 x (Or64 y x)) 7634 // cond: 7635 // result: (Or64 x y) 7636 for { 7637 x := v.Args[0] 7638 v_1 := v.Args[1] 7639 if v_1.Op != OpOr64 { 7640 break 7641 } 7642 y := v_1.Args[0] 7643 if x != v_1.Args[1] { 7644 break 7645 } 7646 v.reset(OpOr64) 7647 v.AddArg(x) 7648 v.AddArg(y) 7649 return true 7650 } 7651 // match: (Or64 (Or64 x y) x) 7652 // cond: 7653 // result: (Or64 x y) 7654 for { 7655 v_0 := v.Args[0] 7656 if v_0.Op != OpOr64 { 7657 break 7658 } 7659 x := v_0.Args[0] 7660 y := v_0.Args[1] 7661 if x != v.Args[1] { 7662 break 7663 } 7664 v.reset(OpOr64) 7665 v.AddArg(x) 7666 v.AddArg(y) 7667 return true 7668 } 7669 // match: (Or64 (Or64 x y) y) 7670 // cond: 7671 // result: (Or64 x y) 7672 for { 7673 v_0 := v.Args[0] 7674 if v_0.Op != OpOr64 { 7675 break 7676 } 7677 x := v_0.Args[0] 7678 y := v_0.Args[1] 7679 if y != v.Args[1] { 7680 break 7681 } 7682 v.reset(OpOr64) 7683 v.AddArg(x) 7684 v.AddArg(y) 7685 return true 7686 } 7687 return false 7688 } 7689 func rewriteValuegeneric_OpOr8(v *Value, config *Config) bool { 7690 b := v.Block 7691 _ = b 7692 // match: (Or8 x (Const8 <t> [c])) 7693 // cond: x.Op != OpConst8 7694 // result: (Or8 (Const8 <t> [c]) x) 7695 for { 7696 x := v.Args[0] 7697 v_1 := v.Args[1] 7698 if v_1.Op != OpConst8 { 7699 break 7700 } 7701 t := v_1.Type 7702 c := v_1.AuxInt 7703 if !(x.Op != 
OpConst8) { 7704 break 7705 } 7706 v.reset(OpOr8) 7707 v0 := b.NewValue0(v.Line, OpConst8, t) 7708 v0.AuxInt = c 7709 v.AddArg(v0) 7710 v.AddArg(x) 7711 return true 7712 } 7713 // match: (Or8 x x) 7714 // cond: 7715 // result: x 7716 for { 7717 x := v.Args[0] 7718 if x != v.Args[1] { 7719 break 7720 } 7721 v.reset(OpCopy) 7722 v.Type = x.Type 7723 v.AddArg(x) 7724 return true 7725 } 7726 // match: (Or8 (Const8 [0]) x) 7727 // cond: 7728 // result: x 7729 for { 7730 v_0 := v.Args[0] 7731 if v_0.Op != OpConst8 { 7732 break 7733 } 7734 if v_0.AuxInt != 0 { 7735 break 7736 } 7737 x := v.Args[1] 7738 v.reset(OpCopy) 7739 v.Type = x.Type 7740 v.AddArg(x) 7741 return true 7742 } 7743 // match: (Or8 (Const8 [-1]) _) 7744 // cond: 7745 // result: (Const8 [-1]) 7746 for { 7747 v_0 := v.Args[0] 7748 if v_0.Op != OpConst8 { 7749 break 7750 } 7751 if v_0.AuxInt != -1 { 7752 break 7753 } 7754 v.reset(OpConst8) 7755 v.AuxInt = -1 7756 return true 7757 } 7758 // match: (Or8 x (Or8 x y)) 7759 // cond: 7760 // result: (Or8 x y) 7761 for { 7762 x := v.Args[0] 7763 v_1 := v.Args[1] 7764 if v_1.Op != OpOr8 { 7765 break 7766 } 7767 if x != v_1.Args[0] { 7768 break 7769 } 7770 y := v_1.Args[1] 7771 v.reset(OpOr8) 7772 v.AddArg(x) 7773 v.AddArg(y) 7774 return true 7775 } 7776 // match: (Or8 x (Or8 y x)) 7777 // cond: 7778 // result: (Or8 x y) 7779 for { 7780 x := v.Args[0] 7781 v_1 := v.Args[1] 7782 if v_1.Op != OpOr8 { 7783 break 7784 } 7785 y := v_1.Args[0] 7786 if x != v_1.Args[1] { 7787 break 7788 } 7789 v.reset(OpOr8) 7790 v.AddArg(x) 7791 v.AddArg(y) 7792 return true 7793 } 7794 // match: (Or8 (Or8 x y) x) 7795 // cond: 7796 // result: (Or8 x y) 7797 for { 7798 v_0 := v.Args[0] 7799 if v_0.Op != OpOr8 { 7800 break 7801 } 7802 x := v_0.Args[0] 7803 y := v_0.Args[1] 7804 if x != v.Args[1] { 7805 break 7806 } 7807 v.reset(OpOr8) 7808 v.AddArg(x) 7809 v.AddArg(y) 7810 return true 7811 } 7812 // match: (Or8 (Or8 x y) y) 7813 // cond: 7814 // result: (Or8 x y) 7815 for { 7816 v_0 := 
v.Args[0] 7817 if v_0.Op != OpOr8 { 7818 break 7819 } 7820 x := v_0.Args[0] 7821 y := v_0.Args[1] 7822 if y != v.Args[1] { 7823 break 7824 } 7825 v.reset(OpOr8) 7826 v.AddArg(x) 7827 v.AddArg(y) 7828 return true 7829 } 7830 return false 7831 } 7832 func rewriteValuegeneric_OpPhi(v *Value, config *Config) bool { 7833 b := v.Block 7834 _ = b 7835 // match: (Phi (Const8 [c]) (Const8 [c])) 7836 // cond: 7837 // result: (Const8 [c]) 7838 for { 7839 v_0 := v.Args[0] 7840 if v_0.Op != OpConst8 { 7841 break 7842 } 7843 c := v_0.AuxInt 7844 v_1 := v.Args[1] 7845 if v_1.Op != OpConst8 { 7846 break 7847 } 7848 if v_1.AuxInt != c { 7849 break 7850 } 7851 if len(v.Args) != 2 { 7852 break 7853 } 7854 v.reset(OpConst8) 7855 v.AuxInt = c 7856 return true 7857 } 7858 // match: (Phi (Const16 [c]) (Const16 [c])) 7859 // cond: 7860 // result: (Const16 [c]) 7861 for { 7862 v_0 := v.Args[0] 7863 if v_0.Op != OpConst16 { 7864 break 7865 } 7866 c := v_0.AuxInt 7867 v_1 := v.Args[1] 7868 if v_1.Op != OpConst16 { 7869 break 7870 } 7871 if v_1.AuxInt != c { 7872 break 7873 } 7874 if len(v.Args) != 2 { 7875 break 7876 } 7877 v.reset(OpConst16) 7878 v.AuxInt = c 7879 return true 7880 } 7881 // match: (Phi (Const32 [c]) (Const32 [c])) 7882 // cond: 7883 // result: (Const32 [c]) 7884 for { 7885 v_0 := v.Args[0] 7886 if v_0.Op != OpConst32 { 7887 break 7888 } 7889 c := v_0.AuxInt 7890 v_1 := v.Args[1] 7891 if v_1.Op != OpConst32 { 7892 break 7893 } 7894 if v_1.AuxInt != c { 7895 break 7896 } 7897 if len(v.Args) != 2 { 7898 break 7899 } 7900 v.reset(OpConst32) 7901 v.AuxInt = c 7902 return true 7903 } 7904 // match: (Phi (Const64 [c]) (Const64 [c])) 7905 // cond: 7906 // result: (Const64 [c]) 7907 for { 7908 v_0 := v.Args[0] 7909 if v_0.Op != OpConst64 { 7910 break 7911 } 7912 c := v_0.AuxInt 7913 v_1 := v.Args[1] 7914 if v_1.Op != OpConst64 { 7915 break 7916 } 7917 if v_1.AuxInt != c { 7918 break 7919 } 7920 if len(v.Args) != 2 { 7921 break 7922 } 7923 v.reset(OpConst64) 7924 v.AuxInt = c 7925 
return true 7926 } 7927 return false 7928 } 7929 func rewriteValuegeneric_OpPtrIndex(v *Value, config *Config) bool { 7930 b := v.Block 7931 _ = b 7932 // match: (PtrIndex <t> ptr idx) 7933 // cond: config.PtrSize == 4 7934 // result: (AddPtr ptr (Mul32 <config.fe.TypeInt()> idx (Const32 <config.fe.TypeInt()> [t.ElemType().Size()]))) 7935 for { 7936 t := v.Type 7937 ptr := v.Args[0] 7938 idx := v.Args[1] 7939 if !(config.PtrSize == 4) { 7940 break 7941 } 7942 v.reset(OpAddPtr) 7943 v.AddArg(ptr) 7944 v0 := b.NewValue0(v.Line, OpMul32, config.fe.TypeInt()) 7945 v0.AddArg(idx) 7946 v1 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt()) 7947 v1.AuxInt = t.ElemType().Size() 7948 v0.AddArg(v1) 7949 v.AddArg(v0) 7950 return true 7951 } 7952 // match: (PtrIndex <t> ptr idx) 7953 // cond: config.PtrSize == 8 7954 // result: (AddPtr ptr (Mul64 <config.fe.TypeInt()> idx (Const64 <config.fe.TypeInt()> [t.ElemType().Size()]))) 7955 for { 7956 t := v.Type 7957 ptr := v.Args[0] 7958 idx := v.Args[1] 7959 if !(config.PtrSize == 8) { 7960 break 7961 } 7962 v.reset(OpAddPtr) 7963 v.AddArg(ptr) 7964 v0 := b.NewValue0(v.Line, OpMul64, config.fe.TypeInt()) 7965 v0.AddArg(idx) 7966 v1 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt()) 7967 v1.AuxInt = t.ElemType().Size() 7968 v0.AddArg(v1) 7969 v.AddArg(v0) 7970 return true 7971 } 7972 return false 7973 } 7974 func rewriteValuegeneric_OpRsh16Ux16(v *Value, config *Config) bool { 7975 b := v.Block 7976 _ = b 7977 // match: (Rsh16Ux16 <t> x (Const16 [c])) 7978 // cond: 7979 // result: (Rsh16Ux64 x (Const64 <t> [int64(uint16(c))])) 7980 for { 7981 t := v.Type 7982 x := v.Args[0] 7983 v_1 := v.Args[1] 7984 if v_1.Op != OpConst16 { 7985 break 7986 } 7987 c := v_1.AuxInt 7988 v.reset(OpRsh16Ux64) 7989 v.AddArg(x) 7990 v0 := b.NewValue0(v.Line, OpConst64, t) 7991 v0.AuxInt = int64(uint16(c)) 7992 v.AddArg(v0) 7993 return true 7994 } 7995 // match: (Rsh16Ux16 (Const16 [0]) _) 7996 // cond: 7997 // result: (Const16 [0]) 7998 for { 7999 
v_0 := v.Args[0] 8000 if v_0.Op != OpConst16 { 8001 break 8002 } 8003 if v_0.AuxInt != 0 { 8004 break 8005 } 8006 v.reset(OpConst16) 8007 v.AuxInt = 0 8008 return true 8009 } 8010 return false 8011 } 8012 func rewriteValuegeneric_OpRsh16Ux32(v *Value, config *Config) bool { 8013 b := v.Block 8014 _ = b 8015 // match: (Rsh16Ux32 <t> x (Const32 [c])) 8016 // cond: 8017 // result: (Rsh16Ux64 x (Const64 <t> [int64(uint32(c))])) 8018 for { 8019 t := v.Type 8020 x := v.Args[0] 8021 v_1 := v.Args[1] 8022 if v_1.Op != OpConst32 { 8023 break 8024 } 8025 c := v_1.AuxInt 8026 v.reset(OpRsh16Ux64) 8027 v.AddArg(x) 8028 v0 := b.NewValue0(v.Line, OpConst64, t) 8029 v0.AuxInt = int64(uint32(c)) 8030 v.AddArg(v0) 8031 return true 8032 } 8033 // match: (Rsh16Ux32 (Const16 [0]) _) 8034 // cond: 8035 // result: (Const16 [0]) 8036 for { 8037 v_0 := v.Args[0] 8038 if v_0.Op != OpConst16 { 8039 break 8040 } 8041 if v_0.AuxInt != 0 { 8042 break 8043 } 8044 v.reset(OpConst16) 8045 v.AuxInt = 0 8046 return true 8047 } 8048 return false 8049 } 8050 func rewriteValuegeneric_OpRsh16Ux64(v *Value, config *Config) bool { 8051 b := v.Block 8052 _ = b 8053 // match: (Rsh16Ux64 (Const16 [c]) (Const64 [d])) 8054 // cond: 8055 // result: (Const16 [int64(int16(uint16(c) >> uint64(d)))]) 8056 for { 8057 v_0 := v.Args[0] 8058 if v_0.Op != OpConst16 { 8059 break 8060 } 8061 c := v_0.AuxInt 8062 v_1 := v.Args[1] 8063 if v_1.Op != OpConst64 { 8064 break 8065 } 8066 d := v_1.AuxInt 8067 v.reset(OpConst16) 8068 v.AuxInt = int64(int16(uint16(c) >> uint64(d))) 8069 return true 8070 } 8071 // match: (Rsh16Ux64 x (Const64 [0])) 8072 // cond: 8073 // result: x 8074 for { 8075 x := v.Args[0] 8076 v_1 := v.Args[1] 8077 if v_1.Op != OpConst64 { 8078 break 8079 } 8080 if v_1.AuxInt != 0 { 8081 break 8082 } 8083 v.reset(OpCopy) 8084 v.Type = x.Type 8085 v.AddArg(x) 8086 return true 8087 } 8088 // match: (Rsh16Ux64 (Const16 [0]) _) 8089 // cond: 8090 // result: (Const16 [0]) 8091 for { 8092 v_0 := v.Args[0] 8093 if 
v_0.Op != OpConst16 { 8094 break 8095 } 8096 if v_0.AuxInt != 0 { 8097 break 8098 } 8099 v.reset(OpConst16) 8100 v.AuxInt = 0 8101 return true 8102 } 8103 // match: (Rsh16Ux64 _ (Const64 [c])) 8104 // cond: uint64(c) >= 16 8105 // result: (Const16 [0]) 8106 for { 8107 v_1 := v.Args[1] 8108 if v_1.Op != OpConst64 { 8109 break 8110 } 8111 c := v_1.AuxInt 8112 if !(uint64(c) >= 16) { 8113 break 8114 } 8115 v.reset(OpConst16) 8116 v.AuxInt = 0 8117 return true 8118 } 8119 // match: (Rsh16Ux64 <t> (Rsh16Ux64 x (Const64 [c])) (Const64 [d])) 8120 // cond: !uaddOvf(c,d) 8121 // result: (Rsh16Ux64 x (Const64 <t> [c+d])) 8122 for { 8123 t := v.Type 8124 v_0 := v.Args[0] 8125 if v_0.Op != OpRsh16Ux64 { 8126 break 8127 } 8128 x := v_0.Args[0] 8129 v_0_1 := v_0.Args[1] 8130 if v_0_1.Op != OpConst64 { 8131 break 8132 } 8133 c := v_0_1.AuxInt 8134 v_1 := v.Args[1] 8135 if v_1.Op != OpConst64 { 8136 break 8137 } 8138 d := v_1.AuxInt 8139 if !(!uaddOvf(c, d)) { 8140 break 8141 } 8142 v.reset(OpRsh16Ux64) 8143 v.AddArg(x) 8144 v0 := b.NewValue0(v.Line, OpConst64, t) 8145 v0.AuxInt = c + d 8146 v.AddArg(v0) 8147 return true 8148 } 8149 // match: (Rsh16Ux64 (Lsh16x64 (Rsh16Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) 8150 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) 8151 // result: (Rsh16Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3])) 8152 for { 8153 v_0 := v.Args[0] 8154 if v_0.Op != OpLsh16x64 { 8155 break 8156 } 8157 v_0_0 := v_0.Args[0] 8158 if v_0_0.Op != OpRsh16Ux64 { 8159 break 8160 } 8161 x := v_0_0.Args[0] 8162 v_0_0_1 := v_0_0.Args[1] 8163 if v_0_0_1.Op != OpConst64 { 8164 break 8165 } 8166 c1 := v_0_0_1.AuxInt 8167 v_0_1 := v_0.Args[1] 8168 if v_0_1.Op != OpConst64 { 8169 break 8170 } 8171 c2 := v_0_1.AuxInt 8172 v_1 := v.Args[1] 8173 if v_1.Op != OpConst64 { 8174 break 8175 } 8176 c3 := v_1.AuxInt 8177 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { 8178 break 8179 } 8180 
v.reset(OpRsh16Ux64) 8181 v.AddArg(x) 8182 v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64()) 8183 v0.AuxInt = c1 - c2 + c3 8184 v.AddArg(v0) 8185 return true 8186 } 8187 return false 8188 } 8189 func rewriteValuegeneric_OpRsh16Ux8(v *Value, config *Config) bool { 8190 b := v.Block 8191 _ = b 8192 // match: (Rsh16Ux8 <t> x (Const8 [c])) 8193 // cond: 8194 // result: (Rsh16Ux64 x (Const64 <t> [int64(uint8(c))])) 8195 for { 8196 t := v.Type 8197 x := v.Args[0] 8198 v_1 := v.Args[1] 8199 if v_1.Op != OpConst8 { 8200 break 8201 } 8202 c := v_1.AuxInt 8203 v.reset(OpRsh16Ux64) 8204 v.AddArg(x) 8205 v0 := b.NewValue0(v.Line, OpConst64, t) 8206 v0.AuxInt = int64(uint8(c)) 8207 v.AddArg(v0) 8208 return true 8209 } 8210 // match: (Rsh16Ux8 (Const16 [0]) _) 8211 // cond: 8212 // result: (Const16 [0]) 8213 for { 8214 v_0 := v.Args[0] 8215 if v_0.Op != OpConst16 { 8216 break 8217 } 8218 if v_0.AuxInt != 0 { 8219 break 8220 } 8221 v.reset(OpConst16) 8222 v.AuxInt = 0 8223 return true 8224 } 8225 return false 8226 } 8227 func rewriteValuegeneric_OpRsh16x16(v *Value, config *Config) bool { 8228 b := v.Block 8229 _ = b 8230 // match: (Rsh16x16 <t> x (Const16 [c])) 8231 // cond: 8232 // result: (Rsh16x64 x (Const64 <t> [int64(uint16(c))])) 8233 for { 8234 t := v.Type 8235 x := v.Args[0] 8236 v_1 := v.Args[1] 8237 if v_1.Op != OpConst16 { 8238 break 8239 } 8240 c := v_1.AuxInt 8241 v.reset(OpRsh16x64) 8242 v.AddArg(x) 8243 v0 := b.NewValue0(v.Line, OpConst64, t) 8244 v0.AuxInt = int64(uint16(c)) 8245 v.AddArg(v0) 8246 return true 8247 } 8248 // match: (Rsh16x16 (Const16 [0]) _) 8249 // cond: 8250 // result: (Const16 [0]) 8251 for { 8252 v_0 := v.Args[0] 8253 if v_0.Op != OpConst16 { 8254 break 8255 } 8256 if v_0.AuxInt != 0 { 8257 break 8258 } 8259 v.reset(OpConst16) 8260 v.AuxInt = 0 8261 return true 8262 } 8263 return false 8264 } 8265 func rewriteValuegeneric_OpRsh16x32(v *Value, config *Config) bool { 8266 b := v.Block 8267 _ = b 8268 // match: (Rsh16x32 <t> x (Const32 
[c])) 8269 // cond: 8270 // result: (Rsh16x64 x (Const64 <t> [int64(uint32(c))])) 8271 for { 8272 t := v.Type 8273 x := v.Args[0] 8274 v_1 := v.Args[1] 8275 if v_1.Op != OpConst32 { 8276 break 8277 } 8278 c := v_1.AuxInt 8279 v.reset(OpRsh16x64) 8280 v.AddArg(x) 8281 v0 := b.NewValue0(v.Line, OpConst64, t) 8282 v0.AuxInt = int64(uint32(c)) 8283 v.AddArg(v0) 8284 return true 8285 } 8286 // match: (Rsh16x32 (Const16 [0]) _) 8287 // cond: 8288 // result: (Const16 [0]) 8289 for { 8290 v_0 := v.Args[0] 8291 if v_0.Op != OpConst16 { 8292 break 8293 } 8294 if v_0.AuxInt != 0 { 8295 break 8296 } 8297 v.reset(OpConst16) 8298 v.AuxInt = 0 8299 return true 8300 } 8301 return false 8302 } 8303 func rewriteValuegeneric_OpRsh16x64(v *Value, config *Config) bool { 8304 b := v.Block 8305 _ = b 8306 // match: (Rsh16x64 (Const16 [c]) (Const64 [d])) 8307 // cond: 8308 // result: (Const16 [int64(int16(c) >> uint64(d))]) 8309 for { 8310 v_0 := v.Args[0] 8311 if v_0.Op != OpConst16 { 8312 break 8313 } 8314 c := v_0.AuxInt 8315 v_1 := v.Args[1] 8316 if v_1.Op != OpConst64 { 8317 break 8318 } 8319 d := v_1.AuxInt 8320 v.reset(OpConst16) 8321 v.AuxInt = int64(int16(c) >> uint64(d)) 8322 return true 8323 } 8324 // match: (Rsh16x64 x (Const64 [0])) 8325 // cond: 8326 // result: x 8327 for { 8328 x := v.Args[0] 8329 v_1 := v.Args[1] 8330 if v_1.Op != OpConst64 { 8331 break 8332 } 8333 if v_1.AuxInt != 0 { 8334 break 8335 } 8336 v.reset(OpCopy) 8337 v.Type = x.Type 8338 v.AddArg(x) 8339 return true 8340 } 8341 // match: (Rsh16x64 (Const16 [0]) _) 8342 // cond: 8343 // result: (Const16 [0]) 8344 for { 8345 v_0 := v.Args[0] 8346 if v_0.Op != OpConst16 { 8347 break 8348 } 8349 if v_0.AuxInt != 0 { 8350 break 8351 } 8352 v.reset(OpConst16) 8353 v.AuxInt = 0 8354 return true 8355 } 8356 // match: (Rsh16x64 <t> (Rsh16x64 x (Const64 [c])) (Const64 [d])) 8357 // cond: !uaddOvf(c,d) 8358 // result: (Rsh16x64 x (Const64 <t> [c+d])) 8359 for { 8360 t := v.Type 8361 v_0 := v.Args[0] 8362 if v_0.Op != 
OpRsh16x64 { 8363 break 8364 } 8365 x := v_0.Args[0] 8366 v_0_1 := v_0.Args[1] 8367 if v_0_1.Op != OpConst64 { 8368 break 8369 } 8370 c := v_0_1.AuxInt 8371 v_1 := v.Args[1] 8372 if v_1.Op != OpConst64 { 8373 break 8374 } 8375 d := v_1.AuxInt 8376 if !(!uaddOvf(c, d)) { 8377 break 8378 } 8379 v.reset(OpRsh16x64) 8380 v.AddArg(x) 8381 v0 := b.NewValue0(v.Line, OpConst64, t) 8382 v0.AuxInt = c + d 8383 v.AddArg(v0) 8384 return true 8385 } 8386 return false 8387 } 8388 func rewriteValuegeneric_OpRsh16x8(v *Value, config *Config) bool { 8389 b := v.Block 8390 _ = b 8391 // match: (Rsh16x8 <t> x (Const8 [c])) 8392 // cond: 8393 // result: (Rsh16x64 x (Const64 <t> [int64(uint8(c))])) 8394 for { 8395 t := v.Type 8396 x := v.Args[0] 8397 v_1 := v.Args[1] 8398 if v_1.Op != OpConst8 { 8399 break 8400 } 8401 c := v_1.AuxInt 8402 v.reset(OpRsh16x64) 8403 v.AddArg(x) 8404 v0 := b.NewValue0(v.Line, OpConst64, t) 8405 v0.AuxInt = int64(uint8(c)) 8406 v.AddArg(v0) 8407 return true 8408 } 8409 // match: (Rsh16x8 (Const16 [0]) _) 8410 // cond: 8411 // result: (Const16 [0]) 8412 for { 8413 v_0 := v.Args[0] 8414 if v_0.Op != OpConst16 { 8415 break 8416 } 8417 if v_0.AuxInt != 0 { 8418 break 8419 } 8420 v.reset(OpConst16) 8421 v.AuxInt = 0 8422 return true 8423 } 8424 return false 8425 } 8426 func rewriteValuegeneric_OpRsh32Ux16(v *Value, config *Config) bool { 8427 b := v.Block 8428 _ = b 8429 // match: (Rsh32Ux16 <t> x (Const16 [c])) 8430 // cond: 8431 // result: (Rsh32Ux64 x (Const64 <t> [int64(uint16(c))])) 8432 for { 8433 t := v.Type 8434 x := v.Args[0] 8435 v_1 := v.Args[1] 8436 if v_1.Op != OpConst16 { 8437 break 8438 } 8439 c := v_1.AuxInt 8440 v.reset(OpRsh32Ux64) 8441 v.AddArg(x) 8442 v0 := b.NewValue0(v.Line, OpConst64, t) 8443 v0.AuxInt = int64(uint16(c)) 8444 v.AddArg(v0) 8445 return true 8446 } 8447 // match: (Rsh32Ux16 (Const32 [0]) _) 8448 // cond: 8449 // result: (Const32 [0]) 8450 for { 8451 v_0 := v.Args[0] 8452 if v_0.Op != OpConst32 { 8453 break 8454 } 8455 if 
v_0.AuxInt != 0 { 8456 break 8457 } 8458 v.reset(OpConst32) 8459 v.AuxInt = 0 8460 return true 8461 } 8462 return false 8463 } 8464 func rewriteValuegeneric_OpRsh32Ux32(v *Value, config *Config) bool { 8465 b := v.Block 8466 _ = b 8467 // match: (Rsh32Ux32 <t> x (Const32 [c])) 8468 // cond: 8469 // result: (Rsh32Ux64 x (Const64 <t> [int64(uint32(c))])) 8470 for { 8471 t := v.Type 8472 x := v.Args[0] 8473 v_1 := v.Args[1] 8474 if v_1.Op != OpConst32 { 8475 break 8476 } 8477 c := v_1.AuxInt 8478 v.reset(OpRsh32Ux64) 8479 v.AddArg(x) 8480 v0 := b.NewValue0(v.Line, OpConst64, t) 8481 v0.AuxInt = int64(uint32(c)) 8482 v.AddArg(v0) 8483 return true 8484 } 8485 // match: (Rsh32Ux32 (Const32 [0]) _) 8486 // cond: 8487 // result: (Const32 [0]) 8488 for { 8489 v_0 := v.Args[0] 8490 if v_0.Op != OpConst32 { 8491 break 8492 } 8493 if v_0.AuxInt != 0 { 8494 break 8495 } 8496 v.reset(OpConst32) 8497 v.AuxInt = 0 8498 return true 8499 } 8500 return false 8501 } 8502 func rewriteValuegeneric_OpRsh32Ux64(v *Value, config *Config) bool { 8503 b := v.Block 8504 _ = b 8505 // match: (Rsh32Ux64 (Const32 [c]) (Const64 [d])) 8506 // cond: 8507 // result: (Const32 [int64(int32(uint32(c) >> uint64(d)))]) 8508 for { 8509 v_0 := v.Args[0] 8510 if v_0.Op != OpConst32 { 8511 break 8512 } 8513 c := v_0.AuxInt 8514 v_1 := v.Args[1] 8515 if v_1.Op != OpConst64 { 8516 break 8517 } 8518 d := v_1.AuxInt 8519 v.reset(OpConst32) 8520 v.AuxInt = int64(int32(uint32(c) >> uint64(d))) 8521 return true 8522 } 8523 // match: (Rsh32Ux64 x (Const64 [0])) 8524 // cond: 8525 // result: x 8526 for { 8527 x := v.Args[0] 8528 v_1 := v.Args[1] 8529 if v_1.Op != OpConst64 { 8530 break 8531 } 8532 if v_1.AuxInt != 0 { 8533 break 8534 } 8535 v.reset(OpCopy) 8536 v.Type = x.Type 8537 v.AddArg(x) 8538 return true 8539 } 8540 // match: (Rsh32Ux64 (Const32 [0]) _) 8541 // cond: 8542 // result: (Const32 [0]) 8543 for { 8544 v_0 := v.Args[0] 8545 if v_0.Op != OpConst32 { 8546 break 8547 } 8548 if v_0.AuxInt != 0 { 8549 
break 8550 } 8551 v.reset(OpConst32) 8552 v.AuxInt = 0 8553 return true 8554 } 8555 // match: (Rsh32Ux64 _ (Const64 [c])) 8556 // cond: uint64(c) >= 32 8557 // result: (Const32 [0]) 8558 for { 8559 v_1 := v.Args[1] 8560 if v_1.Op != OpConst64 { 8561 break 8562 } 8563 c := v_1.AuxInt 8564 if !(uint64(c) >= 32) { 8565 break 8566 } 8567 v.reset(OpConst32) 8568 v.AuxInt = 0 8569 return true 8570 } 8571 // match: (Rsh32Ux64 <t> (Rsh32Ux64 x (Const64 [c])) (Const64 [d])) 8572 // cond: !uaddOvf(c,d) 8573 // result: (Rsh32Ux64 x (Const64 <t> [c+d])) 8574 for { 8575 t := v.Type 8576 v_0 := v.Args[0] 8577 if v_0.Op != OpRsh32Ux64 { 8578 break 8579 } 8580 x := v_0.Args[0] 8581 v_0_1 := v_0.Args[1] 8582 if v_0_1.Op != OpConst64 { 8583 break 8584 } 8585 c := v_0_1.AuxInt 8586 v_1 := v.Args[1] 8587 if v_1.Op != OpConst64 { 8588 break 8589 } 8590 d := v_1.AuxInt 8591 if !(!uaddOvf(c, d)) { 8592 break 8593 } 8594 v.reset(OpRsh32Ux64) 8595 v.AddArg(x) 8596 v0 := b.NewValue0(v.Line, OpConst64, t) 8597 v0.AuxInt = c + d 8598 v.AddArg(v0) 8599 return true 8600 } 8601 // match: (Rsh32Ux64 (Lsh32x64 (Rsh32Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) 8602 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) 8603 // result: (Rsh32Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3])) 8604 for { 8605 v_0 := v.Args[0] 8606 if v_0.Op != OpLsh32x64 { 8607 break 8608 } 8609 v_0_0 := v_0.Args[0] 8610 if v_0_0.Op != OpRsh32Ux64 { 8611 break 8612 } 8613 x := v_0_0.Args[0] 8614 v_0_0_1 := v_0_0.Args[1] 8615 if v_0_0_1.Op != OpConst64 { 8616 break 8617 } 8618 c1 := v_0_0_1.AuxInt 8619 v_0_1 := v_0.Args[1] 8620 if v_0_1.Op != OpConst64 { 8621 break 8622 } 8623 c2 := v_0_1.AuxInt 8624 v_1 := v.Args[1] 8625 if v_1.Op != OpConst64 { 8626 break 8627 } 8628 c3 := v_1.AuxInt 8629 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { 8630 break 8631 } 8632 v.reset(OpRsh32Ux64) 8633 v.AddArg(x) 8634 v0 := b.NewValue0(v.Line, OpConst64, 
config.fe.TypeUInt64()) 8635 v0.AuxInt = c1 - c2 + c3 8636 v.AddArg(v0) 8637 return true 8638 } 8639 return false 8640 } 8641 func rewriteValuegeneric_OpRsh32Ux8(v *Value, config *Config) bool { 8642 b := v.Block 8643 _ = b 8644 // match: (Rsh32Ux8 <t> x (Const8 [c])) 8645 // cond: 8646 // result: (Rsh32Ux64 x (Const64 <t> [int64(uint8(c))])) 8647 for { 8648 t := v.Type 8649 x := v.Args[0] 8650 v_1 := v.Args[1] 8651 if v_1.Op != OpConst8 { 8652 break 8653 } 8654 c := v_1.AuxInt 8655 v.reset(OpRsh32Ux64) 8656 v.AddArg(x) 8657 v0 := b.NewValue0(v.Line, OpConst64, t) 8658 v0.AuxInt = int64(uint8(c)) 8659 v.AddArg(v0) 8660 return true 8661 } 8662 // match: (Rsh32Ux8 (Const32 [0]) _) 8663 // cond: 8664 // result: (Const32 [0]) 8665 for { 8666 v_0 := v.Args[0] 8667 if v_0.Op != OpConst32 { 8668 break 8669 } 8670 if v_0.AuxInt != 0 { 8671 break 8672 } 8673 v.reset(OpConst32) 8674 v.AuxInt = 0 8675 return true 8676 } 8677 return false 8678 } 8679 func rewriteValuegeneric_OpRsh32x16(v *Value, config *Config) bool { 8680 b := v.Block 8681 _ = b 8682 // match: (Rsh32x16 <t> x (Const16 [c])) 8683 // cond: 8684 // result: (Rsh32x64 x (Const64 <t> [int64(uint16(c))])) 8685 for { 8686 t := v.Type 8687 x := v.Args[0] 8688 v_1 := v.Args[1] 8689 if v_1.Op != OpConst16 { 8690 break 8691 } 8692 c := v_1.AuxInt 8693 v.reset(OpRsh32x64) 8694 v.AddArg(x) 8695 v0 := b.NewValue0(v.Line, OpConst64, t) 8696 v0.AuxInt = int64(uint16(c)) 8697 v.AddArg(v0) 8698 return true 8699 } 8700 // match: (Rsh32x16 (Const32 [0]) _) 8701 // cond: 8702 // result: (Const32 [0]) 8703 for { 8704 v_0 := v.Args[0] 8705 if v_0.Op != OpConst32 { 8706 break 8707 } 8708 if v_0.AuxInt != 0 { 8709 break 8710 } 8711 v.reset(OpConst32) 8712 v.AuxInt = 0 8713 return true 8714 } 8715 return false 8716 } 8717 func rewriteValuegeneric_OpRsh32x32(v *Value, config *Config) bool { 8718 b := v.Block 8719 _ = b 8720 // match: (Rsh32x32 <t> x (Const32 [c])) 8721 // cond: 8722 // result: (Rsh32x64 x (Const64 <t> 
[int64(uint32(c))])) 8723 for { 8724 t := v.Type 8725 x := v.Args[0] 8726 v_1 := v.Args[1] 8727 if v_1.Op != OpConst32 { 8728 break 8729 } 8730 c := v_1.AuxInt 8731 v.reset(OpRsh32x64) 8732 v.AddArg(x) 8733 v0 := b.NewValue0(v.Line, OpConst64, t) 8734 v0.AuxInt = int64(uint32(c)) 8735 v.AddArg(v0) 8736 return true 8737 } 8738 // match: (Rsh32x32 (Const32 [0]) _) 8739 // cond: 8740 // result: (Const32 [0]) 8741 for { 8742 v_0 := v.Args[0] 8743 if v_0.Op != OpConst32 { 8744 break 8745 } 8746 if v_0.AuxInt != 0 { 8747 break 8748 } 8749 v.reset(OpConst32) 8750 v.AuxInt = 0 8751 return true 8752 } 8753 return false 8754 } 8755 func rewriteValuegeneric_OpRsh32x64(v *Value, config *Config) bool { 8756 b := v.Block 8757 _ = b 8758 // match: (Rsh32x64 (Const32 [c]) (Const64 [d])) 8759 // cond: 8760 // result: (Const32 [int64(int32(c) >> uint64(d))]) 8761 for { 8762 v_0 := v.Args[0] 8763 if v_0.Op != OpConst32 { 8764 break 8765 } 8766 c := v_0.AuxInt 8767 v_1 := v.Args[1] 8768 if v_1.Op != OpConst64 { 8769 break 8770 } 8771 d := v_1.AuxInt 8772 v.reset(OpConst32) 8773 v.AuxInt = int64(int32(c) >> uint64(d)) 8774 return true 8775 } 8776 // match: (Rsh32x64 x (Const64 [0])) 8777 // cond: 8778 // result: x 8779 for { 8780 x := v.Args[0] 8781 v_1 := v.Args[1] 8782 if v_1.Op != OpConst64 { 8783 break 8784 } 8785 if v_1.AuxInt != 0 { 8786 break 8787 } 8788 v.reset(OpCopy) 8789 v.Type = x.Type 8790 v.AddArg(x) 8791 return true 8792 } 8793 // match: (Rsh32x64 (Const32 [0]) _) 8794 // cond: 8795 // result: (Const32 [0]) 8796 for { 8797 v_0 := v.Args[0] 8798 if v_0.Op != OpConst32 { 8799 break 8800 } 8801 if v_0.AuxInt != 0 { 8802 break 8803 } 8804 v.reset(OpConst32) 8805 v.AuxInt = 0 8806 return true 8807 } 8808 // match: (Rsh32x64 <t> (Rsh32x64 x (Const64 [c])) (Const64 [d])) 8809 // cond: !uaddOvf(c,d) 8810 // result: (Rsh32x64 x (Const64 <t> [c+d])) 8811 for { 8812 t := v.Type 8813 v_0 := v.Args[0] 8814 if v_0.Op != OpRsh32x64 { 8815 break 8816 } 8817 x := v_0.Args[0] 8818 v_0_1 
:= v_0.Args[1] 8819 if v_0_1.Op != OpConst64 { 8820 break 8821 } 8822 c := v_0_1.AuxInt 8823 v_1 := v.Args[1] 8824 if v_1.Op != OpConst64 { 8825 break 8826 } 8827 d := v_1.AuxInt 8828 if !(!uaddOvf(c, d)) { 8829 break 8830 } 8831 v.reset(OpRsh32x64) 8832 v.AddArg(x) 8833 v0 := b.NewValue0(v.Line, OpConst64, t) 8834 v0.AuxInt = c + d 8835 v.AddArg(v0) 8836 return true 8837 } 8838 return false 8839 } 8840 func rewriteValuegeneric_OpRsh32x8(v *Value, config *Config) bool { 8841 b := v.Block 8842 _ = b 8843 // match: (Rsh32x8 <t> x (Const8 [c])) 8844 // cond: 8845 // result: (Rsh32x64 x (Const64 <t> [int64(uint8(c))])) 8846 for { 8847 t := v.Type 8848 x := v.Args[0] 8849 v_1 := v.Args[1] 8850 if v_1.Op != OpConst8 { 8851 break 8852 } 8853 c := v_1.AuxInt 8854 v.reset(OpRsh32x64) 8855 v.AddArg(x) 8856 v0 := b.NewValue0(v.Line, OpConst64, t) 8857 v0.AuxInt = int64(uint8(c)) 8858 v.AddArg(v0) 8859 return true 8860 } 8861 // match: (Rsh32x8 (Const32 [0]) _) 8862 // cond: 8863 // result: (Const32 [0]) 8864 for { 8865 v_0 := v.Args[0] 8866 if v_0.Op != OpConst32 { 8867 break 8868 } 8869 if v_0.AuxInt != 0 { 8870 break 8871 } 8872 v.reset(OpConst32) 8873 v.AuxInt = 0 8874 return true 8875 } 8876 return false 8877 } 8878 func rewriteValuegeneric_OpRsh64Ux16(v *Value, config *Config) bool { 8879 b := v.Block 8880 _ = b 8881 // match: (Rsh64Ux16 <t> x (Const16 [c])) 8882 // cond: 8883 // result: (Rsh64Ux64 x (Const64 <t> [int64(uint16(c))])) 8884 for { 8885 t := v.Type 8886 x := v.Args[0] 8887 v_1 := v.Args[1] 8888 if v_1.Op != OpConst16 { 8889 break 8890 } 8891 c := v_1.AuxInt 8892 v.reset(OpRsh64Ux64) 8893 v.AddArg(x) 8894 v0 := b.NewValue0(v.Line, OpConst64, t) 8895 v0.AuxInt = int64(uint16(c)) 8896 v.AddArg(v0) 8897 return true 8898 } 8899 // match: (Rsh64Ux16 (Const64 [0]) _) 8900 // cond: 8901 // result: (Const64 [0]) 8902 for { 8903 v_0 := v.Args[0] 8904 if v_0.Op != OpConst64 { 8905 break 8906 } 8907 if v_0.AuxInt != 0 { 8908 break 8909 } 8910 v.reset(OpConst64) 8911 
v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh64Ux32 applies the generic rewrite rules for
// OpRsh64Ux32 values; it reports whether v was rewritten.
func rewriteValuegeneric_OpRsh64Ux32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64Ux32 <t> x (Const32 [c]))
	// cond:
	// result: (Rsh64Ux64 x (Const64 <t> [int64(uint32(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh64Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint32(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh64Ux32 (Const64 [0]) _)
	// cond:
	// result: (Const64 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh64Ux64 applies the generic rewrite rules for
// OpRsh64Ux64 values; it reports whether v was rewritten.
func rewriteValuegeneric_OpRsh64Ux64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64Ux64 (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (Const64 [int64(uint64(c) >> uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst64)
		v.AuxInt = int64(uint64(c) >> uint64(d))
		return true
	}
	// match: (Rsh64Ux64 x (Const64 [0]))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Rsh64Ux64 (Const64 [0]) _)
	// cond:
	// result: (Const64 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	// match: (Rsh64Ux64 _ (Const64 [c]))
	// cond: uint64(c) >= 64
	// result: (Const64 [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	// match: (Rsh64Ux64 <t> (Rsh64Ux64 x (Const64 [c])) (Const64 [d]))
	// cond: !uaddOvf(c,d)
	// result: (Rsh64Ux64 x (Const64 <t> [c+d]))
	for {
		t := v.Type
		v_0 := v.Args[0]
		if v_0.Op != OpRsh64Ux64 {
			break
		}
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		if !(!uaddOvf(c, d)) {
			break
		}
		v.reset(OpRsh64Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c + d
		v.AddArg(v0)
		return true
	}
	// match: (Rsh64Ux64 (Lsh64x64 (Rsh64Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
	// result: (Rsh64Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpLsh64x64 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpRsh64Ux64 {
			break
		}
		x := v_0_0.Args[0]
		v_0_0_1 := v_0_0.Args[1]
		if v_0_0_1.Op != OpConst64 {
			break
		}
		c1 := v_0_0_1.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c2 := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c3 := v_1.AuxInt
		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
			break
		}
		v.reset(OpRsh64Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v0.AuxInt = c1 - c2 + c3
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh64Ux8 applies the generic rewrite rules for
// OpRsh64Ux8 values; it reports whether v was rewritten.
func rewriteValuegeneric_OpRsh64Ux8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64Ux8 <t> x (Const8 [c]))
	// cond:
	// result: (Rsh64Ux64 x (Const64 <t> [int64(uint8(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh64Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint8(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh64Ux8 (Const64 [0]) _)
	// cond:
	// result: (Const64 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh64x16 applies the generic rewrite rules for
// OpRsh64x16 values; it reports whether v was rewritten.
func rewriteValuegeneric_OpRsh64x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64x16 <t> x (Const16 [c]))
	// cond:
	// result: (Rsh64x64 x (Const64 <t> [int64(uint16(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh64x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint16(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh64x16 (Const64 [0]) _)
	// cond:
	// result: (Const64 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh64x32 applies the generic rewrite rules for
// OpRsh64x32 values; it reports whether v was rewritten.
func rewriteValuegeneric_OpRsh64x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64x32 <t> x (Const32 [c]))
	// cond:
	// result: (Rsh64x64 x (Const64 <t> [int64(uint32(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh64x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint32(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh64x32 (Const64 [0]) _)
	// cond:
	// result: (Const64 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh64x64 applies the generic rewrite rules for
// OpRsh64x64 values; it reports whether v was rewritten.
func rewriteValuegeneric_OpRsh64x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64x64 (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (Const64 [c >> uint64(d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst64)
		v.AuxInt = c >> uint64(d)
		return true
	}
	// match: (Rsh64x64 x (Const64 [0]))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Rsh64x64 (Const64 [0]) _)
	// cond:
	// result: (Const64 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	// match: (Rsh64x64 <t> (Rsh64x64 x (Const64 [c])) (Const64 [d]))
	// cond: !uaddOvf(c,d)
	// result: (Rsh64x64 x (Const64 <t> [c+d]))
	for {
		t := v.Type
		v_0 := v.Args[0]
		if v_0.Op != OpRsh64x64 {
			break
		}
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		if !(!uaddOvf(c, d)) {
			break
		}
		v.reset(OpRsh64x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c + d
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh64x8 applies the generic rewrite rules for
// OpRsh64x8 values; it reports whether v was rewritten.
func rewriteValuegeneric_OpRsh64x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64x8 <t> x (Const8 [c]))
	// cond:
	// result: (Rsh64x64 x (Const64 <t> [int64(uint8(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh64x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint8(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh64x8 (Const64 [0]) _)
	// cond:
	// result: (Const64 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh8Ux16 applies the generic rewrite rules for
// OpRsh8Ux16 values; it reports whether v was rewritten.
func rewriteValuegeneric_OpRsh8Ux16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8Ux16 <t> x (Const16 [c]))
	// cond:
	// result: (Rsh8Ux64 x (Const64 <t> [int64(uint16(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh8Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint16(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh8Ux16 (Const8 [0]) _)
	// cond:
	// result: (Const8 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh8Ux32 applies the generic rewrite rules for
// OpRsh8Ux32 values; it reports whether v was rewritten.
func rewriteValuegeneric_OpRsh8Ux32(v *Value, config *Config) bool {
b := v.Block
	_ = b
	// match: (Rsh8Ux32 <t> x (Const32 [c]))
	// cond:
	// result: (Rsh8Ux64 x (Const64 <t> [int64(uint32(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh8Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint32(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh8Ux32 (Const8 [0]) _)
	// cond:
	// result: (Const8 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh8Ux64 applies the generic rewrite rules for
// OpRsh8Ux64 values; it reports whether v was rewritten.
func rewriteValuegeneric_OpRsh8Ux64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8Ux64 (Const8 [c]) (Const64 [d]))
	// cond:
	// result: (Const8 [int64(int8(uint8(c) >> uint64(d)))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst8)
		v.AuxInt = int64(int8(uint8(c) >> uint64(d)))
		return true
	}
	// match: (Rsh8Ux64 x (Const64 [0]))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Rsh8Ux64 (Const8 [0]) _)
	// cond:
	// result: (Const8 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	// match: (Rsh8Ux64 _ (Const64 [c]))
	// cond: uint64(c) >= 8
	// result: (Const8 [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 8) {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	// match: (Rsh8Ux64 <t> (Rsh8Ux64 x (Const64 [c])) (Const64 [d]))
	// cond: !uaddOvf(c,d)
	// result: (Rsh8Ux64 x (Const64 <t> [c+d]))
	for {
		t := v.Type
		v_0 := v.Args[0]
		if v_0.Op != OpRsh8Ux64 {
			break
		}
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		if !(!uaddOvf(c, d)) {
			break
		}
		v.reset(OpRsh8Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c + d
		v.AddArg(v0)
		return true
	}
	// match: (Rsh8Ux64 (Lsh8x64 (Rsh8Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
	// result: (Rsh8Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpLsh8x64 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpRsh8Ux64 {
			break
		}
		x := v_0_0.Args[0]
		v_0_0_1 := v_0_0.Args[1]
		if v_0_0_1.Op != OpConst64 {
			break
		}
		c1 := v_0_0_1.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c2 := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c3 := v_1.AuxInt
		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
			break
		}
		v.reset(OpRsh8Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v0.AuxInt = c1 - c2 + c3
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh8Ux8 applies the generic rewrite rules for
// OpRsh8Ux8 values; it reports whether v was rewritten.
func rewriteValuegeneric_OpRsh8Ux8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8Ux8 <t> x (Const8 [c]))
	// cond:
	// result: (Rsh8Ux64 x (Const64 <t> [int64(uint8(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh8Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint8(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh8Ux8 (Const8 [0]) _)
	// cond:
	// result: (Const8 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh8x16 applies the generic rewrite rules for
// OpRsh8x16 values; it reports whether v was rewritten.
func rewriteValuegeneric_OpRsh8x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8x16 <t> x (Const16 [c]))
	// cond:
	// result: (Rsh8x64 x (Const64 <t> [int64(uint16(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh8x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint16(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh8x16 (Const8 [0]) _)
	// cond:
	// result: (Const8 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh8x32 applies the generic rewrite rules for
// OpRsh8x32 values; it reports whether v was rewritten.
func rewriteValuegeneric_OpRsh8x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8x32 <t> x (Const32 [c]))
	// cond:
	// result: (Rsh8x64 x (Const64 <t> [int64(uint32(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh8x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint32(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh8x32 (Const8 [0]) _)
	// cond:
	// result: (Const8 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh8x64 applies the generic rewrite rules for
// OpRsh8x64 values; it reports whether v was rewritten.
func rewriteValuegeneric_OpRsh8x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8x64 (Const8 [c]) (Const64 [d]))
	// cond:
	// result: (Const8 [int64(int8(c) >> uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst8)
		v.AuxInt = int64(int8(c) >> uint64(d))
		return true
	}
	// match: (Rsh8x64 x (Const64 [0]))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Rsh8x64 (Const8 [0]) _)
	// cond:
	// result: (Const8 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	// match: (Rsh8x64 <t> (Rsh8x64 x (Const64 [c])) (Const64 [d]))
	// cond: !uaddOvf(c,d)
	// result: (Rsh8x64 x (Const64 <t> [c+d]))
	for {
		t := v.Type
		v_0 := v.Args[0]
		if v_0.Op != OpRsh8x64 {
			break
		}
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		if !(!uaddOvf(c, d)) {
			break
		}
		v.reset(OpRsh8x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c + d
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh8x8 applies the generic rewrite rules for
// OpRsh8x8 values; it reports whether v was rewritten.
func rewriteValuegeneric_OpRsh8x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8x8 <t> x (Const8 [c]))
	// cond:
	// result: (Rsh8x64 x (Const64 <t> [int64(uint8(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh8x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint8(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh8x8 (Const8 [0]) _)
	// cond:
	// result: (Const8 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpSignExt16to32 applies the generic rewrite rules for
// OpSignExt16to32 values; it reports whether v was rewritten.
func rewriteValuegeneric_OpSignExt16to32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SignExt16to32 (Trunc32to16 x:(Rsh32x64 _ (Const64 [s]))))
	// cond: s >= 16
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpTrunc32to16 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpRsh32x64 {
			break
		}
		x_1 := x.Args[1]
		if x_1.Op != OpConst64 {
			break
		}
		s := x_1.AuxInt
		if !(s >= 16) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpSignExt16to64 applies the generic rewrite rules for
// OpSignExt16to64 values; it reports whether v was rewritten.
func rewriteValuegeneric_OpSignExt16to64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SignExt16to64 (Trunc64to16 x:(Rsh64x64 _ (Const64 [s]))))
	// cond: s >= 48
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpTrunc64to16 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpRsh64x64 {
			break
		}
		x_1 := x.Args[1]
		if x_1.Op != OpConst64 {
			break
		}
		s := x_1.AuxInt
		if !(s >=
48) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpSignExt32to64 applies the generic rewrite rules for
// OpSignExt32to64 values; it reports whether v was rewritten.
func rewriteValuegeneric_OpSignExt32to64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SignExt32to64 (Trunc64to32 x:(Rsh64x64 _ (Const64 [s]))))
	// cond: s >= 32
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpTrunc64to32 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpRsh64x64 {
			break
		}
		x_1 := x.Args[1]
		if x_1.Op != OpConst64 {
			break
		}
		s := x_1.AuxInt
		if !(s >= 32) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpSignExt8to16 applies the generic rewrite rules for
// OpSignExt8to16 values; it reports whether v was rewritten.
func rewriteValuegeneric_OpSignExt8to16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SignExt8to16 (Trunc16to8 x:(Rsh16x64 _ (Const64 [s]))))
	// cond: s >= 8
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpTrunc16to8 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpRsh16x64 {
			break
		}
		x_1 := x.Args[1]
		if x_1.Op != OpConst64 {
			break
		}
		s := x_1.AuxInt
		if !(s >= 8) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpSignExt8to32 applies the generic rewrite rules for
// OpSignExt8to32 values; it reports whether v was rewritten.
func rewriteValuegeneric_OpSignExt8to32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SignExt8to32 (Trunc32to8 x:(Rsh32x64 _ (Const64 [s]))))
	// cond: s >= 24
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpTrunc32to8 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpRsh32x64 {
			break
		}
		x_1 := x.Args[1]
		if x_1.Op != OpConst64 {
			break
		}
		s := x_1.AuxInt
		if !(s >= 24) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpSignExt8to64 applies the generic rewrite rules for
// OpSignExt8to64 values; it reports whether v was rewritten.
func rewriteValuegeneric_OpSignExt8to64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SignExt8to64 (Trunc64to8 x:(Rsh64x64 _ (Const64 [s]))))
	// cond: s >= 56
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpTrunc64to8 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpRsh64x64 {
			break
		}
		x_1 := x.Args[1]
		if x_1.Op != OpConst64 {
			break
		}
		s := x_1.AuxInt
		if !(s >= 56) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpSliceCap applies the generic rewrite rules for
// OpSliceCap values; it reports whether v was rewritten.
func rewriteValuegeneric_OpSliceCap(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SliceCap (SliceMake _ _ (Const64 <t> [c])))
	// cond:
	// result: (Const64 <t> [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSliceMake {
			break
		}
		v_0_2 := v_0.Args[2]
		if v_0_2.Op != OpConst64 {
			break
		}
		t := v_0_2.Type
		c := v_0_2.AuxInt
		v.reset(OpConst64)
		v.Type = t
		v.AuxInt = c
		return true
	}
	// match: (SliceCap (SliceMake _ _ (Const32 <t> [c])))
	// cond:
	// result: (Const32 <t> [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSliceMake {
			break
		}
		v_0_2 := v_0.Args[2]
		if v_0_2.Op != OpConst32 {
			break
		}
		t := v_0_2.Type
		c := v_0_2.AuxInt
		v.reset(OpConst32)
		v.Type = t
		v.AuxInt = c
		return true
	}
	// match: (SliceCap (SliceMake _ _ (SliceCap x)))
	// cond:
	// result: (SliceCap x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSliceMake {
			break
		}
		v_0_2 := v_0.Args[2]
		if v_0_2.Op != OpSliceCap {
			break
		}
		x := v_0_2.Args[0]
		v.reset(OpSliceCap)
		v.AddArg(x)
		return true
	}
	// match: (SliceCap (SliceMake _ _ (SliceLen x)))
	// cond:
	// result: (SliceLen x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSliceMake {
			break
		}
		v_0_2 := v_0.Args[2]
		if v_0_2.Op != OpSliceLen {
			break
		}
		x := v_0_2.Args[0]
		v.reset(OpSliceLen)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpSliceLen applies the generic rewrite rules for
// OpSliceLen values; it reports whether v was rewritten.
func rewriteValuegeneric_OpSliceLen(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SliceLen (SliceMake _ (Const64 <t> [c]) _))
	// cond:
	// result: (Const64 <t> [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSliceMake {
			break
		}
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		t := v_0_1.Type
		c := v_0_1.AuxInt
		v.reset(OpConst64)
		v.Type = t
		v.AuxInt = c
		return true
	}
	// match: (SliceLen (SliceMake _ (Const32 <t> [c]) _))
	// cond:
	// result: (Const32 <t> [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSliceMake {
			break
		}
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst32 {
			break
		}
		t := v_0_1.Type
		c := v_0_1.AuxInt
		v.reset(OpConst32)
		v.Type = t
		v.AuxInt = c
		return true
	}
	// match: (SliceLen (SliceMake _ (SliceLen x) _))
	// cond:
	// result: (SliceLen x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSliceMake {
			break
		}
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpSliceLen {
			break
		}
		x := v_0_1.Args[0]
		v.reset(OpSliceLen)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpSlicePtr applies the generic rewrite rules for
// OpSlicePtr values; it reports whether v was rewritten.
func rewriteValuegeneric_OpSlicePtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SlicePtr (SliceMake (SlicePtr x) _ _))
	// cond:
	// result: (SlicePtr x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSliceMake {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpSlicePtr {
			break
		}
		x := v_0_0.Args[0]
		v.reset(OpSlicePtr)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpSlicemask applies the generic rewrite rules for
// OpSlicemask values; it reports whether v was rewritten.
func rewriteValuegeneric_OpSlicemask(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Slicemask (Const32 [x]))
	// cond: x > 0
	// result: (Const32 [-1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		x := v_0.AuxInt
		if !(x > 0) {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = -1
		return true
	}
	// match: (Slicemask (Const32 [0]))
	// cond:
	// result: (Const32 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = 0
		return true
	}
	// match: (Slicemask (Const64 [x]))
	// cond: x > 0
	// result: (Const64 [-1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		x := v_0.AuxInt
		if !(x > 0) {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = -1
		return true
	}
	// match: (Slicemask (Const64 [0]))
	// cond:
	// result: (Const64 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpSqrt applies the generic rewrite rules for
// OpSqrt values; it reports whether v was rewritten.
func rewriteValuegeneric_OpSqrt(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sqrt (Const64F [c]))
	// cond:
	// result: (Const64F [f2i(math.Sqrt(i2f(c)))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64F {
			break
		}
		c := v_0.AuxInt
		v.reset(OpConst64F)
		v.AuxInt = f2i(math.Sqrt(i2f(c)))
		return true
	}
	return false
}

// rewriteValuegeneric_OpStore applies the generic rewrite rules for
// OpStore values; it reports whether v was rewritten.
func rewriteValuegeneric_OpStore(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Store _ (StructMake0) mem)
	// cond:
	// result: mem
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpStructMake0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpCopy)
		v.Type = mem.Type
		v.AddArg(mem)
		return true
	}
	// match: (Store dst (StructMake1 <t> f0) mem)
	// cond:
	// result: (Store [t.FieldType(0).Size()] dst f0 mem)
	for {
		dst := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpStructMake1 {
			break
		}
		t := v_1.Type
		f0 := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpStore)
		v.AuxInt = t.FieldType(0).Size()
		v.AddArg(dst)
		v.AddArg(f0)
		v.AddArg(mem)
		return true
	}
	// match: (Store dst (StructMake2 <t> f0 f1) mem)
	// cond:
	// result: (Store [t.FieldType(1).Size()] (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store [t.FieldType(0).Size()] dst f0 mem))
	for {
		dst := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpStructMake2 {
			break
		}
		t := v_1.Type
		f0 := v_1.Args[0]
		f1 := v_1.Args[1]
		mem := v.Args[2]
		v.reset(OpStore)
		v.AuxInt = t.FieldType(1).Size()
		v0 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo())
		v0.AuxInt = t.FieldOff(1)
		v0.AddArg(dst)
		v.AddArg(v0)
		v.AddArg(f1)
		v1 := b.NewValue0(v.Line, OpStore, TypeMem)
		v1.AuxInt = t.FieldType(0).Size()
		v1.AddArg(dst)
		v1.AddArg(f0)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Store dst (StructMake3 <t> f0 f1 f2) mem)
	// cond:
	// result: (Store [t.FieldType(2).Size()] (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst) f2 (Store [t.FieldType(1).Size()] (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store [t.FieldType(0).Size()] dst f0 mem)))
	for {
		dst := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpStructMake3 {
			break
		}
		t := v_1.Type
		f0 :=
v_1.Args[0]
		f1 := v_1.Args[1]
		f2 := v_1.Args[2]
		mem := v.Args[2]
		v.reset(OpStore)
		v.AuxInt = t.FieldType(2).Size()
		v0 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(2).PtrTo())
		v0.AuxInt = t.FieldOff(2)
		v0.AddArg(dst)
		v.AddArg(v0)
		v.AddArg(f2)
		v1 := b.NewValue0(v.Line, OpStore, TypeMem)
		v1.AuxInt = t.FieldType(1).Size()
		v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo())
		v2.AuxInt = t.FieldOff(1)
		v2.AddArg(dst)
		v1.AddArg(v2)
		v1.AddArg(f1)
		v3 := b.NewValue0(v.Line, OpStore, TypeMem)
		v3.AuxInt = t.FieldType(0).Size()
		v3.AddArg(dst)
		v3.AddArg(f0)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Store dst (StructMake4 <t> f0 f1 f2 f3) mem)
	// cond:
	// result: (Store [t.FieldType(3).Size()] (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] dst) f3 (Store [t.FieldType(2).Size()] (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst) f2 (Store [t.FieldType(1).Size()] (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store [t.FieldType(0).Size()] dst f0 mem))))
	for {
		dst := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpStructMake4 {
			break
		}
		t := v_1.Type
		f0 := v_1.Args[0]
		f1 := v_1.Args[1]
		f2 := v_1.Args[2]
		f3 := v_1.Args[3]
		mem := v.Args[2]
		v.reset(OpStore)
		v.AuxInt = t.FieldType(3).Size()
		v0 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(3).PtrTo())
		v0.AuxInt = t.FieldOff(3)
		v0.AddArg(dst)
		v.AddArg(v0)
		v.AddArg(f3)
		v1 := b.NewValue0(v.Line, OpStore, TypeMem)
		v1.AuxInt = t.FieldType(2).Size()
		v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(2).PtrTo())
		v2.AuxInt = t.FieldOff(2)
		v2.AddArg(dst)
		v1.AddArg(v2)
		v1.AddArg(f2)
		v3 := b.NewValue0(v.Line, OpStore, TypeMem)
		v3.AuxInt = t.FieldType(1).Size()
		v4 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo())
		v4.AuxInt = t.FieldOff(1)
		v4.AddArg(dst)
		v3.AddArg(v4)
		v3.AddArg(f1)
		v5 := b.NewValue0(v.Line, OpStore, TypeMem)
		v5.AuxInt = t.FieldType(0).Size()
		v5.AddArg(dst)
		v5.AddArg(f0)
		v5.AddArg(mem)
		v3.AddArg(v5)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Store [size] dst (Load <t> src mem) mem)
	// cond: !config.fe.CanSSA(t)
	// result: (Move [MakeSizeAndAlign(size, t.Alignment()).Int64()] dst src mem)
	for {
		size := v.AuxInt
		dst := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpLoad {
			break
		}
		t := v_1.Type
		src := v_1.Args[0]
		mem := v_1.Args[1]
		if mem != v.Args[2] {
			break
		}
		if !(!config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpMove)
		v.AuxInt = MakeSizeAndAlign(size, t.Alignment()).Int64()
		v.AddArg(dst)
		v.AddArg(src)
		v.AddArg(mem)
		return true
	}
	// match: (Store [size] dst (Load <t> src mem) (VarDef {x} mem))
	// cond: !config.fe.CanSSA(t)
	// result: (Move [MakeSizeAndAlign(size, t.Alignment()).Int64()] dst src (VarDef {x} mem))
	for {
		size := v.AuxInt
		dst := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpLoad {
			break
		}
		t := v_1.Type
		src := v_1.Args[0]
		mem := v_1.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpVarDef {
			break
		}
		x := v_2.Aux
		if mem != v_2.Args[0] {
			break
		}
		if !(!config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpMove)
		v.AuxInt = MakeSizeAndAlign(size, t.Alignment()).Int64()
		v.AddArg(dst)
		v.AddArg(src)
		v0 := b.NewValue0(v.Line, OpVarDef, TypeMem)
		v0.Aux = x
		v0.AddArg(mem)
		v.AddArg(v0)
		return true
	}
	// match: (Store _ (ArrayMake0) mem)
	// cond:
	// result: mem
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpArrayMake0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpCopy)
		v.Type = mem.Type
		v.AddArg(mem)
		return true
	}
	// match: (Store [size] dst (ArrayMake1 e) mem)
	// cond:
	// result: (Store [size] dst e mem)
	for {
		size := v.AuxInt
		dst := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpArrayMake1 {
			break
		}
		e := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpStore)
		v.AuxInt = size
		v.AddArg(dst)
		v.AddArg(e)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValuegeneric_OpStringLen applies the generic rewrite rules for
// OpStringLen values; it reports whether v was rewritten.
func rewriteValuegeneric_OpStringLen(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (StringLen (StringMake _ (Const64 <t> [c])))
	// cond:
	// result: (Const64 <t> [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpStringMake {
			break
		}
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		t := v_0_1.Type
		c := v_0_1.AuxInt
		v.reset(OpConst64)
		v.Type = t
		v.AuxInt = c
		return true
	}
	return false
}

// rewriteValuegeneric_OpStringPtr applies the generic rewrite rules for
// OpStringPtr values; it reports whether v was rewritten.
func rewriteValuegeneric_OpStringPtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (StringPtr (StringMake (Const64 <t> [c]) _))
	// cond:
	// result: (Const64 <t> [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpStringMake {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst64 {
			break
		}
		t := v_0_0.Type
		c := v_0_0.AuxInt
		v.reset(OpConst64)
		v.Type = t
		v.AuxInt = c
		return true
	}
	return false
}

// rewriteValuegeneric_OpStructSelect applies the generic rewrite rules for
// OpStructSelect values; it reports whether v was rewritten.
// NOTE(review): this function continues beyond the visible chunk; the text
// below reproduces only the portion visible here.
func rewriteValuegeneric_OpStructSelect(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (StructSelect (StructMake1 x))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake1 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [0] (StructMake2 x _))
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake2 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [1] (StructMake2 _ x))
	// cond:
	// result: x
	for {
		if v.AuxInt != 1 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake2 {
			break
		}
		x := v_0.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [0] (StructMake3 x _ _))
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake3 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [1] (StructMake3 _ x _))
	// cond:
	// result: x
	for {
		if v.AuxInt != 1 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake3 {
			break
		}
		x := v_0.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [2] (StructMake3 _ _ x))
	// cond:
	// result: x
	for {
		if v.AuxInt != 2 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake3 {
			break
		}
		x := v_0.Args[2]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [0] (StructMake4 x _ _ _))
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake4 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [1] (StructMake4 _ x _ _))
	// cond:
	// result: x
	for {
		if v.AuxInt != 1 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake4 {
			break
		}
		x := v_0.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [2] (StructMake4 _ _ x _))
	// cond:
	// result: x
	for {
		if v.AuxInt != 2 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake4 {
			break
		}
		x := v_0.Args[2]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [3] (StructMake4 _ _ _ x))
	// cond:
	// result: x
	for {
		if v.AuxInt != 3 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake4 {
			break
		}
		x := v_0.Args[3]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [i] x:(Load <t> ptr mem))
	// cond: !config.fe.CanSSA(t)
	// result: @x.Block (Load <v.Type> (OffPtr <v.Type.PtrTo()> [t.FieldOff(int(i))] ptr) mem)
	for {
		i := v.AuxInt
		x := v.Args[0]
		if x.Op != OpLoad {
			break
		}
		t := x.Type
		ptr := x.Args[0]
		mem := x.Args[1]
		if !(!config.fe.CanSSA(t)) {
			break
		}
		b = x.Block
		v0 := b.NewValue0(v.Line, OpLoad, v.Type)
		v.reset(OpCopy)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpOffPtr, v.Type.PtrTo())
		v1.AuxInt = t.FieldOff(int(i))
		v1.AddArg(ptr)
		v0.AddArg(v1)
		v0.AddArg(mem)
		return true
	}
	// match: (StructSelect [0] x:(IData _))
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
if x.Op != OpIData { 10701 break 10702 } 10703 v.reset(OpCopy) 10704 v.Type = x.Type 10705 v.AddArg(x) 10706 return true 10707 } 10708 return false 10709 } 10710 func rewriteValuegeneric_OpSub16(v *Value, config *Config) bool { 10711 b := v.Block 10712 _ = b 10713 // match: (Sub16 (Const16 [c]) (Const16 [d])) 10714 // cond: 10715 // result: (Const16 [int64(int16(c-d))]) 10716 for { 10717 v_0 := v.Args[0] 10718 if v_0.Op != OpConst16 { 10719 break 10720 } 10721 c := v_0.AuxInt 10722 v_1 := v.Args[1] 10723 if v_1.Op != OpConst16 { 10724 break 10725 } 10726 d := v_1.AuxInt 10727 v.reset(OpConst16) 10728 v.AuxInt = int64(int16(c - d)) 10729 return true 10730 } 10731 // match: (Sub16 x (Const16 <t> [c])) 10732 // cond: x.Op != OpConst16 10733 // result: (Add16 (Const16 <t> [int64(int16(-c))]) x) 10734 for { 10735 x := v.Args[0] 10736 v_1 := v.Args[1] 10737 if v_1.Op != OpConst16 { 10738 break 10739 } 10740 t := v_1.Type 10741 c := v_1.AuxInt 10742 if !(x.Op != OpConst16) { 10743 break 10744 } 10745 v.reset(OpAdd16) 10746 v0 := b.NewValue0(v.Line, OpConst16, t) 10747 v0.AuxInt = int64(int16(-c)) 10748 v.AddArg(v0) 10749 v.AddArg(x) 10750 return true 10751 } 10752 // match: (Sub16 x x) 10753 // cond: 10754 // result: (Const16 [0]) 10755 for { 10756 x := v.Args[0] 10757 if x != v.Args[1] { 10758 break 10759 } 10760 v.reset(OpConst16) 10761 v.AuxInt = 0 10762 return true 10763 } 10764 // match: (Sub16 (Add16 x y) x) 10765 // cond: 10766 // result: y 10767 for { 10768 v_0 := v.Args[0] 10769 if v_0.Op != OpAdd16 { 10770 break 10771 } 10772 x := v_0.Args[0] 10773 y := v_0.Args[1] 10774 if x != v.Args[1] { 10775 break 10776 } 10777 v.reset(OpCopy) 10778 v.Type = y.Type 10779 v.AddArg(y) 10780 return true 10781 } 10782 // match: (Sub16 (Add16 x y) y) 10783 // cond: 10784 // result: x 10785 for { 10786 v_0 := v.Args[0] 10787 if v_0.Op != OpAdd16 { 10788 break 10789 } 10790 x := v_0.Args[0] 10791 y := v_0.Args[1] 10792 if y != v.Args[1] { 10793 break 10794 } 10795 v.reset(OpCopy) 
10796 v.Type = x.Type 10797 v.AddArg(x) 10798 return true 10799 } 10800 return false 10801 } 10802 func rewriteValuegeneric_OpSub32(v *Value, config *Config) bool { 10803 b := v.Block 10804 _ = b 10805 // match: (Sub32 (Const32 [c]) (Const32 [d])) 10806 // cond: 10807 // result: (Const32 [int64(int32(c-d))]) 10808 for { 10809 v_0 := v.Args[0] 10810 if v_0.Op != OpConst32 { 10811 break 10812 } 10813 c := v_0.AuxInt 10814 v_1 := v.Args[1] 10815 if v_1.Op != OpConst32 { 10816 break 10817 } 10818 d := v_1.AuxInt 10819 v.reset(OpConst32) 10820 v.AuxInt = int64(int32(c - d)) 10821 return true 10822 } 10823 // match: (Sub32 x (Const32 <t> [c])) 10824 // cond: x.Op != OpConst32 10825 // result: (Add32 (Const32 <t> [int64(int32(-c))]) x) 10826 for { 10827 x := v.Args[0] 10828 v_1 := v.Args[1] 10829 if v_1.Op != OpConst32 { 10830 break 10831 } 10832 t := v_1.Type 10833 c := v_1.AuxInt 10834 if !(x.Op != OpConst32) { 10835 break 10836 } 10837 v.reset(OpAdd32) 10838 v0 := b.NewValue0(v.Line, OpConst32, t) 10839 v0.AuxInt = int64(int32(-c)) 10840 v.AddArg(v0) 10841 v.AddArg(x) 10842 return true 10843 } 10844 // match: (Sub32 x x) 10845 // cond: 10846 // result: (Const32 [0]) 10847 for { 10848 x := v.Args[0] 10849 if x != v.Args[1] { 10850 break 10851 } 10852 v.reset(OpConst32) 10853 v.AuxInt = 0 10854 return true 10855 } 10856 // match: (Sub32 (Add32 x y) x) 10857 // cond: 10858 // result: y 10859 for { 10860 v_0 := v.Args[0] 10861 if v_0.Op != OpAdd32 { 10862 break 10863 } 10864 x := v_0.Args[0] 10865 y := v_0.Args[1] 10866 if x != v.Args[1] { 10867 break 10868 } 10869 v.reset(OpCopy) 10870 v.Type = y.Type 10871 v.AddArg(y) 10872 return true 10873 } 10874 // match: (Sub32 (Add32 x y) y) 10875 // cond: 10876 // result: x 10877 for { 10878 v_0 := v.Args[0] 10879 if v_0.Op != OpAdd32 { 10880 break 10881 } 10882 x := v_0.Args[0] 10883 y := v_0.Args[1] 10884 if y != v.Args[1] { 10885 break 10886 } 10887 v.reset(OpCopy) 10888 v.Type = x.Type 10889 v.AddArg(x) 10890 return true 10891 
} 10892 return false 10893 } 10894 func rewriteValuegeneric_OpSub32F(v *Value, config *Config) bool { 10895 b := v.Block 10896 _ = b 10897 // match: (Sub32F (Const32F [c]) (Const32F [d])) 10898 // cond: 10899 // result: (Const32F [f2i(float64(i2f32(c) - i2f32(d)))]) 10900 for { 10901 v_0 := v.Args[0] 10902 if v_0.Op != OpConst32F { 10903 break 10904 } 10905 c := v_0.AuxInt 10906 v_1 := v.Args[1] 10907 if v_1.Op != OpConst32F { 10908 break 10909 } 10910 d := v_1.AuxInt 10911 v.reset(OpConst32F) 10912 v.AuxInt = f2i(float64(i2f32(c) - i2f32(d))) 10913 return true 10914 } 10915 // match: (Sub32F x (Const32F [0])) 10916 // cond: 10917 // result: x 10918 for { 10919 x := v.Args[0] 10920 v_1 := v.Args[1] 10921 if v_1.Op != OpConst32F { 10922 break 10923 } 10924 if v_1.AuxInt != 0 { 10925 break 10926 } 10927 v.reset(OpCopy) 10928 v.Type = x.Type 10929 v.AddArg(x) 10930 return true 10931 } 10932 return false 10933 } 10934 func rewriteValuegeneric_OpSub64(v *Value, config *Config) bool { 10935 b := v.Block 10936 _ = b 10937 // match: (Sub64 (Const64 [c]) (Const64 [d])) 10938 // cond: 10939 // result: (Const64 [c-d]) 10940 for { 10941 v_0 := v.Args[0] 10942 if v_0.Op != OpConst64 { 10943 break 10944 } 10945 c := v_0.AuxInt 10946 v_1 := v.Args[1] 10947 if v_1.Op != OpConst64 { 10948 break 10949 } 10950 d := v_1.AuxInt 10951 v.reset(OpConst64) 10952 v.AuxInt = c - d 10953 return true 10954 } 10955 // match: (Sub64 x (Const64 <t> [c])) 10956 // cond: x.Op != OpConst64 10957 // result: (Add64 (Const64 <t> [-c]) x) 10958 for { 10959 x := v.Args[0] 10960 v_1 := v.Args[1] 10961 if v_1.Op != OpConst64 { 10962 break 10963 } 10964 t := v_1.Type 10965 c := v_1.AuxInt 10966 if !(x.Op != OpConst64) { 10967 break 10968 } 10969 v.reset(OpAdd64) 10970 v0 := b.NewValue0(v.Line, OpConst64, t) 10971 v0.AuxInt = -c 10972 v.AddArg(v0) 10973 v.AddArg(x) 10974 return true 10975 } 10976 // match: (Sub64 x x) 10977 // cond: 10978 // result: (Const64 [0]) 10979 for { 10980 x := v.Args[0] 10981 if x 
!= v.Args[1] { 10982 break 10983 } 10984 v.reset(OpConst64) 10985 v.AuxInt = 0 10986 return true 10987 } 10988 // match: (Sub64 (Add64 x y) x) 10989 // cond: 10990 // result: y 10991 for { 10992 v_0 := v.Args[0] 10993 if v_0.Op != OpAdd64 { 10994 break 10995 } 10996 x := v_0.Args[0] 10997 y := v_0.Args[1] 10998 if x != v.Args[1] { 10999 break 11000 } 11001 v.reset(OpCopy) 11002 v.Type = y.Type 11003 v.AddArg(y) 11004 return true 11005 } 11006 // match: (Sub64 (Add64 x y) y) 11007 // cond: 11008 // result: x 11009 for { 11010 v_0 := v.Args[0] 11011 if v_0.Op != OpAdd64 { 11012 break 11013 } 11014 x := v_0.Args[0] 11015 y := v_0.Args[1] 11016 if y != v.Args[1] { 11017 break 11018 } 11019 v.reset(OpCopy) 11020 v.Type = x.Type 11021 v.AddArg(x) 11022 return true 11023 } 11024 return false 11025 } 11026 func rewriteValuegeneric_OpSub64F(v *Value, config *Config) bool { 11027 b := v.Block 11028 _ = b 11029 // match: (Sub64F (Const64F [c]) (Const64F [d])) 11030 // cond: 11031 // result: (Const64F [f2i(i2f(c) - i2f(d))]) 11032 for { 11033 v_0 := v.Args[0] 11034 if v_0.Op != OpConst64F { 11035 break 11036 } 11037 c := v_0.AuxInt 11038 v_1 := v.Args[1] 11039 if v_1.Op != OpConst64F { 11040 break 11041 } 11042 d := v_1.AuxInt 11043 v.reset(OpConst64F) 11044 v.AuxInt = f2i(i2f(c) - i2f(d)) 11045 return true 11046 } 11047 // match: (Sub64F x (Const64F [0])) 11048 // cond: 11049 // result: x 11050 for { 11051 x := v.Args[0] 11052 v_1 := v.Args[1] 11053 if v_1.Op != OpConst64F { 11054 break 11055 } 11056 if v_1.AuxInt != 0 { 11057 break 11058 } 11059 v.reset(OpCopy) 11060 v.Type = x.Type 11061 v.AddArg(x) 11062 return true 11063 } 11064 return false 11065 } 11066 func rewriteValuegeneric_OpSub8(v *Value, config *Config) bool { 11067 b := v.Block 11068 _ = b 11069 // match: (Sub8 (Const8 [c]) (Const8 [d])) 11070 // cond: 11071 // result: (Const8 [int64(int8(c-d))]) 11072 for { 11073 v_0 := v.Args[0] 11074 if v_0.Op != OpConst8 { 11075 break 11076 } 11077 c := v_0.AuxInt 11078 v_1 
:= v.Args[1] 11079 if v_1.Op != OpConst8 { 11080 break 11081 } 11082 d := v_1.AuxInt 11083 v.reset(OpConst8) 11084 v.AuxInt = int64(int8(c - d)) 11085 return true 11086 } 11087 // match: (Sub8 x (Const8 <t> [c])) 11088 // cond: x.Op != OpConst8 11089 // result: (Add8 (Const8 <t> [int64(int8(-c))]) x) 11090 for { 11091 x := v.Args[0] 11092 v_1 := v.Args[1] 11093 if v_1.Op != OpConst8 { 11094 break 11095 } 11096 t := v_1.Type 11097 c := v_1.AuxInt 11098 if !(x.Op != OpConst8) { 11099 break 11100 } 11101 v.reset(OpAdd8) 11102 v0 := b.NewValue0(v.Line, OpConst8, t) 11103 v0.AuxInt = int64(int8(-c)) 11104 v.AddArg(v0) 11105 v.AddArg(x) 11106 return true 11107 } 11108 // match: (Sub8 x x) 11109 // cond: 11110 // result: (Const8 [0]) 11111 for { 11112 x := v.Args[0] 11113 if x != v.Args[1] { 11114 break 11115 } 11116 v.reset(OpConst8) 11117 v.AuxInt = 0 11118 return true 11119 } 11120 // match: (Sub8 (Add8 x y) x) 11121 // cond: 11122 // result: y 11123 for { 11124 v_0 := v.Args[0] 11125 if v_0.Op != OpAdd8 { 11126 break 11127 } 11128 x := v_0.Args[0] 11129 y := v_0.Args[1] 11130 if x != v.Args[1] { 11131 break 11132 } 11133 v.reset(OpCopy) 11134 v.Type = y.Type 11135 v.AddArg(y) 11136 return true 11137 } 11138 // match: (Sub8 (Add8 x y) y) 11139 // cond: 11140 // result: x 11141 for { 11142 v_0 := v.Args[0] 11143 if v_0.Op != OpAdd8 { 11144 break 11145 } 11146 x := v_0.Args[0] 11147 y := v_0.Args[1] 11148 if y != v.Args[1] { 11149 break 11150 } 11151 v.reset(OpCopy) 11152 v.Type = x.Type 11153 v.AddArg(x) 11154 return true 11155 } 11156 return false 11157 } 11158 func rewriteValuegeneric_OpTrunc16to8(v *Value, config *Config) bool { 11159 b := v.Block 11160 _ = b 11161 // match: (Trunc16to8 (Const16 [c])) 11162 // cond: 11163 // result: (Const8 [int64(int8(c))]) 11164 for { 11165 v_0 := v.Args[0] 11166 if v_0.Op != OpConst16 { 11167 break 11168 } 11169 c := v_0.AuxInt 11170 v.reset(OpConst8) 11171 v.AuxInt = int64(int8(c)) 11172 return true 11173 } 11174 // match: 
(Trunc16to8 (ZeroExt8to16 x)) 11175 // cond: 11176 // result: x 11177 for { 11178 v_0 := v.Args[0] 11179 if v_0.Op != OpZeroExt8to16 { 11180 break 11181 } 11182 x := v_0.Args[0] 11183 v.reset(OpCopy) 11184 v.Type = x.Type 11185 v.AddArg(x) 11186 return true 11187 } 11188 // match: (Trunc16to8 (SignExt8to16 x)) 11189 // cond: 11190 // result: x 11191 for { 11192 v_0 := v.Args[0] 11193 if v_0.Op != OpSignExt8to16 { 11194 break 11195 } 11196 x := v_0.Args[0] 11197 v.reset(OpCopy) 11198 v.Type = x.Type 11199 v.AddArg(x) 11200 return true 11201 } 11202 // match: (Trunc16to8 (And16 (Const16 [y]) x)) 11203 // cond: y&0xFF == 0xFF 11204 // result: (Trunc16to8 x) 11205 for { 11206 v_0 := v.Args[0] 11207 if v_0.Op != OpAnd16 { 11208 break 11209 } 11210 v_0_0 := v_0.Args[0] 11211 if v_0_0.Op != OpConst16 { 11212 break 11213 } 11214 y := v_0_0.AuxInt 11215 x := v_0.Args[1] 11216 if !(y&0xFF == 0xFF) { 11217 break 11218 } 11219 v.reset(OpTrunc16to8) 11220 v.AddArg(x) 11221 return true 11222 } 11223 return false 11224 } 11225 func rewriteValuegeneric_OpTrunc32to16(v *Value, config *Config) bool { 11226 b := v.Block 11227 _ = b 11228 // match: (Trunc32to16 (Const32 [c])) 11229 // cond: 11230 // result: (Const16 [int64(int16(c))]) 11231 for { 11232 v_0 := v.Args[0] 11233 if v_0.Op != OpConst32 { 11234 break 11235 } 11236 c := v_0.AuxInt 11237 v.reset(OpConst16) 11238 v.AuxInt = int64(int16(c)) 11239 return true 11240 } 11241 // match: (Trunc32to16 (ZeroExt8to32 x)) 11242 // cond: 11243 // result: (ZeroExt8to16 x) 11244 for { 11245 v_0 := v.Args[0] 11246 if v_0.Op != OpZeroExt8to32 { 11247 break 11248 } 11249 x := v_0.Args[0] 11250 v.reset(OpZeroExt8to16) 11251 v.AddArg(x) 11252 return true 11253 } 11254 // match: (Trunc32to16 (ZeroExt16to32 x)) 11255 // cond: 11256 // result: x 11257 for { 11258 v_0 := v.Args[0] 11259 if v_0.Op != OpZeroExt16to32 { 11260 break 11261 } 11262 x := v_0.Args[0] 11263 v.reset(OpCopy) 11264 v.Type = x.Type 11265 v.AddArg(x) 11266 return true 11267 } 
	// match: (Trunc32to16 (SignExt8to32 x))
	// cond:
	// result: (SignExt8to16 x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSignExt8to32 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpSignExt8to16)
		v.AddArg(x)
		return true
	}
	// match: (Trunc32to16 (SignExt16to32 x))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSignExt16to32 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Trunc32to16 (And32 (Const32 [y]) x))
	// cond: y&0xFFFF == 0xFFFF
	// result: (Trunc32to16 x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd32 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst32 {
			break
		}
		y := v_0_0.AuxInt
		x := v_0.Args[1]
		if !(y&0xFFFF == 0xFFFF) {
			break
		}
		v.reset(OpTrunc32to16)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpTrunc32to8 applies the generic rewrite rules for
// OpTrunc32to8 values: constant folding, cancelling a truncate against a
// matching zero/sign extension, and dropping a mask that already keeps the
// low 8 bits. It reports whether v was rewritten.
// NOTE: generated from gen/generic.rules — do not edit by hand.
func rewriteValuegeneric_OpTrunc32to8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc32to8 (Const32 [c]))
	// cond:
	// result: (Const8 [int64(int8(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v.reset(OpConst8)
		v.AuxInt = int64(int8(c))
		return true
	}
	// match: (Trunc32to8 (ZeroExt8to32 x))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpZeroExt8to32 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Trunc32to8 (SignExt8to32 x))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSignExt8to32 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
return true 11361 } 11362 // match: (Trunc32to8 (And32 (Const32 [y]) x)) 11363 // cond: y&0xFF == 0xFF 11364 // result: (Trunc32to8 x) 11365 for { 11366 v_0 := v.Args[0] 11367 if v_0.Op != OpAnd32 { 11368 break 11369 } 11370 v_0_0 := v_0.Args[0] 11371 if v_0_0.Op != OpConst32 { 11372 break 11373 } 11374 y := v_0_0.AuxInt 11375 x := v_0.Args[1] 11376 if !(y&0xFF == 0xFF) { 11377 break 11378 } 11379 v.reset(OpTrunc32to8) 11380 v.AddArg(x) 11381 return true 11382 } 11383 return false 11384 } 11385 func rewriteValuegeneric_OpTrunc64to16(v *Value, config *Config) bool { 11386 b := v.Block 11387 _ = b 11388 // match: (Trunc64to16 (Const64 [c])) 11389 // cond: 11390 // result: (Const16 [int64(int16(c))]) 11391 for { 11392 v_0 := v.Args[0] 11393 if v_0.Op != OpConst64 { 11394 break 11395 } 11396 c := v_0.AuxInt 11397 v.reset(OpConst16) 11398 v.AuxInt = int64(int16(c)) 11399 return true 11400 } 11401 // match: (Trunc64to16 (ZeroExt8to64 x)) 11402 // cond: 11403 // result: (ZeroExt8to16 x) 11404 for { 11405 v_0 := v.Args[0] 11406 if v_0.Op != OpZeroExt8to64 { 11407 break 11408 } 11409 x := v_0.Args[0] 11410 v.reset(OpZeroExt8to16) 11411 v.AddArg(x) 11412 return true 11413 } 11414 // match: (Trunc64to16 (ZeroExt16to64 x)) 11415 // cond: 11416 // result: x 11417 for { 11418 v_0 := v.Args[0] 11419 if v_0.Op != OpZeroExt16to64 { 11420 break 11421 } 11422 x := v_0.Args[0] 11423 v.reset(OpCopy) 11424 v.Type = x.Type 11425 v.AddArg(x) 11426 return true 11427 } 11428 // match: (Trunc64to16 (SignExt8to64 x)) 11429 // cond: 11430 // result: (SignExt8to16 x) 11431 for { 11432 v_0 := v.Args[0] 11433 if v_0.Op != OpSignExt8to64 { 11434 break 11435 } 11436 x := v_0.Args[0] 11437 v.reset(OpSignExt8to16) 11438 v.AddArg(x) 11439 return true 11440 } 11441 // match: (Trunc64to16 (SignExt16to64 x)) 11442 // cond: 11443 // result: x 11444 for { 11445 v_0 := v.Args[0] 11446 if v_0.Op != OpSignExt16to64 { 11447 break 11448 } 11449 x := v_0.Args[0] 11450 v.reset(OpCopy) 11451 v.Type = x.Type 11452 
v.AddArg(x) 11453 return true 11454 } 11455 // match: (Trunc64to16 (And64 (Const64 [y]) x)) 11456 // cond: y&0xFFFF == 0xFFFF 11457 // result: (Trunc64to16 x) 11458 for { 11459 v_0 := v.Args[0] 11460 if v_0.Op != OpAnd64 { 11461 break 11462 } 11463 v_0_0 := v_0.Args[0] 11464 if v_0_0.Op != OpConst64 { 11465 break 11466 } 11467 y := v_0_0.AuxInt 11468 x := v_0.Args[1] 11469 if !(y&0xFFFF == 0xFFFF) { 11470 break 11471 } 11472 v.reset(OpTrunc64to16) 11473 v.AddArg(x) 11474 return true 11475 } 11476 return false 11477 } 11478 func rewriteValuegeneric_OpTrunc64to32(v *Value, config *Config) bool { 11479 b := v.Block 11480 _ = b 11481 // match: (Trunc64to32 (Const64 [c])) 11482 // cond: 11483 // result: (Const32 [int64(int32(c))]) 11484 for { 11485 v_0 := v.Args[0] 11486 if v_0.Op != OpConst64 { 11487 break 11488 } 11489 c := v_0.AuxInt 11490 v.reset(OpConst32) 11491 v.AuxInt = int64(int32(c)) 11492 return true 11493 } 11494 // match: (Trunc64to32 (ZeroExt8to64 x)) 11495 // cond: 11496 // result: (ZeroExt8to32 x) 11497 for { 11498 v_0 := v.Args[0] 11499 if v_0.Op != OpZeroExt8to64 { 11500 break 11501 } 11502 x := v_0.Args[0] 11503 v.reset(OpZeroExt8to32) 11504 v.AddArg(x) 11505 return true 11506 } 11507 // match: (Trunc64to32 (ZeroExt16to64 x)) 11508 // cond: 11509 // result: (ZeroExt16to32 x) 11510 for { 11511 v_0 := v.Args[0] 11512 if v_0.Op != OpZeroExt16to64 { 11513 break 11514 } 11515 x := v_0.Args[0] 11516 v.reset(OpZeroExt16to32) 11517 v.AddArg(x) 11518 return true 11519 } 11520 // match: (Trunc64to32 (ZeroExt32to64 x)) 11521 // cond: 11522 // result: x 11523 for { 11524 v_0 := v.Args[0] 11525 if v_0.Op != OpZeroExt32to64 { 11526 break 11527 } 11528 x := v_0.Args[0] 11529 v.reset(OpCopy) 11530 v.Type = x.Type 11531 v.AddArg(x) 11532 return true 11533 } 11534 // match: (Trunc64to32 (SignExt8to64 x)) 11535 // cond: 11536 // result: (SignExt8to32 x) 11537 for { 11538 v_0 := v.Args[0] 11539 if v_0.Op != OpSignExt8to64 { 11540 break 11541 } 11542 x := v_0.Args[0] 
11543 v.reset(OpSignExt8to32) 11544 v.AddArg(x) 11545 return true 11546 } 11547 // match: (Trunc64to32 (SignExt16to64 x)) 11548 // cond: 11549 // result: (SignExt16to32 x) 11550 for { 11551 v_0 := v.Args[0] 11552 if v_0.Op != OpSignExt16to64 { 11553 break 11554 } 11555 x := v_0.Args[0] 11556 v.reset(OpSignExt16to32) 11557 v.AddArg(x) 11558 return true 11559 } 11560 // match: (Trunc64to32 (SignExt32to64 x)) 11561 // cond: 11562 // result: x 11563 for { 11564 v_0 := v.Args[0] 11565 if v_0.Op != OpSignExt32to64 { 11566 break 11567 } 11568 x := v_0.Args[0] 11569 v.reset(OpCopy) 11570 v.Type = x.Type 11571 v.AddArg(x) 11572 return true 11573 } 11574 // match: (Trunc64to32 (And64 (Const64 [y]) x)) 11575 // cond: y&0xFFFFFFFF == 0xFFFFFFFF 11576 // result: (Trunc64to32 x) 11577 for { 11578 v_0 := v.Args[0] 11579 if v_0.Op != OpAnd64 { 11580 break 11581 } 11582 v_0_0 := v_0.Args[0] 11583 if v_0_0.Op != OpConst64 { 11584 break 11585 } 11586 y := v_0_0.AuxInt 11587 x := v_0.Args[1] 11588 if !(y&0xFFFFFFFF == 0xFFFFFFFF) { 11589 break 11590 } 11591 v.reset(OpTrunc64to32) 11592 v.AddArg(x) 11593 return true 11594 } 11595 return false 11596 } 11597 func rewriteValuegeneric_OpTrunc64to8(v *Value, config *Config) bool { 11598 b := v.Block 11599 _ = b 11600 // match: (Trunc64to8 (Const64 [c])) 11601 // cond: 11602 // result: (Const8 [int64(int8(c))]) 11603 for { 11604 v_0 := v.Args[0] 11605 if v_0.Op != OpConst64 { 11606 break 11607 } 11608 c := v_0.AuxInt 11609 v.reset(OpConst8) 11610 v.AuxInt = int64(int8(c)) 11611 return true 11612 } 11613 // match: (Trunc64to8 (ZeroExt8to64 x)) 11614 // cond: 11615 // result: x 11616 for { 11617 v_0 := v.Args[0] 11618 if v_0.Op != OpZeroExt8to64 { 11619 break 11620 } 11621 x := v_0.Args[0] 11622 v.reset(OpCopy) 11623 v.Type = x.Type 11624 v.AddArg(x) 11625 return true 11626 } 11627 // match: (Trunc64to8 (SignExt8to64 x)) 11628 // cond: 11629 // result: x 11630 for { 11631 v_0 := v.Args[0] 11632 if v_0.Op != OpSignExt8to64 { 11633 break 11634 } 
11635 x := v_0.Args[0] 11636 v.reset(OpCopy) 11637 v.Type = x.Type 11638 v.AddArg(x) 11639 return true 11640 } 11641 // match: (Trunc64to8 (And64 (Const64 [y]) x)) 11642 // cond: y&0xFF == 0xFF 11643 // result: (Trunc64to8 x) 11644 for { 11645 v_0 := v.Args[0] 11646 if v_0.Op != OpAnd64 { 11647 break 11648 } 11649 v_0_0 := v_0.Args[0] 11650 if v_0_0.Op != OpConst64 { 11651 break 11652 } 11653 y := v_0_0.AuxInt 11654 x := v_0.Args[1] 11655 if !(y&0xFF == 0xFF) { 11656 break 11657 } 11658 v.reset(OpTrunc64to8) 11659 v.AddArg(x) 11660 return true 11661 } 11662 return false 11663 } 11664 func rewriteValuegeneric_OpXor16(v *Value, config *Config) bool { 11665 b := v.Block 11666 _ = b 11667 // match: (Xor16 x (Const16 <t> [c])) 11668 // cond: x.Op != OpConst16 11669 // result: (Xor16 (Const16 <t> [c]) x) 11670 for { 11671 x := v.Args[0] 11672 v_1 := v.Args[1] 11673 if v_1.Op != OpConst16 { 11674 break 11675 } 11676 t := v_1.Type 11677 c := v_1.AuxInt 11678 if !(x.Op != OpConst16) { 11679 break 11680 } 11681 v.reset(OpXor16) 11682 v0 := b.NewValue0(v.Line, OpConst16, t) 11683 v0.AuxInt = c 11684 v.AddArg(v0) 11685 v.AddArg(x) 11686 return true 11687 } 11688 // match: (Xor16 x x) 11689 // cond: 11690 // result: (Const16 [0]) 11691 for { 11692 x := v.Args[0] 11693 if x != v.Args[1] { 11694 break 11695 } 11696 v.reset(OpConst16) 11697 v.AuxInt = 0 11698 return true 11699 } 11700 // match: (Xor16 (Const16 [0]) x) 11701 // cond: 11702 // result: x 11703 for { 11704 v_0 := v.Args[0] 11705 if v_0.Op != OpConst16 { 11706 break 11707 } 11708 if v_0.AuxInt != 0 { 11709 break 11710 } 11711 x := v.Args[1] 11712 v.reset(OpCopy) 11713 v.Type = x.Type 11714 v.AddArg(x) 11715 return true 11716 } 11717 // match: (Xor16 x (Xor16 x y)) 11718 // cond: 11719 // result: y 11720 for { 11721 x := v.Args[0] 11722 v_1 := v.Args[1] 11723 if v_1.Op != OpXor16 { 11724 break 11725 } 11726 if x != v_1.Args[0] { 11727 break 11728 } 11729 y := v_1.Args[1] 11730 v.reset(OpCopy) 11731 v.Type = y.Type 11732 
v.AddArg(y) 11733 return true 11734 } 11735 // match: (Xor16 x (Xor16 y x)) 11736 // cond: 11737 // result: y 11738 for { 11739 x := v.Args[0] 11740 v_1 := v.Args[1] 11741 if v_1.Op != OpXor16 { 11742 break 11743 } 11744 y := v_1.Args[0] 11745 if x != v_1.Args[1] { 11746 break 11747 } 11748 v.reset(OpCopy) 11749 v.Type = y.Type 11750 v.AddArg(y) 11751 return true 11752 } 11753 // match: (Xor16 (Xor16 x y) x) 11754 // cond: 11755 // result: y 11756 for { 11757 v_0 := v.Args[0] 11758 if v_0.Op != OpXor16 { 11759 break 11760 } 11761 x := v_0.Args[0] 11762 y := v_0.Args[1] 11763 if x != v.Args[1] { 11764 break 11765 } 11766 v.reset(OpCopy) 11767 v.Type = y.Type 11768 v.AddArg(y) 11769 return true 11770 } 11771 // match: (Xor16 (Xor16 x y) y) 11772 // cond: 11773 // result: x 11774 for { 11775 v_0 := v.Args[0] 11776 if v_0.Op != OpXor16 { 11777 break 11778 } 11779 x := v_0.Args[0] 11780 y := v_0.Args[1] 11781 if y != v.Args[1] { 11782 break 11783 } 11784 v.reset(OpCopy) 11785 v.Type = x.Type 11786 v.AddArg(x) 11787 return true 11788 } 11789 return false 11790 } 11791 func rewriteValuegeneric_OpXor32(v *Value, config *Config) bool { 11792 b := v.Block 11793 _ = b 11794 // match: (Xor32 x (Const32 <t> [c])) 11795 // cond: x.Op != OpConst32 11796 // result: (Xor32 (Const32 <t> [c]) x) 11797 for { 11798 x := v.Args[0] 11799 v_1 := v.Args[1] 11800 if v_1.Op != OpConst32 { 11801 break 11802 } 11803 t := v_1.Type 11804 c := v_1.AuxInt 11805 if !(x.Op != OpConst32) { 11806 break 11807 } 11808 v.reset(OpXor32) 11809 v0 := b.NewValue0(v.Line, OpConst32, t) 11810 v0.AuxInt = c 11811 v.AddArg(v0) 11812 v.AddArg(x) 11813 return true 11814 } 11815 // match: (Xor32 x x) 11816 // cond: 11817 // result: (Const32 [0]) 11818 for { 11819 x := v.Args[0] 11820 if x != v.Args[1] { 11821 break 11822 } 11823 v.reset(OpConst32) 11824 v.AuxInt = 0 11825 return true 11826 } 11827 // match: (Xor32 (Const32 [0]) x) 11828 // cond: 11829 // result: x 11830 for { 11831 v_0 := v.Args[0] 11832 if v_0.Op 
!= OpConst32 { 11833 break 11834 } 11835 if v_0.AuxInt != 0 { 11836 break 11837 } 11838 x := v.Args[1] 11839 v.reset(OpCopy) 11840 v.Type = x.Type 11841 v.AddArg(x) 11842 return true 11843 } 11844 // match: (Xor32 x (Xor32 x y)) 11845 // cond: 11846 // result: y 11847 for { 11848 x := v.Args[0] 11849 v_1 := v.Args[1] 11850 if v_1.Op != OpXor32 { 11851 break 11852 } 11853 if x != v_1.Args[0] { 11854 break 11855 } 11856 y := v_1.Args[1] 11857 v.reset(OpCopy) 11858 v.Type = y.Type 11859 v.AddArg(y) 11860 return true 11861 } 11862 // match: (Xor32 x (Xor32 y x)) 11863 // cond: 11864 // result: y 11865 for { 11866 x := v.Args[0] 11867 v_1 := v.Args[1] 11868 if v_1.Op != OpXor32 { 11869 break 11870 } 11871 y := v_1.Args[0] 11872 if x != v_1.Args[1] { 11873 break 11874 } 11875 v.reset(OpCopy) 11876 v.Type = y.Type 11877 v.AddArg(y) 11878 return true 11879 } 11880 // match: (Xor32 (Xor32 x y) x) 11881 // cond: 11882 // result: y 11883 for { 11884 v_0 := v.Args[0] 11885 if v_0.Op != OpXor32 { 11886 break 11887 } 11888 x := v_0.Args[0] 11889 y := v_0.Args[1] 11890 if x != v.Args[1] { 11891 break 11892 } 11893 v.reset(OpCopy) 11894 v.Type = y.Type 11895 v.AddArg(y) 11896 return true 11897 } 11898 // match: (Xor32 (Xor32 x y) y) 11899 // cond: 11900 // result: x 11901 for { 11902 v_0 := v.Args[0] 11903 if v_0.Op != OpXor32 { 11904 break 11905 } 11906 x := v_0.Args[0] 11907 y := v_0.Args[1] 11908 if y != v.Args[1] { 11909 break 11910 } 11911 v.reset(OpCopy) 11912 v.Type = x.Type 11913 v.AddArg(x) 11914 return true 11915 } 11916 return false 11917 } 11918 func rewriteValuegeneric_OpXor64(v *Value, config *Config) bool { 11919 b := v.Block 11920 _ = b 11921 // match: (Xor64 x (Const64 <t> [c])) 11922 // cond: x.Op != OpConst64 11923 // result: (Xor64 (Const64 <t> [c]) x) 11924 for { 11925 x := v.Args[0] 11926 v_1 := v.Args[1] 11927 if v_1.Op != OpConst64 { 11928 break 11929 } 11930 t := v_1.Type 11931 c := v_1.AuxInt 11932 if !(x.Op != OpConst64) { 11933 break 11934 } 11935 
		v.reset(OpXor64)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Xor64 x x)
	// cond:
	// result: (Const64 [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	// match: (Xor64 (Const64 [0]) x)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Xor64 x (Xor64 x y))
	// cond:
	// result: y
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpXor64 {
			break
		}
		if x != v_1.Args[0] {
			break
		}
		y := v_1.Args[1]
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Xor64 x (Xor64 y x))
	// cond:
	// result: y
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpXor64 {
			break
		}
		y := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Xor64 (Xor64 x y) x)
	// cond:
	// result: y
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpXor64 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Xor64 (Xor64 x y) y)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpXor64 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if y != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpXor8 tries each generic rewrite rule for an Xor8
// value in order: constant canonicalization (constant moved to the first
// operand), x^x => 0, 0^x => x, and the xor-cancellation identities
// x^(x^y) => y in all four operand arrangements. It rewrites v in place
// and reports whether a rule fired.
func rewriteValuegeneric_OpXor8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Xor8 x (Const8 <t> [c]))
	// cond: x.Op != OpConst8
	// result: (Xor8 (Const8 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		// Guard against swapping two constants back and forth forever.
		if !(x.Op != OpConst8) {
			break
		}
		v.reset(OpXor8)
		v0 := b.NewValue0(v.Line, OpConst8, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Xor8 x x)
	// cond:
	// result: (Const8 [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	// match: (Xor8 (Const8 [0]) x)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Xor8 x (Xor8 x y))
	// cond:
	// result: y
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpXor8 {
			break
		}
		if x != v_1.Args[0] {
			break
		}
		y := v_1.Args[1]
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Xor8 x (Xor8 y x))
	// cond:
	// result: y
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpXor8 {
			break
		}
		y := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Xor8 (Xor8 x y) x)
	// cond:
	// result: y
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpXor8 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Xor8 (Xor8 x y) y)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpXor8 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if y != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpZero elides a Zero of freshly allocated memory:
// runtime.newobject already returns zeroed storage, so the Zero collapses
// to the allocation's memory state. It rewrites v in place and reports
// whether the rule fired.
func rewriteValuegeneric_OpZero(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Zero (Load (OffPtr [c] (SP)) mem) mem)
	// cond: mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize() + config.PtrSize
	// result: mem
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpLoad {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpOffPtr {
			break
		}
		c := v_0_0.AuxInt
		v_0_0_0 := v_0_0.Args[0]
		if v_0_0_0.Op != OpSP {
			break
		}
		mem := v_0.Args[1]
		if mem != v.Args[1] {
			break
		}
		// The offset check pins the load to newobject's result slot on the
		// stack (past the fixed frame and the one pointer-sized argument).
		if !(mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize()+config.PtrSize) {
			break
		}
		v.reset(OpCopy)
		v.Type = mem.Type
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValuegeneric_OpZeroExt16to32 removes a redundant truncate/extend
// pair: if x was produced by an unsigned 32-bit right shift of at least 16,
// its upper 16 bits are already zero, so (ZeroExt16to32 (Trunc32to16 x))
// is x itself.
func rewriteValuegeneric_OpZeroExt16to32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ZeroExt16to32 (Trunc32to16 x:(Rsh32Ux64 _ (Const64 [s]))))
	// cond: s >= 16
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpTrunc32to16 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpRsh32Ux64 {
			break
		}
		x_1 := x.Args[1]
		if x_1.Op != OpConst64 {
			break
		}
		s := x_1.AuxInt
		if !(s >= 16) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpZeroExt16to64 removes a redundant truncate/extend
// pair: an unsigned 64-bit right shift of at least 48 leaves only the low
// 16 bits set, so (ZeroExt16to64 (Trunc64to16 x)) is x itself.
func rewriteValuegeneric_OpZeroExt16to64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ZeroExt16to64 (Trunc64to16 x:(Rsh64Ux64 _ (Const64 [s]))))
	// cond: s >= 48
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpTrunc64to16 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpRsh64Ux64 {
			break
		}
		x_1 := x.Args[1]
		if x_1.Op != OpConst64 {
			break
		}
		s := x_1.AuxInt
		if !(s >= 48) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpZeroExt32to64 removes a redundant truncate/extend
// pair: an unsigned 64-bit right shift of at least 32 leaves only the low
// 32 bits set, so (ZeroExt32to64 (Trunc64to32 x)) is x itself.
func rewriteValuegeneric_OpZeroExt32to64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ZeroExt32to64 (Trunc64to32 x:(Rsh64Ux64 _ (Const64 [s]))))
	// cond: s >= 32
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpTrunc64to32 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpRsh64Ux64 {
			break
		}
		x_1 := x.Args[1]
		if x_1.Op != OpConst64 {
			break
		}
		s := x_1.AuxInt
		if !(s >= 32) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpZeroExt8to16 removes a redundant truncate/extend
// pair: an unsigned 16-bit right shift of at least 8 leaves only the low
// 8 bits set, so (ZeroExt8to16 (Trunc16to8 x)) is x itself.
func rewriteValuegeneric_OpZeroExt8to16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ZeroExt8to16 (Trunc16to8 x:(Rsh16Ux64 _ (Const64 [s]))))
	// cond: s >= 8
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpTrunc16to8 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpRsh16Ux64 {
			break
		}
		x_1 := x.Args[1]
		if x_1.Op != OpConst64 {
			break
		}
		s := x_1.AuxInt
		if !(s >= 8) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpZeroExt8to32 removes a redundant truncate/extend
// pair: an unsigned 32-bit right shift of at least 24 leaves only the low
// 8 bits set, so (ZeroExt8to32 (Trunc32to8 x)) is x itself.
func rewriteValuegeneric_OpZeroExt8to32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ZeroExt8to32 (Trunc32to8 x:(Rsh32Ux64 _ (Const64 [s]))))
	// cond: s >= 24
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpTrunc32to8 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpRsh32Ux64 {
			break
		}
		x_1 := x.Args[1]
		if x_1.Op != OpConst64 {
			break
		}
		s := x_1.AuxInt
		if !(s >= 24) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpZeroExt8to64 removes a redundant truncate/extend
// pair: an unsigned 64-bit right shift of at least 56 leaves only the low
// 8 bits set, so (ZeroExt8to64 (Trunc64to8 x)) is x itself.
func rewriteValuegeneric_OpZeroExt8to64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ZeroExt8to64 (Trunc64to8 x:(Rsh64Ux64 _ (Const64 [s]))))
	// cond: s >= 56
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpTrunc64to8 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpRsh64Ux64 {
			break
		}
		x_1 := x.Args[1]
		if x_1.Op != OpConst64 {
			break
		}
		s := x_1.AuxInt
		if !(s >= 56) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteBlockgeneric applies the generic block rewrite rules: an If on a
// negated condition flips its successors, and an If on a constant boolean
// becomes an unconditional First block (successors swapped when the
// constant is false). It mutates b in place and reports whether a rule
// fired.
func rewriteBlockgeneric(b *Block, config *Config) bool {
	switch b.Kind {
	case BlockIf:
		// match: (If (Not cond) yes no)
		// cond:
		// result: (If cond no yes)
		for {
			v := b.Control
			if v.Op != OpNot {
				break
			}
			cond := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockIf
			b.SetControl(cond)
			b.swapSuccessors()
			_ = no
			_ = yes
			return true
		}
		// match: (If (ConstBool [c]) yes no)
		// cond: c == 1
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpConstBool {
				break
			}
			c := v.AuxInt
			yes := b.Succs[0]
			no := b.Succs[1]
			if !(c == 1) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			_ = yes
			_ = no
			return true
		}
		// match: (If (ConstBool [c]) yes no)
		// cond: c == 0
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpConstBool {
				break
			}
			c := v.AuxInt
			yes := b.Succs[0]
			no := b.Succs[1]
			if !(c == 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			_ = no
			_ = yes
			return true
		}
	}
	return false
}