1 // Do not edit. Bootstrap copy of /Volumes/Android/buildbot/src/android/build-tools/out/obj/go/src/cmd/compile/internal/arm64/peep.go 2 3 //line /Volumes/Android/buildbot/src/android/build-tools/out/obj/go/src/cmd/compile/internal/arm64/peep.go:1 4 // Derived from Inferno utils/6c/peep.c 5 // http://code.google.com/p/inferno-os/source/browse/utils/6c/peep.c 6 // 7 // Copyright 1994-1999 Lucent Technologies Inc. All rights reserved. 8 // Portions Copyright 1995-1997 C H Forsyth (forsyth (a] terzarima.net) 9 // Portions Copyright 1997-1999 Vita Nuova Limited 10 // Portions Copyright 2000-2007 Vita Nuova Holdings Limited (www.vitanuova.com) 11 // Portions Copyright 2004,2006 Bruce Ellis 12 // Portions Copyright 2005-2007 C H Forsyth (forsyth (a] terzarima.net) 13 // Revisions Copyright 2000-2007 Lucent Technologies Inc. and others 14 // Portions Copyright 2009 The Go Authors. All rights reserved. 15 // 16 // Permission is hereby granted, free of charge, to any person obtaining a copy 17 // of this software and associated documentation files (the "Software"), to deal 18 // in the Software without restriction, including without limitation the rights 19 // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 20 // copies of the Software, and to permit persons to whom the Software is 21 // furnished to do so, subject to the following conditions: 22 // 23 // The above copyright notice and this permission notice shall be included in 24 // all copies or substantial portions of the Software. 25 // 26 // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 27 // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 28 // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 29 // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 30 // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 31 // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 32 // THE SOFTWARE. 33 34 package arm64 35 36 import ( 37 "bootstrap/compile/internal/gc" 38 "bootstrap/internal/obj" 39 "bootstrap/internal/obj/arm64" 40 "fmt" 41 ) 42 43 var gactive uint32 44 45 func peep(firstp *obj.Prog) { 46 g := (*gc.Graph)(gc.Flowstart(firstp, nil)) 47 if g == nil { 48 return 49 } 50 gactive = 0 51 52 var p *obj.Prog 53 var r *gc.Flow 54 var t int 55 loop1: 56 if gc.Debug['P'] != 0 && gc.Debug['v'] != 0 { 57 gc.Dumpit("loop1", g.Start, 0) 58 } 59 60 t = 0 61 for r = g.Start; r != nil; r = r.Link { 62 p = r.Prog 63 64 // TODO(minux) Handle smaller moves. arm and amd64 65 // distinguish between moves that *must* sign/zero 66 // extend and moves that don't care so they 67 // can eliminate moves that don't care without 68 // breaking moves that do care. This might let us 69 // simplify or remove the next peep loop, too. 
70 if p.As == arm64.AMOVD || p.As == arm64.AFMOVD { 71 if regtyp(&p.To) { 72 // Try to eliminate reg->reg moves 73 if regtyp(&p.From) { 74 if p.From.Type == p.To.Type { 75 if copyprop(r) { 76 excise(r) 77 t++ 78 } else if subprop(r) && copyprop(r) { 79 excise(r) 80 t++ 81 } 82 } 83 } 84 } 85 } 86 } 87 88 if t != 0 { 89 goto loop1 90 } 91 92 /* 93 * look for MOVB x,R; MOVB R,R (for small MOVs not handled above) 94 */ 95 var p1 *obj.Prog 96 var r1 *gc.Flow 97 for r := (*gc.Flow)(g.Start); r != nil; r = r.Link { 98 p = r.Prog 99 switch p.As { 100 default: 101 continue 102 103 case arm64.AMOVH, 104 arm64.AMOVHU, 105 arm64.AMOVB, 106 arm64.AMOVBU, 107 arm64.AMOVW, 108 arm64.AMOVWU: 109 if p.To.Type != obj.TYPE_REG { 110 continue 111 } 112 } 113 114 r1 = r.Link 115 if r1 == nil { 116 continue 117 } 118 p1 = r1.Prog 119 if p1.As != p.As { 120 continue 121 } 122 if p1.From.Type != obj.TYPE_REG || p1.From.Reg != p.To.Reg { 123 continue 124 } 125 if p1.To.Type != obj.TYPE_REG || p1.To.Reg != p.To.Reg { 126 continue 127 } 128 excise(r1) 129 } 130 131 if gc.Debug['D'] > 1 { 132 goto ret /* allow following code improvement to be suppressed */ 133 } 134 135 // MOVD $c, R'; ADD R', R (R' unused) -> ADD $c, R 136 for r := (*gc.Flow)(g.Start); r != nil; r = r.Link { 137 p = r.Prog 138 switch p.As { 139 default: 140 continue 141 142 case arm64.AMOVD: 143 if p.To.Type != obj.TYPE_REG { 144 continue 145 } 146 if p.From.Type != obj.TYPE_CONST { 147 continue 148 } 149 if p.From.Offset < 0 || 4096 <= p.From.Offset { 150 continue 151 } 152 } 153 r1 = r.Link 154 if r1 == nil { 155 continue 156 } 157 p1 = r1.Prog 158 if p1.As != arm64.AADD && p1.As != arm64.ASUB { // TODO(aram): also logical after we have bimm. 
159 continue 160 } 161 if p1.From.Type != obj.TYPE_REG || p1.From.Reg != p.To.Reg { 162 continue 163 } 164 if p1.To.Type != obj.TYPE_REG { 165 continue 166 } 167 if gc.Debug['P'] != 0 { 168 fmt.Printf("encoding $%d directly into %v in:\n%v\n%v\n", p.From.Offset, obj.Aconv(int(p1.As)), p, p1) 169 } 170 p1.From.Type = obj.TYPE_CONST 171 p1.From = p.From 172 excise(r) 173 } 174 175 /* TODO(minux): 176 * look for OP x,y,R; CMP R, $0 -> OP.S x,y,R 177 * when OP can set condition codes correctly 178 */ 179 180 ret: 181 gc.Flowend(g) 182 } 183 184 func excise(r *gc.Flow) { 185 p := (*obj.Prog)(r.Prog) 186 if gc.Debug['P'] != 0 && gc.Debug['v'] != 0 { 187 fmt.Printf("%v ===delete===\n", p) 188 } 189 obj.Nopout(p) 190 gc.Ostats.Ndelmov++ 191 } 192 193 func regtyp(a *obj.Addr) bool { 194 // TODO(rsc): Floating point register exclusions? 195 return a.Type == obj.TYPE_REG && arm64.REG_R0 <= a.Reg && a.Reg <= arm64.REG_F31 && a.Reg != arm64.REGZERO 196 } 197 198 /* 199 * the idea is to substitute 200 * one register for another 201 * from one MOV to another 202 * MOV a, R1 203 * ADD b, R1 / no use of R2 204 * MOV R1, R2 205 * would be converted to 206 * MOV a, R2 207 * ADD b, R2 208 * MOV R2, R1 209 * hopefully, then the former or latter MOV 210 * will be eliminated by copy propagation. 211 * 212 * r0 (the argument, not the register) is the MOV at the end of the 213 * above sequences. This returns 1 if it modified any instructions. 
 */

// subprop walks backward from the move at r0 (MOV v1, v2) looking for
// the instruction that sets v1. If found (and v2 is not touched in
// between), it renames v1 to v2 over that range and swaps the two
// registers in the move itself, in the hope that copyprop can then
// remove one of the moves. Returns true if it modified instructions.
func subprop(r0 *gc.Flow) bool {
	p := (*obj.Prog)(r0.Prog)
	v1 := (*obj.Addr)(&p.From)
	if !regtyp(v1) {
		return false
	}
	v2 := (*obj.Addr)(&p.To)
	if !regtyp(v2) {
		return false
	}
	// Walk back along the unique-predecessor chain only; any join
	// point or call terminates the search.
	for r := gc.Uniqp(r0); r != nil; r = gc.Uniqp(r) {
		if gc.Uniqs(r) == nil {
			break
		}
		p = r.Prog
		if p.As == obj.AVARDEF || p.As == obj.AVARKILL {
			continue
		}
		if p.Info.Flags&gc.Call != 0 {
			return false
		}

		// Pure write (no read) of the destination operand:
		// candidate for the instruction that defines v1.
		if p.Info.Flags&(gc.RightRead|gc.RightWrite) == gc.RightWrite {
			if p.To.Type == v1.Type {
				if p.To.Reg == v1.Reg {
					// Rename the defining write itself,
					// then every instruction between it
					// and r0.
					copysub(&p.To, v1, v2, 1)
					if gc.Debug['P'] != 0 {
						fmt.Printf("gotit: %v->%v\n%v", gc.Ctxt.Dconv(v1), gc.Ctxt.Dconv(v2), r.Prog)
						if p.From.Type == v2.Type {
							fmt.Printf(" excise")
						}
						fmt.Printf("\n")
					}

					for r = gc.Uniqs(r); r != r0; r = gc.Uniqs(r) {
						p = r.Prog
						copysub(&p.From, v1, v2, 1)
						copysub1(p, v1, v2, 1)
						copysub(&p.To, v1, v2, 1)
						if gc.Debug['P'] != 0 {
							fmt.Printf("%v\n", r.Prog)
						}
					}

					// Swap the registers in the original
					// move (v1/v2 point into r0.Prog).
					t := int(int(v1.Reg))
					v1.Reg = v2.Reg
					v2.Reg = int16(t)
					if gc.Debug['P'] != 0 {
						fmt.Printf("%v last\n", r.Prog)
					}
					return true
				}
			}
		}

		// Give up if v2 is used, or if v1 appears anywhere in
		// this instruction (copysub with f==0 only probes).
		if copyau(&p.From, v2) || copyau1(p, v2) || copyau(&p.To, v2) {
			break
		}
		if copysub(&p.From, v1, v2, 0) != 0 || copysub1(p, v1, v2, 0) != 0 || copysub(&p.To, v1, v2, 0) != 0 {
			break
		}
	}

	return false
}

/*
 * The idea is to remove redundant copies.
 *	v1->v2	F=0
 *	(use v2	s/v2/v1/)*
 *	set v1	F=1
 *	use v2	return fail (v1->v2 move must remain)
 *	-----------------
 *	v1->v2	F=0
 *	(use v2	s/v2/v1/)*
 *	set v1	F=1
 *	set v2	return success (caller can remove v1->v2 move)
 */
// copyprop reports whether the move at r0 (MOV v1, v2) can be removed:
// either it is a self-move, or every downstream use of v2 could be
// rewritten to use v1 instead (the rewriting is performed by copy1).
func copyprop(r0 *gc.Flow) bool {
	p := (*obj.Prog)(r0.Prog)
	v1 := (*obj.Addr)(&p.From)
	v2 := (*obj.Addr)(&p.To)
	if copyas(v1, v2) {
		if gc.Debug['P'] != 0 {
			fmt.Printf("eliminating self-move: %v\n", r0.Prog)
		}
		return true
	}

	// New generation: copy1 marks nodes it visits with gactive.
	gactive++
	if gc.Debug['P'] != 0 {
		fmt.Printf("trying to eliminate %v->%v move from:\n%v\n", gc.Ctxt.Dconv(v1), gc.Ctxt.Dconv(v2), r0.Prog)
	}
	return copy1(v1, v2, r0.S1, 0)
}

// copy1 replaces uses of v2 with v1 starting at r and returns 1 if
// all uses were rewritten.
//
// f records whether v1 may have been overwritten since the original
// move: once f != 0, substitution is no longer legal and any further
// use of v2 forces failure.
func copy1(v1 *obj.Addr, v2 *obj.Addr, r *gc.Flow, f int) bool {
	// Already visited in this generation; assume success for this path.
	if uint32(r.Active) == gactive {
		if gc.Debug['P'] != 0 {
			fmt.Printf("act set; return 1\n")
		}
		return true
	}

	r.Active = int32(gactive)
	if gc.Debug['P'] != 0 {
		fmt.Printf("copy1 replace %v with %v f=%d\n", gc.Ctxt.Dconv(v2), gc.Ctxt.Dconv(v1), f)
	}
	var t int
	var p *obj.Prog
	// Follow the fall-through successor chain; branch successors
	// (r.S2) are handled by recursion at the bottom of the loop.
	for ; r != nil; r = r.S1 {
		p = r.Prog
		if gc.Debug['P'] != 0 {
			fmt.Printf("%v", p)
		}
		if f == 0 && gc.Uniqp(r) == nil {
			// Multiple predecessors; conservatively
			// assume v1 was set on other path
			f = 1

			if gc.Debug['P'] != 0 {
				fmt.Printf("; merge; f=%d", f)
			}
		}

		// Classify how p touches v2 (see copyu for codes).
		t = copyu(p, v2, nil)
		switch t {
		case 2: /* rar, can't split */
			if gc.Debug['P'] != 0 {
				fmt.Printf("; %v rar; return 0\n", gc.Ctxt.Dconv(v2))
			}
			return false

		case 3: /* set */
			if gc.Debug['P'] != 0 {
				fmt.Printf("; %v set; return 1\n", gc.Ctxt.Dconv(v2))
			}
			return true

		case 1, /* used, substitute */
			4: /* use and set */
			if f != 0 {
				if gc.Debug['P'] == 0 {
					return false
				}
				if t == 4 {
					fmt.Printf("; %v used+set and f=%d; return 0\n", gc.Ctxt.Dconv(v2), f)
				} else {
					fmt.Printf("; %v used and f=%d; return 0\n", gc.Ctxt.Dconv(v2), f)
				}
				return false
			}

			// Perform the actual v2 -> v1 substitution in p.
			if copyu(p, v2, v1) != 0 {
				if gc.Debug['P'] != 0 {
					fmt.Printf("; sub fail; return 0\n")
				}
				return false
			}

			if gc.Debug['P'] != 0 {
				fmt.Printf("; sub %v->%v\n => %v", gc.Ctxt.Dconv(v2), gc.Ctxt.Dconv(v1), p)
			}
			if t == 4 {
				if gc.Debug['P'] != 0 {
					fmt.Printf("; %v used+set; return 1\n", gc.Ctxt.Dconv(v2))
				}
				return true
			}
		}

		// If p overwrites v1, later uses of v2 can no longer be
		// redirected to v1 on this path.
		if f == 0 {
			t = copyu(p, v1, nil)
			if f == 0 && (t == 2 || t == 3 || t == 4) {
				f = 1
				if gc.Debug['P'] != 0 {
					fmt.Printf("; %v set and !f; f=%d", gc.Ctxt.Dconv(v1), f)
				}
			}
		}

		if gc.Debug['P'] != 0 {
			fmt.Printf("\n")
		}
		if r.S2 != nil {
			if !copy1(v1, v2, r.S2, f) {
				return false
			}
		}
	}

	return true
}

// If s==nil, copyu returns the set/use of v in p; otherwise, it
// modifies p to replace reads of v with reads of s and returns 0 for
// success or non-zero for failure.
//
// If s==nil, copy returns one of the following values:
//	1 if v only used
//	2 if v is set and used in one address (read-alter-rewrite;
//	  can't substitute)
//	3 if v is only set
//	4 if v is set in one address and used in another (so addresses
//	  can be rewritten independently)
//	0 otherwise (not touched)
func copyu(p *obj.Prog, v *obj.Addr, s *obj.Addr) int {
	if p.From3Type() != obj.TYPE_NONE {
		// 7g never generates a from3
		fmt.Printf("copyu: from3 (%v) not implemented\n", gc.Ctxt.Dconv(p.From3))
	}
	if p.RegTo2 != obj.REG_NONE {
		// 7g never generates a to2
		fmt.Printf("copyu: RegTo2 (%v) not implemented\n", obj.Rconv(int(p.RegTo2)))
	}

	switch p.As {
	// Unknown instruction: be conservative and report
	// read-alter-rewrite, which blocks all substitution.
	default:
		fmt.Printf("copyu: can't find %v\n", obj.Aconv(int(p.As)))
		return 2

	case obj.ANOP, /* read p->from, write p->to */
		arm64.ANEG,
		arm64.AFNEGD,
		arm64.AFNEGS,
		arm64.AFSQRTD,
		arm64.AFCVTZSD,
		arm64.AFCVTZSS,
		arm64.AFCVTZSDW,
		arm64.AFCVTZSSW,
		arm64.AFCVTZUD,
		arm64.AFCVTZUS,
		arm64.AFCVTZUDW,
		arm64.AFCVTZUSW,
		arm64.AFCVTSD,
		arm64.AFCVTDS,
		arm64.ASCVTFD,
		arm64.ASCVTFS,
		arm64.ASCVTFWD,
		arm64.ASCVTFWS,
		arm64.AUCVTFD,
		arm64.AUCVTFS,
		arm64.AUCVTFWD,
		arm64.AUCVTFWS,
		arm64.AMOVB,
		arm64.AMOVBU,
		arm64.AMOVH,
		arm64.AMOVHU,
		arm64.AMOVW,
		arm64.AMOVWU,
		arm64.AMOVD,
		arm64.AFMOVS,
		arm64.AFMOVD:
		// Plain (unconditional, non-special) form.
		if p.Scond == 0 {
			if s != nil {
				if copysub(&p.From, v, s, 1) != 0 {
					return 1
				}

				// Update only indirect uses of v in p->to
				if !copyas(&p.To, v) {
					if copysub(&p.To, v, s, 1) != 0 {
						return 1
					}
				}
				return 0
			}

			if copyas(&p.To, v) {
				// Fix up implicit from
				if p.From.Type == obj.TYPE_NONE {
					p.From = p.To
				}
				if copyau(&p.From, v) {
					return 4
				}
				return 3
			}

			if copyau(&p.From, v) {
				return 1
			}
			if copyau(&p.To, v) {
				// p->to only indirectly uses v
				return 1
			}

			return 0
		}

		// Scond != 0: pre/post-indexed addressing updates the
		// base register, making it read-alter-rewrite.
		/* rar p->from, write p->to or read p->from, rar p->to */
		if p.From.Type == obj.TYPE_MEM {
			if copyas(&p.From, v) {
				// No s!=nil check; need to fail
				// anyway in that case
				return 2
			}

			if s != nil {
				if copysub(&p.To, v, s, 1) != 0 {
					return 1
				}
				return 0
			}

			if copyas(&p.To, v) {
				return 3
			}
		} else if p.To.Type == obj.TYPE_MEM {
			if copyas(&p.To, v) {
				return 2
			}
			if s != nil {
				if copysub(&p.From, v, s, 1) != 0 {
					return 1
				}
				return 0
			}

			if copyau(&p.From, v) {
				return 1
			}
		} else {
			fmt.Printf("copyu: bad %v\n", p)
		}

		return 0

	case arm64.AADD, /* read p->from, read p->reg, write p->to */
		arm64.ASUB,
		arm64.AAND,
		arm64.AORR,
		arm64.AEOR,
		arm64.AMUL,
		arm64.ASMULL,
		arm64.AUMULL,
		arm64.ASMULH,
		arm64.AUMULH,
		arm64.ASDIV,
		arm64.AUDIV,
		arm64.ALSL,
		arm64.ALSR,
		arm64.AASR,
		arm64.AFADDD,
		arm64.AFADDS,
		arm64.AFSUBD,
		arm64.AFSUBS,
		arm64.AFMULD,
		arm64.AFMULS,
		arm64.AFDIVD,
		arm64.AFDIVS:
		if s != nil {
			if copysub(&p.From, v, s, 1) != 0 {
				return 1
			}
			if copysub1(p, v, s, 1) != 0 {
				return 1
			}

			// Update only indirect uses of v in p->to
			if !copyas(&p.To, v) {
				if copysub(&p.To, v, s, 1) != 0 {
					return 1
				}
			}
			return 0
		}

		if copyas(&p.To, v) {
			if p.Reg == 0 {
				// Fix up implicit reg (e.g., ADD
				// R3,R4 -> ADD R3,R4,R4) so we can
				// update reg and to separately.
				p.Reg = p.To.Reg
			}

			if copyau(&p.From, v) {
				return 4
			}
			if copyau1(p, v) {
				return 4
			}
			return 3
		}

		if copyau(&p.From, v) {
			return 1
		}
		if copyau1(p, v) {
			return 1
		}
		if copyau(&p.To, v) {
			return 1
		}
		return 0

	// Conditional branches touch no registers.
	case arm64.ABEQ,
		arm64.ABNE,
		arm64.ABGE,
		arm64.ABLT,
		arm64.ABGT,
		arm64.ABLE,
		arm64.ABLO,
		arm64.ABLS,
		arm64.ABHI,
		arm64.ABHS:
		return 0

	case obj.ACHECKNIL, /* read p->from */
		arm64.ACMP, /* read p->from, read p->reg */
		arm64.AFCMPD,
		arm64.AFCMPS:
		if s != nil {
			if copysub(&p.From, v, s, 1) != 0 {
				return 1
			}
			return copysub1(p, v, s, 1)
		}

		if copyau(&p.From, v) {
			return 1
		}
		if copyau1(p, v) {
			return 1
		}
		return 0

	case arm64.AB: /* read p->to */
		if s != nil {
			if copysub(&p.To, v, s, 1) != 0 {
				return 1
			}
			return 0
		}

		if copyau(&p.To, v) {
			return 1
		}
		return 0

	case obj.ARET: /* funny */
		if s != nil {
			return 0
		}

		// All registers die at this point, so claim
		// everything is set (and not used).
		return 3

	case arm64.ABL: /* funny */
		// Indirect call through v: read-alter-rewrite.
		if p.From.Type == obj.TYPE_REG && v.Type == obj.TYPE_REG && p.From.Reg == v.Reg {
			return 2
		}

		if s != nil {
			if copysub(&p.To, v, s, 1) != 0 {
				return 1
			}
			return 0
		}

		if copyau(&p.To, v) {
			return 4
		}
		return 3

	// R31 is zero, used by DUFFZERO, cannot be substituted.
	// R16 is ptr to memory, used and set, cannot be substituted.
	// NOTE(review): these compare v.Reg against raw 31/16 rather than
	// the arm64.REG_R* encodings used elsewhere in this file (e.g. by
	// regtyp) — verify these literals ever match a register operand.
	case obj.ADUFFZERO:
		if v.Type == obj.TYPE_REG {
			if v.Reg == 31 {
				return 1
			}
			if v.Reg == 16 {
				return 2
			}
		}

		return 0

	// R16, R17 are ptr to src, dst, used and set, cannot be substituted.
	// R27 is scratch, set by DUFFCOPY, cannot be substituted.
	// NOTE(review): same raw-number comparison concern as ADUFFZERO.
	case obj.ADUFFCOPY:
		if v.Type == obj.TYPE_REG {
			if v.Reg == 16 || v.Reg == 17 {
				return 2
			}
			if v.Reg == 27 {
				return 3
			}
		}

		return 0

	// Pseudo-instructions and hints touch no registers.
	case arm64.AHINT,
		obj.ATEXT,
		obj.APCDATA,
		obj.AFUNCDATA,
		obj.AVARDEF,
		obj.AVARKILL:
		return 0
	}
}

// copyas reports whether a and v address the same register.
//
// If a is the from operand, this means this operation reads the
// register in v. If a is the to operand, this means this operation
// writes the register in v.
func copyas(a *obj.Addr, v *obj.Addr) bool {
	if regtyp(v) {
		if a.Type == v.Type {
			if a.Reg == v.Reg {
				return true
			}
		}
	}
	return false
}

// copyau reports whether a either directly or indirectly addresses the
// same register as v.
//
// If a is the from operand, this means this operation reads the
// register in v. If a is the to operand, this means the operation
// either reads or writes the register in v (if !copyas(a, v), then
// the operation reads the register in v).
func copyau(a *obj.Addr, v *obj.Addr) bool {
	if copyas(a, v) {
		return true
	}
	if v.Type == obj.TYPE_REG {
		// Indirect use: v is the base register of a memory or
		// register-indirect address operand.
		if a.Type == obj.TYPE_MEM || (a.Type == obj.TYPE_ADDR && a.Reg != 0) {
			if v.Reg == a.Reg {
				return true
			}
		}
	}
	return false
}

// copyau1 reports whether p->reg references the same register as v and v
// is a direct reference.
func copyau1(p *obj.Prog, v *obj.Addr) bool {
	if regtyp(v) && v.Reg != 0 {
		if p.Reg == v.Reg {
			return true
		}
	}
	return false
}

// copysub replaces v with s in a if f!=0 or indicates it if could if f==0.
// Returns 1 on failure to substitute (it always succeeds on arm64).
771 func copysub(a *obj.Addr, v *obj.Addr, s *obj.Addr, f int) int { 772 if f != 0 { 773 if copyau(a, v) { 774 a.Reg = s.Reg 775 } 776 } 777 return 0 778 } 779 780 // copysub1 replaces v with s in p1->reg if f!=0 or indicates if it could if f==0. 781 // Returns 1 on failure to substitute (it always succeeds on arm64). 782 func copysub1(p1 *obj.Prog, v *obj.Addr, s *obj.Addr, f int) int { 783 if f != 0 { 784 if copyau1(p1, v) { 785 p1.Reg = s.Reg 786 } 787 } 788 return 0 789 } 790 791 func sameaddr(a *obj.Addr, v *obj.Addr) bool { 792 if a.Type != v.Type { 793 return false 794 } 795 if regtyp(v) && a.Reg == v.Reg { 796 return true 797 } 798 if v.Type == obj.NAME_AUTO || v.Type == obj.NAME_PARAM { 799 if v.Offset == a.Offset { 800 return true 801 } 802 } 803 return false 804 } 805 806 func smallindir(a *obj.Addr, reg *obj.Addr) bool { 807 return reg.Type == obj.TYPE_REG && a.Type == obj.TYPE_MEM && a.Reg == reg.Reg && 0 <= a.Offset && a.Offset < 4096 808 } 809 810 func stackaddr(a *obj.Addr) bool { 811 return a.Type == obj.TYPE_REG && a.Reg == arm64.REGSP 812 } 813