1 // Copyright 2009 The Go Authors. All rights reserved. 2 // Use of this source code is governed by a BSD-style 3 // license that can be found in the LICENSE file. 4 5 package gc 6 7 import ( 8 "cmd/internal/obj" 9 "cmd/internal/sys" 10 "fmt" 11 "strings" 12 ) 13 14 // The constant is known to runtime. 15 const ( 16 tmpstringbufsize = 32 17 ) 18 19 func walk(fn *Node) { 20 Curfn = fn 21 22 if Debug['W'] != 0 { 23 s := fmt.Sprintf("\nbefore %v", Curfn.Func.Nname.Sym) 24 dumplist(s, Curfn.Nbody) 25 } 26 27 lno := lineno 28 29 // Final typecheck for any unused variables. 30 for i, ln := range fn.Func.Dcl { 31 if ln.Op == ONAME && (ln.Class == PAUTO || ln.Class == PAUTOHEAP) { 32 ln = typecheck(ln, Erv|Easgn) 33 fn.Func.Dcl[i] = ln 34 } 35 } 36 37 // Propagate the used flag for typeswitch variables up to the NONAME in it's definition. 38 for _, ln := range fn.Func.Dcl { 39 if ln.Op == ONAME && (ln.Class == PAUTO || ln.Class == PAUTOHEAP) && ln.Name.Defn != nil && ln.Name.Defn.Op == OTYPESW && ln.Used { 40 ln.Name.Defn.Left.Used = true 41 } 42 } 43 44 for _, ln := range fn.Func.Dcl { 45 if ln.Op != ONAME || (ln.Class != PAUTO && ln.Class != PAUTOHEAP) || ln.Sym.Name[0] == '&' || ln.Used { 46 continue 47 } 48 if defn := ln.Name.Defn; defn != nil && defn.Op == OTYPESW { 49 if defn.Left.Used { 50 continue 51 } 52 lineno = defn.Left.Lineno 53 yyerror("%v declared and not used", ln.Sym) 54 defn.Left.Used = true // suppress repeats 55 } else { 56 lineno = ln.Lineno 57 yyerror("%v declared and not used", ln.Sym) 58 } 59 } 60 61 lineno = lno 62 if nerrors != 0 { 63 return 64 } 65 walkstmtlist(Curfn.Nbody.Slice()) 66 if Debug['W'] != 0 { 67 s := fmt.Sprintf("after walk %v", Curfn.Func.Nname.Sym) 68 dumplist(s, Curfn.Nbody) 69 } 70 71 heapmoves() 72 if Debug['W'] != 0 && Curfn.Func.Enter.Len() > 0 { 73 s := fmt.Sprintf("enter %v", Curfn.Func.Nname.Sym) 74 dumplist(s, Curfn.Func.Enter) 75 } 76 } 77 78 func walkstmtlist(s []*Node) { 79 for i := range s { 80 s[i] = walkstmt(s[i]) 81 } 82 } 83 84 func samelist(a, b []*Node) bool { 85 if len(a) != len(b) { 86 return false 87 } 88 for i, n := range a { 89 if n != b[i] { 90 return false 91 } 92 } 93 return true 94 } 95 96 func paramoutheap(fn *Node) bool { 97 for _, ln := range fn.Func.Dcl { 98 switch ln.Class { 99 case PPARAMOUT: 100 if ln.isParamStackCopy() || ln.Addrtaken { 101 return true 102 } 103 104 case PAUTO: 105 // stop early - parameters are over 106 return false 107 } 108 } 109 110 return false 111 } 112 113 // adds "adjust" to all the argument locations for the call n. 114 // n must be a defer or go node that has already been walked. 115 func adjustargs(n *Node, adjust int) { 116 var arg *Node 117 var lhs *Node 118 119 callfunc := n.Left 120 for _, arg = range callfunc.List.Slice() { 121 if arg.Op != OAS { 122 yyerror("call arg not assignment") 123 } 124 lhs = arg.Left 125 if lhs.Op == ONAME { 126 // This is a temporary introduced by reorder1. 127 // The real store to the stack appears later in the arg list. 128 continue 129 } 130 131 if lhs.Op != OINDREGSP { 132 yyerror("call argument store does not use OINDREGSP") 133 } 134 135 // can't really check this in machine-indep code. 136 //if(lhs->val.u.reg != D_SP) 137 // yyerror("call arg assign not indreg(SP)"); 138 lhs.Xoffset += int64(adjust) 139 } 140 } 141 142 // The result of walkstmt MUST be assigned back to n, e.g. 143 // n.Left = walkstmt(n.Left) 144 func walkstmt(n *Node) *Node { 145 if n == nil { 146 return n 147 } 148 if n.IsStatic { // don't walk, generated by anylit. 
149 return n 150 } 151 152 setlineno(n) 153 154 walkstmtlist(n.Ninit.Slice()) 155 156 switch n.Op { 157 default: 158 if n.Op == ONAME { 159 yyerror("%v is not a top level statement", n.Sym) 160 } else { 161 yyerror("%v is not a top level statement", n.Op) 162 } 163 Dump("nottop", n) 164 165 case OAS, 166 OASOP, 167 OAS2, 168 OAS2DOTTYPE, 169 OAS2RECV, 170 OAS2FUNC, 171 OAS2MAPR, 172 OCLOSE, 173 OCOPY, 174 OCALLMETH, 175 OCALLINTER, 176 OCALL, 177 OCALLFUNC, 178 ODELETE, 179 OSEND, 180 OPRINT, 181 OPRINTN, 182 OPANIC, 183 OEMPTY, 184 ORECOVER, 185 OGETG: 186 if n.Typecheck == 0 { 187 Fatalf("missing typecheck: %+v", n) 188 } 189 wascopy := n.Op == OCOPY 190 init := n.Ninit 191 n.Ninit.Set(nil) 192 n = walkexpr(n, &init) 193 n = addinit(n, init.Slice()) 194 if wascopy && n.Op == OCONVNOP { 195 n.Op = OEMPTY // don't leave plain values as statements. 196 } 197 198 // special case for a receive where we throw away 199 // the value received. 200 case ORECV: 201 if n.Typecheck == 0 { 202 Fatalf("missing typecheck: %+v", n) 203 } 204 init := n.Ninit 205 n.Ninit.Set(nil) 206 207 n.Left = walkexpr(n.Left, &init) 208 n = mkcall1(chanfn("chanrecv1", 2, n.Left.Type), nil, &init, typename(n.Left.Type), n.Left, nodnil()) 209 n = walkexpr(n, &init) 210 211 n = addinit(n, init.Slice()) 212 213 case OBREAK, 214 OCONTINUE, 215 OFALL, 216 OGOTO, 217 OLABEL, 218 ODCLCONST, 219 ODCLTYPE, 220 OCHECKNIL, 221 OVARKILL, 222 OVARLIVE: 223 break 224 225 case ODCL: 226 v := n.Left 227 if v.Class == PAUTOHEAP { 228 if compiling_runtime { 229 yyerror("%v escapes to heap, not allowed in runtime.", v) 230 } 231 if prealloc[v] == nil { 232 prealloc[v] = callnew(v.Type) 233 } 234 nn := nod(OAS, v.Name.Heapaddr, prealloc[v]) 235 nn.Colas = true 236 nn = typecheck(nn, Etop) 237 return walkstmt(nn) 238 } 239 240 case OBLOCK: 241 walkstmtlist(n.List.Slice()) 242 243 case OXCASE: 244 yyerror("case statement out of place") 245 n.Op = OCASE 246 fallthrough 247 248 case OCASE: 249 n.Right = walkstmt(n.Right) 250 251 case ODEFER: 252 hasdefer = true 253 switch n.Left.Op { 254 case OPRINT, OPRINTN: 255 n.Left = walkprintfunc(n.Left, &n.Ninit) 256 257 case OCOPY: 258 n.Left = copyany(n.Left, &n.Ninit, true) 259 260 default: 261 n.Left = walkexpr(n.Left, &n.Ninit) 262 } 263 264 // make room for size & fn arguments. 265 adjustargs(n, 2*Widthptr) 266 267 case OFOR: 268 if n.Left != nil { 269 walkstmtlist(n.Left.Ninit.Slice()) 270 init := n.Left.Ninit 271 n.Left.Ninit.Set(nil) 272 n.Left = walkexpr(n.Left, &init) 273 n.Left = addinit(n.Left, init.Slice()) 274 } 275 276 n.Right = walkstmt(n.Right) 277 walkstmtlist(n.Nbody.Slice()) 278 279 case OIF: 280 n.Left = walkexpr(n.Left, &n.Ninit) 281 walkstmtlist(n.Nbody.Slice()) 282 walkstmtlist(n.Rlist.Slice()) 283 284 case OPROC: 285 switch n.Left.Op { 286 case OPRINT, OPRINTN: 287 n.Left = walkprintfunc(n.Left, &n.Ninit) 288 289 case OCOPY: 290 n.Left = copyany(n.Left, &n.Ninit, true) 291 292 default: 293 n.Left = walkexpr(n.Left, &n.Ninit) 294 } 295 296 // make room for size & fn arguments. 
297 adjustargs(n, 2*Widthptr) 298 299 case ORETURN: 300 walkexprlist(n.List.Slice(), &n.Ninit) 301 if n.List.Len() == 0 { 302 break 303 } 304 if (Curfn.Type.FuncType().Outnamed && n.List.Len() > 1) || paramoutheap(Curfn) { 305 // assign to the function out parameters, 306 // so that reorder3 can fix up conflicts 307 var rl []*Node 308 309 var cl Class 310 for _, ln := range Curfn.Func.Dcl { 311 cl = ln.Class 312 if cl == PAUTO || cl == PAUTOHEAP { 313 break 314 } 315 if cl == PPARAMOUT { 316 if ln.isParamStackCopy() { 317 ln = walkexpr(typecheck(nod(OIND, ln.Name.Heapaddr, nil), Erv), nil) 318 } 319 rl = append(rl, ln) 320 } 321 } 322 323 if got, want := n.List.Len(), len(rl); got != want { 324 // order should have rewritten multi-value function calls 325 // with explicit OAS2FUNC nodes. 326 Fatalf("expected %v return arguments, have %v", want, got) 327 } 328 329 if samelist(rl, n.List.Slice()) { 330 // special return in disguise 331 n.List.Set(nil) 332 333 break 334 } 335 336 // move function calls out, to make reorder3's job easier. 337 walkexprlistsafe(n.List.Slice(), &n.Ninit) 338 339 ll := ascompatee(n.Op, rl, n.List.Slice(), &n.Ninit) 340 n.List.Set(reorder3(ll)) 341 ls := n.List.Slice() 342 for i, n := range ls { 343 ls[i] = applywritebarrier(n) 344 } 345 break 346 } 347 348 ll := ascompatte(n.Op, nil, false, Curfn.Type.Results(), n.List.Slice(), 1, &n.Ninit) 349 n.List.Set(ll) 350 351 case ORETJMP: 352 break 353 354 case OSELECT: 355 walkselect(n) 356 357 case OSWITCH: 358 walkswitch(n) 359 360 case ORANGE: 361 walkrange(n) 362 363 case OXFALL: 364 yyerror("fallthrough statement out of place") 365 n.Op = OFALL 366 } 367 368 if n.Op == ONAME { 369 Fatalf("walkstmt ended up with name: %+v", n) 370 } 371 return n 372 } 373 374 func isSmallMakeSlice(n *Node) bool { 375 if n.Op != OMAKESLICE { 376 return false 377 } 378 l := n.Left 379 r := n.Right 380 if r == nil { 381 r = l 382 } 383 t := n.Type 384 385 return smallintconst(l) && smallintconst(r) && (t.Elem().Width == 0 || r.Int64() < (1<<16)/t.Elem().Width) 386 } 387 388 // walk the whole tree of the body of an 389 // expression or simple statement. 390 // the types expressions are calculated. 391 // compile-time constants are evaluated. 392 // complex side effects like statements are appended to init 393 func walkexprlist(s []*Node, init *Nodes) { 394 for i := range s { 395 s[i] = walkexpr(s[i], init) 396 } 397 } 398 399 func walkexprlistsafe(s []*Node, init *Nodes) { 400 for i, n := range s { 401 s[i] = safeexpr(n, init) 402 s[i] = walkexpr(s[i], init) 403 } 404 } 405 406 func walkexprlistcheap(s []*Node, init *Nodes) { 407 for i, n := range s { 408 s[i] = cheapexpr(n, init) 409 s[i] = walkexpr(s[i], init) 410 } 411 } 412 413 // Build name of function for interface conversion. 414 // Not all names are possible 415 // (e.g., we'll never generate convE2E or convE2I or convI2E). 416 func convFuncName(from, to *Type) string { 417 tkind := to.iet() 418 switch from.iet() { 419 case 'I': 420 switch tkind { 421 case 'I': 422 return "convI2I" 423 } 424 case 'T': 425 switch tkind { 426 case 'E': 427 return "convT2E" 428 case 'I': 429 return "convT2I" 430 } 431 } 432 Fatalf("unknown conv func %c2%c", from.iet(), to.iet()) 433 panic("unreachable") 434 } 435 436 // The result of walkexpr MUST be assigned back to n, e.g. 
437 // n.Left = walkexpr(n.Left, init) 438 func walkexpr(n *Node, init *Nodes) *Node { 439 if n == nil { 440 return n 441 } 442 443 if init == &n.Ninit { 444 // not okay to use n->ninit when walking n, 445 // because we might replace n with some other node 446 // and would lose the init list. 447 Fatalf("walkexpr init == &n->ninit") 448 } 449 450 if n.Ninit.Len() != 0 { 451 walkstmtlist(n.Ninit.Slice()) 452 init.AppendNodes(&n.Ninit) 453 } 454 455 lno := setlineno(n) 456 457 if Debug['w'] > 1 { 458 Dump("walk-before", n) 459 } 460 461 if n.Typecheck != 1 { 462 Fatalf("missed typecheck: %+v", n) 463 } 464 465 if n.Op == ONAME && n.Class == PAUTOHEAP { 466 nn := nod(OIND, n.Name.Heapaddr, nil) 467 nn = typecheck(nn, Erv) 468 nn = walkexpr(nn, init) 469 nn.Left.NonNil = true 470 return nn 471 } 472 473 opswitch: 474 switch n.Op { 475 default: 476 Dump("walk", n) 477 Fatalf("walkexpr: switch 1 unknown op %+S", n) 478 479 case OTYPE, 480 ONONAME, 481 OINDREGSP, 482 OEMPTY, 483 OGETG: 484 485 case ONOT, 486 OMINUS, 487 OPLUS, 488 OCOM, 489 OREAL, 490 OIMAG, 491 ODOTMETH, 492 ODOTINTER: 493 n.Left = walkexpr(n.Left, init) 494 495 case OIND: 496 n.Left = walkexpr(n.Left, init) 497 498 case ODOT: 499 usefield(n) 500 n.Left = walkexpr(n.Left, init) 501 502 case ODOTPTR: 503 usefield(n) 504 if n.Op == ODOTPTR && n.Left.Type.Elem().Width == 0 { 505 // No actual copy will be generated, so emit an explicit nil check. 506 n.Left = cheapexpr(n.Left, init) 507 508 checknil(n.Left, init) 509 } 510 511 n.Left = walkexpr(n.Left, init) 512 513 case OEFACE: 514 n.Left = walkexpr(n.Left, init) 515 n.Right = walkexpr(n.Right, init) 516 517 case OSPTR, OITAB, OIDATA: 518 n.Left = walkexpr(n.Left, init) 519 520 case OLEN, OCAP: 521 n.Left = walkexpr(n.Left, init) 522 523 // replace len(*[10]int) with 10. 524 // delayed until now to preserve side effects. 525 t := n.Left.Type 526 527 if t.IsPtr() { 528 t = t.Elem() 529 } 530 if t.IsArray() { 531 safeexpr(n.Left, init) 532 Nodconst(n, n.Type, t.NumElem()) 533 n.Typecheck = 1 534 } 535 536 case OLSH, ORSH: 537 n.Left = walkexpr(n.Left, init) 538 n.Right = walkexpr(n.Right, init) 539 t := n.Left.Type 540 n.Bounded = bounded(n.Right, 8*t.Width) 541 if Debug['m'] != 0 && n.Etype != 0 && !Isconst(n.Right, CTINT) { 542 Warn("shift bounds check elided") 543 } 544 545 // Use results from call expression as arguments for complex. 546 case OAND, 547 OSUB, 548 OHMUL, 549 OLT, 550 OLE, 551 OGE, 552 OGT, 553 OADD, 554 OCOMPLEX, 555 OLROT: 556 if n.Op == OCOMPLEX && n.Left == nil && n.Right == nil { 557 n.Left = n.List.First() 558 n.Right = n.List.Second() 559 } 560 561 n.Left = walkexpr(n.Left, init) 562 n.Right = walkexpr(n.Right, init) 563 564 case OOR, OXOR: 565 n.Left = walkexpr(n.Left, init) 566 n.Right = walkexpr(n.Right, init) 567 n = walkrotate(n) 568 569 case OEQ, ONE: 570 n.Left = walkexpr(n.Left, init) 571 n.Right = walkexpr(n.Right, init) 572 573 // Disable safemode while compiling this code: the code we 574 // generate internally can refer to unsafe.Pointer. 575 // In this case it can happen if we need to generate an == 576 // for a struct containing a reflect.Value, which itself has 577 // an unexported field of type unsafe.Pointer. 578 old_safemode := safemode 579 safemode = false 580 n = walkcompare(n, init) 581 safemode = old_safemode 582 583 case OANDAND, OOROR: 584 n.Left = walkexpr(n.Left, init) 585 586 // cannot put side effects from n.Right on init, 587 // because they cannot run before n.Left is checked. 
588 // save elsewhere and store on the eventual n.Right. 589 var ll Nodes 590 591 n.Right = walkexpr(n.Right, &ll) 592 n.Right = addinit(n.Right, ll.Slice()) 593 n = walkinrange(n, init) 594 595 case OPRINT, OPRINTN: 596 walkexprlist(n.List.Slice(), init) 597 n = walkprint(n, init) 598 599 case OPANIC: 600 n = mkcall("gopanic", nil, init, n.Left) 601 602 case ORECOVER: 603 n = mkcall("gorecover", n.Type, init, nod(OADDR, nodfp, nil)) 604 605 case OLITERAL: 606 n.Addable = true 607 608 case OCLOSUREVAR, OCFUNC: 609 n.Addable = true 610 611 case ONAME: 612 n.Addable = true 613 614 case OCALLINTER: 615 usemethod(n) 616 t := n.Left.Type 617 if n.List.Len() != 0 && n.List.First().Op == OAS { 618 break 619 } 620 n.Left = walkexpr(n.Left, init) 621 walkexprlist(n.List.Slice(), init) 622 ll := ascompatte(n.Op, n, n.Isddd, t.Params(), n.List.Slice(), 0, init) 623 n.List.Set(reorder1(ll)) 624 625 case OCALLFUNC: 626 if n.Left.Op == OCLOSURE { 627 // Transform direct call of a closure to call of a normal function. 628 // transformclosure already did all preparation work. 629 630 // Prepend captured variables to argument list. 631 n.List.Prepend(n.Left.Func.Enter.Slice()...) 632 633 n.Left.Func.Enter.Set(nil) 634 635 // Replace OCLOSURE with ONAME/PFUNC. 636 n.Left = n.Left.Func.Closure.Func.Nname 637 638 // Update type of OCALLFUNC node. 639 // Output arguments had not changed, but their offsets could. 640 if n.Left.Type.Results().NumFields() == 1 { 641 n.Type = n.Left.Type.Results().Field(0).Type 642 } else { 643 n.Type = n.Left.Type.Results() 644 } 645 } 646 647 t := n.Left.Type 648 if n.List.Len() != 0 && n.List.First().Op == OAS { 649 break 650 } 651 652 n.Left = walkexpr(n.Left, init) 653 walkexprlist(n.List.Slice(), init) 654 655 if n.Left.Op == ONAME && n.Left.Sym.Name == "Sqrt" && 656 (n.Left.Sym.Pkg.Path == "math" || n.Left.Sym.Pkg == localpkg && myimportpath == "math") { 657 if Thearch.LinkArch.InFamily(sys.AMD64, sys.ARM, sys.ARM64, sys.MIPS, sys.PPC64, sys.S390X) { 658 n.Op = OSQRT 659 n.Left = n.List.First() 660 n.List.Set(nil) 661 break opswitch 662 } 663 } 664 665 ll := ascompatte(n.Op, n, n.Isddd, t.Params(), n.List.Slice(), 0, init) 666 n.List.Set(reorder1(ll)) 667 668 case OCALLMETH: 669 t := n.Left.Type 670 if n.List.Len() != 0 && n.List.First().Op == OAS { 671 break 672 } 673 n.Left = walkexpr(n.Left, init) 674 walkexprlist(n.List.Slice(), init) 675 ll := ascompatte(n.Op, n, false, t.Recvs(), []*Node{n.Left.Left}, 0, init) 676 lr := ascompatte(n.Op, n, n.Isddd, t.Params(), n.List.Slice(), 0, init) 677 ll = append(ll, lr...) 678 n.Left.Left = nil 679 ullmancalc(n.Left) 680 n.List.Set(reorder1(ll)) 681 682 case OAS: 683 init.AppendNodes(&n.Ninit) 684 685 n.Left = walkexpr(n.Left, init) 686 n.Left = safeexpr(n.Left, init) 687 688 if oaslit(n, init) { 689 break 690 } 691 692 if n.Right == nil { 693 // TODO(austin): Check all "implicit zeroing" 694 break 695 } 696 697 if !instrumenting && iszero(n.Right) && !needwritebarrier(n.Left, n.Right) { 698 break 699 } 700 701 switch n.Right.Op { 702 default: 703 n.Right = walkexpr(n.Right, init) 704 705 case ORECV: 706 // x = <-c; n.Left is x, n.Right.Left is c. 707 // orderstmt made sure x is addressable. 708 n.Right.Left = walkexpr(n.Right.Left, init) 709 710 n1 := nod(OADDR, n.Left, nil) 711 r := n.Right.Left // the channel 712 n = mkcall1(chanfn("chanrecv1", 2, r.Type), nil, init, typename(r.Type), r, n1) 713 n = walkexpr(n, init) 714 break opswitch 715 716 case OAPPEND: 717 // x = append(...) 
718 r := n.Right 719 if r.Type.Elem().NotInHeap { 720 yyerror("%v is go:notinheap; heap allocation disallowed", r.Type.Elem()) 721 } 722 if r.Isddd { 723 r = appendslice(r, init) // also works for append(slice, string). 724 } else { 725 r = walkappend(r, init, n) 726 } 727 n.Right = r 728 if r.Op == OAPPEND { 729 // Left in place for back end. 730 // Do not add a new write barrier. 731 break opswitch 732 } 733 // Otherwise, lowered for race detector. 734 // Treat as ordinary assignment. 735 } 736 737 if n.Left != nil && n.Right != nil { 738 static := n.IsStatic 739 n = convas(n, init) 740 n.IsStatic = static 741 n = applywritebarrier(n) 742 } 743 744 case OAS2: 745 init.AppendNodes(&n.Ninit) 746 walkexprlistsafe(n.List.Slice(), init) 747 walkexprlistsafe(n.Rlist.Slice(), init) 748 ll := ascompatee(OAS, n.List.Slice(), n.Rlist.Slice(), init) 749 ll = reorder3(ll) 750 for i, n := range ll { 751 ll[i] = applywritebarrier(n) 752 } 753 n = liststmt(ll) 754 755 // a,b,... = fn() 756 case OAS2FUNC: 757 init.AppendNodes(&n.Ninit) 758 759 r := n.Rlist.First() 760 walkexprlistsafe(n.List.Slice(), init) 761 r = walkexpr(r, init) 762 763 if isIntrinsicCall(r) { 764 n.Rlist.Set1(r) 765 break 766 } 767 init.Append(r) 768 769 ll := ascompatet(n.Op, n.List, r.Type) 770 for i, n := range ll { 771 ll[i] = applywritebarrier(n) 772 } 773 n = liststmt(ll) 774 775 // x, y = <-c 776 // orderstmt made sure x is addressable. 777 case OAS2RECV: 778 init.AppendNodes(&n.Ninit) 779 780 r := n.Rlist.First() 781 walkexprlistsafe(n.List.Slice(), init) 782 r.Left = walkexpr(r.Left, init) 783 var n1 *Node 784 if isblank(n.List.First()) { 785 n1 = nodnil() 786 } else { 787 n1 = nod(OADDR, n.List.First(), nil) 788 } 789 n1.Etype = 1 // addr does not escape 790 fn := chanfn("chanrecv2", 2, r.Left.Type) 791 ok := n.List.Second() 792 call := mkcall1(fn, ok.Type, init, typename(r.Left.Type), r.Left, n1) 793 n = nod(OAS, ok, call) 794 n = typecheck(n, Etop) 795 796 // a,b = m[i]; 797 case OAS2MAPR: 798 init.AppendNodes(&n.Ninit) 799 800 r := n.Rlist.First() 801 walkexprlistsafe(n.List.Slice(), init) 802 r.Left = walkexpr(r.Left, init) 803 r.Right = walkexpr(r.Right, init) 804 t := r.Left.Type 805 p := "" 806 if t.Val().Width <= 128 { // Check ../../runtime/hashmap.go:maxValueSize before changing. 807 switch algtype(t.Key()) { 808 case AMEM32: 809 p = "mapaccess2_fast32" 810 case AMEM64: 811 p = "mapaccess2_fast64" 812 case ASTRING: 813 p = "mapaccess2_faststr" 814 } 815 } 816 817 var key *Node 818 if p != "" { 819 // fast versions take key by value 820 key = r.Right 821 } else { 822 // standard version takes key by reference 823 // orderexpr made sure key is addressable. 824 key = nod(OADDR, r.Right, nil) 825 826 p = "mapaccess2" 827 } 828 829 // from: 830 // a,b = m[i] 831 // to: 832 // var,b = mapaccess2*(t, m, i) 833 // a = *var 834 a := n.List.First() 835 836 if w := t.Val().Width; w <= 1024 { // 1024 must match ../../../../runtime/hashmap.go:maxZero 837 fn := mapfn(p, t) 838 r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key) 839 } else { 840 fn := mapfn("mapaccess2_fat", t) 841 z := zeroaddr(w) 842 r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key, z) 843 } 844 845 // mapaccess2* returns a typed bool, but due to spec changes, 846 // the boolean result of i.(T) is now untyped so we make it the 847 // same type as the variable on the lhs. 
848 if ok := n.List.Second(); !isblank(ok) && ok.Type.IsBoolean() { 849 r.Type.Field(1).Type = ok.Type 850 } 851 n.Rlist.Set1(r) 852 n.Op = OAS2FUNC 853 854 // don't generate a = *var if a is _ 855 if !isblank(a) { 856 var_ := temp(ptrto(t.Val())) 857 var_.Typecheck = 1 858 var_.NonNil = true // mapaccess always returns a non-nil pointer 859 n.List.SetIndex(0, var_) 860 n = walkexpr(n, init) 861 init.Append(n) 862 n = nod(OAS, a, nod(OIND, var_, nil)) 863 } 864 865 n = typecheck(n, Etop) 866 n = walkexpr(n, init) 867 868 case ODELETE: 869 init.AppendNodes(&n.Ninit) 870 map_ := n.List.First() 871 key := n.List.Second() 872 map_ = walkexpr(map_, init) 873 key = walkexpr(key, init) 874 875 // orderstmt made sure key is addressable. 876 key = nod(OADDR, key, nil) 877 878 t := map_.Type 879 n = mkcall1(mapfndel("mapdelete", t), nil, init, typename(t), map_, key) 880 881 case OAS2DOTTYPE: 882 walkexprlistsafe(n.List.Slice(), init) 883 e := n.Rlist.First() // i.(T) 884 e.Left = walkexpr(e.Left, init) 885 886 case ODOTTYPE, ODOTTYPE2: 887 n.Left = walkexpr(n.Left, init) 888 889 case OCONVIFACE: 890 n.Left = walkexpr(n.Left, init) 891 892 // Optimize convT2E or convT2I as a two-word copy when T is pointer-shaped. 893 if isdirectiface(n.Left.Type) { 894 var t *Node 895 if n.Type.IsEmptyInterface() { 896 t = typename(n.Left.Type) 897 } else { 898 t = itabname(n.Left.Type, n.Type) 899 } 900 l := nod(OEFACE, t, n.Left) 901 l.Type = n.Type 902 l.Typecheck = n.Typecheck 903 n = l 904 break 905 } 906 // Optimize convT2{E,I} when T is not pointer-shaped. 907 // We make the interface by initializing a stack temporary to 908 // the value we want to put in the interface, then using the address of 909 // that stack temporary for the interface data word. 910 if !n.Left.Type.IsInterface() && n.Esc == EscNone && n.Left.Type.Width <= 1024 { 911 tmp := temp(n.Left.Type) 912 init.Append(typecheck(nod(OAS, tmp, n.Left), Etop)) 913 var t *Node 914 if n.Type.IsEmptyInterface() { 915 t = typename(n.Left.Type) 916 } else { 917 t = itabname(n.Left.Type, n.Type) 918 } 919 l := nod(OEFACE, t, typecheck(nod(OADDR, tmp, nil), Erv)) 920 l.Type = n.Type 921 l.Typecheck = n.Typecheck 922 n = l 923 break 924 } 925 926 // Implement interface to empty interface conversion. 927 // tmp = i.itab 928 // if tmp != nil { 929 // tmp = tmp.type 930 // } 931 // e = iface{tmp, i.data} 932 if n.Type.IsEmptyInterface() && n.Left.Type.IsInterface() && !n.Left.Type.IsEmptyInterface() { 933 // Evaluate the input interface. 934 c := temp(n.Left.Type) 935 init.Append(nod(OAS, c, n.Left)) 936 937 // Get the itab out of the interface. 938 tmp := temp(ptrto(Types[TUINT8])) 939 init.Append(nod(OAS, tmp, typecheck(nod(OITAB, c, nil), Erv))) 940 941 // Get the type out of the itab. 942 nif := nod(OIF, typecheck(nod(ONE, tmp, nodnil()), Erv), nil) 943 nif.Nbody.Set1(nod(OAS, tmp, itabType(tmp))) 944 init.Append(nif) 945 946 // Build the result. 947 e := nod(OEFACE, tmp, ifaceData(c, ptrto(Types[TUINT8]))) 948 e.Type = n.Type // assign type manually, typecheck doesn't understand OEFACE. 
949 e.Typecheck = 1 950 n = e 951 break 952 } 953 954 var ll []*Node 955 if n.Type.IsEmptyInterface() { 956 if !n.Left.Type.IsInterface() { 957 ll = append(ll, typename(n.Left.Type)) 958 } 959 } else { 960 if n.Left.Type.IsInterface() { 961 ll = append(ll, typename(n.Type)) 962 } else { 963 ll = append(ll, itabname(n.Left.Type, n.Type)) 964 } 965 } 966 967 if n.Left.Type.IsInterface() { 968 ll = append(ll, n.Left) 969 } else { 970 // regular types are passed by reference to avoid C vararg calls 971 // orderexpr arranged for n.Left to be a temporary for all 972 // the conversions it could see. comparison of an interface 973 // with a non-interface, especially in a switch on interface value 974 // with non-interface cases, is not visible to orderstmt, so we 975 // have to fall back on allocating a temp here. 976 if islvalue(n.Left) { 977 ll = append(ll, nod(OADDR, n.Left, nil)) 978 } else { 979 ll = append(ll, nod(OADDR, copyexpr(n.Left, n.Left.Type, init), nil)) 980 } 981 dowidth(n.Left.Type) 982 } 983 984 fn := syslook(convFuncName(n.Left.Type, n.Type)) 985 fn = substArgTypes(fn, n.Left.Type, n.Type) 986 dowidth(fn.Type) 987 n = nod(OCALL, fn, nil) 988 n.List.Set(ll) 989 n = typecheck(n, Erv) 990 n = walkexpr(n, init) 991 992 case OCONV, OCONVNOP: 993 if Thearch.LinkArch.Family == sys.ARM || Thearch.LinkArch.Family == sys.MIPS { 994 if n.Left.Type.IsFloat() { 995 if n.Type.Etype == TINT64 { 996 n = mkcall("float64toint64", n.Type, init, conv(n.Left, Types[TFLOAT64])) 997 break 998 } 999 1000 if n.Type.Etype == TUINT64 { 1001 n = mkcall("float64touint64", n.Type, init, conv(n.Left, Types[TFLOAT64])) 1002 break 1003 } 1004 } 1005 1006 if n.Type.IsFloat() { 1007 if n.Left.Type.Etype == TINT64 { 1008 n = conv(mkcall("int64tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TINT64])), n.Type) 1009 break 1010 } 1011 1012 if n.Left.Type.Etype == TUINT64 { 1013 n = conv(mkcall("uint64tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TUINT64])), n.Type) 1014 break 1015 } 1016 } 1017 } 1018 1019 if Thearch.LinkArch.Family == sys.I386 { 1020 if n.Left.Type.IsFloat() { 1021 if n.Type.Etype == TINT64 { 1022 n = mkcall("float64toint64", n.Type, init, conv(n.Left, Types[TFLOAT64])) 1023 break 1024 } 1025 1026 if n.Type.Etype == TUINT64 { 1027 n = mkcall("float64touint64", n.Type, init, conv(n.Left, Types[TFLOAT64])) 1028 break 1029 } 1030 if n.Type.Etype == TUINT32 || n.Type.Etype == TUINT || n.Type.Etype == TUINTPTR { 1031 n = mkcall("float64touint32", n.Type, init, conv(n.Left, Types[TFLOAT64])) 1032 break 1033 } 1034 } 1035 if n.Type.IsFloat() { 1036 if n.Left.Type.Etype == TINT64 { 1037 n = conv(mkcall("int64tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TINT64])), n.Type) 1038 break 1039 } 1040 1041 if n.Left.Type.Etype == TUINT64 { 1042 n = conv(mkcall("uint64tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TUINT64])), n.Type) 1043 break 1044 } 1045 if n.Left.Type.Etype == TUINT32 || n.Left.Type.Etype == TUINT || n.Left.Type.Etype == TUINTPTR { 1046 n = conv(mkcall("uint32tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TUINT32])), n.Type) 1047 break 1048 } 1049 } 1050 } 1051 1052 n.Left = walkexpr(n.Left, init) 1053 1054 case OANDNOT: 1055 n.Left = walkexpr(n.Left, init) 1056 n.Op = OAND 1057 n.Right = nod(OCOM, n.Right, nil) 1058 n.Right = typecheck(n.Right, Erv) 1059 n.Right = walkexpr(n.Right, init) 1060 1061 case OMUL: 1062 n.Left = walkexpr(n.Left, init) 1063 n.Right = walkexpr(n.Right, init) 1064 n = walkmul(n, init) 1065 1066 case ODIV, OMOD: 1067 n.Left = 
walkexpr(n.Left, init) 1068 n.Right = walkexpr(n.Right, init) 1069 1070 // rewrite complex div into function call. 1071 et := n.Left.Type.Etype 1072 1073 if isComplex[et] && n.Op == ODIV { 1074 t := n.Type 1075 n = mkcall("complex128div", Types[TCOMPLEX128], init, conv(n.Left, Types[TCOMPLEX128]), conv(n.Right, Types[TCOMPLEX128])) 1076 n = conv(n, t) 1077 break 1078 } 1079 1080 // Nothing to do for float divisions. 1081 if isFloat[et] { 1082 break 1083 } 1084 1085 // Try rewriting as shifts or magic multiplies. 1086 n = walkdiv(n, init) 1087 1088 // rewrite 64-bit div and mod into function calls 1089 // on 32-bit architectures. 1090 switch n.Op { 1091 case OMOD, ODIV: 1092 if Widthreg >= 8 || (et != TUINT64 && et != TINT64) { 1093 break opswitch 1094 } 1095 var fn string 1096 if et == TINT64 { 1097 fn = "int64" 1098 } else { 1099 fn = "uint64" 1100 } 1101 if n.Op == ODIV { 1102 fn += "div" 1103 } else { 1104 fn += "mod" 1105 } 1106 n = mkcall(fn, n.Type, init, conv(n.Left, Types[et]), conv(n.Right, Types[et])) 1107 } 1108 1109 case OINDEX: 1110 n.Left = walkexpr(n.Left, init) 1111 1112 // save the original node for bounds checking elision. 1113 // If it was a ODIV/OMOD walk might rewrite it. 1114 r := n.Right 1115 1116 n.Right = walkexpr(n.Right, init) 1117 1118 // if range of type cannot exceed static array bound, 1119 // disable bounds check. 1120 if n.Bounded { 1121 break 1122 } 1123 t := n.Left.Type 1124 if t != nil && t.IsPtr() { 1125 t = t.Elem() 1126 } 1127 if t.IsArray() { 1128 n.Bounded = bounded(r, t.NumElem()) 1129 if Debug['m'] != 0 && n.Bounded && !Isconst(n.Right, CTINT) { 1130 Warn("index bounds check elided") 1131 } 1132 if smallintconst(n.Right) && !n.Bounded { 1133 yyerror("index out of bounds") 1134 } 1135 } else if Isconst(n.Left, CTSTR) { 1136 n.Bounded = bounded(r, int64(len(n.Left.Val().U.(string)))) 1137 if Debug['m'] != 0 && n.Bounded && !Isconst(n.Right, CTINT) { 1138 Warn("index bounds check elided") 1139 } 1140 if smallintconst(n.Right) && !n.Bounded { 1141 yyerror("index out of bounds") 1142 } 1143 } 1144 1145 if Isconst(n.Right, CTINT) { 1146 if n.Right.Val().U.(*Mpint).CmpInt64(0) < 0 || n.Right.Val().U.(*Mpint).Cmp(maxintval[TINT]) > 0 { 1147 yyerror("index out of bounds") 1148 } 1149 } 1150 1151 case OINDEXMAP: 1152 // Replace m[k] with *map{access1,assign}(maptype, m, &k) 1153 n.Left = walkexpr(n.Left, init) 1154 n.Right = walkexpr(n.Right, init) 1155 map_ := n.Left 1156 key := n.Right 1157 t := map_.Type 1158 if n.Etype == 1 { 1159 // This m[k] expression is on the left-hand side of an assignment. 1160 // orderexpr made sure key is addressable. 1161 key = nod(OADDR, key, nil) 1162 n = mkcall1(mapfn("mapassign", t), nil, init, typename(t), map_, key) 1163 } else { 1164 // m[k] is not the target of an assignment. 1165 p := "" 1166 if t.Val().Width <= 128 { // Check ../../runtime/hashmap.go:maxValueSize before changing. 1167 switch algtype(t.Key()) { 1168 case AMEM32: 1169 p = "mapaccess1_fast32" 1170 case AMEM64: 1171 p = "mapaccess1_fast64" 1172 case ASTRING: 1173 p = "mapaccess1_faststr" 1174 } 1175 } 1176 1177 if p == "" { 1178 // standard version takes key by reference. 1179 // orderexpr made sure key is addressable. 
1180 key = nod(OADDR, key, nil) 1181 p = "mapaccess1" 1182 } 1183 1184 if w := t.Val().Width; w <= 1024 { // 1024 must match ../../../../runtime/hashmap.go:maxZero 1185 n = mkcall1(mapfn(p, t), ptrto(t.Val()), init, typename(t), map_, key) 1186 } else { 1187 p = "mapaccess1_fat" 1188 z := zeroaddr(w) 1189 n = mkcall1(mapfn(p, t), ptrto(t.Val()), init, typename(t), map_, key, z) 1190 } 1191 } 1192 n.Type = ptrto(t.Val()) 1193 n.NonNil = true // mapaccess1* and mapassign always return non-nil pointers. 1194 n = nod(OIND, n, nil) 1195 n.Type = t.Val() 1196 n.Typecheck = 1 1197 1198 case ORECV: 1199 Fatalf("walkexpr ORECV") // should see inside OAS only 1200 1201 case OSLICE, OSLICEARR, OSLICESTR, OSLICE3, OSLICE3ARR: 1202 n.Left = walkexpr(n.Left, init) 1203 low, high, max := n.SliceBounds() 1204 low = walkexpr(low, init) 1205 if low != nil && iszero(low) { 1206 // Reduce x[0:j] to x[:j] and x[0:j:k] to x[:j:k]. 1207 low = nil 1208 } 1209 high = walkexpr(high, init) 1210 max = walkexpr(max, init) 1211 n.SetSliceBounds(low, high, max) 1212 if n.Op.IsSlice3() { 1213 if max != nil && max.Op == OCAP && samesafeexpr(n.Left, max.Left) { 1214 // Reduce x[i:j:cap(x)] to x[i:j]. 1215 if n.Op == OSLICE3 { 1216 n.Op = OSLICE 1217 } else { 1218 n.Op = OSLICEARR 1219 } 1220 n = reduceSlice(n) 1221 } 1222 } else { 1223 n = reduceSlice(n) 1224 } 1225 1226 case OADDR: 1227 n.Left = walkexpr(n.Left, init) 1228 1229 case ONEW: 1230 if n.Esc == EscNone { 1231 if n.Type.Elem().Width >= 1<<16 { 1232 Fatalf("large ONEW with EscNone: %v", n) 1233 } 1234 r := temp(n.Type.Elem()) 1235 r = nod(OAS, r, nil) // zero temp 1236 r = typecheck(r, Etop) 1237 init.Append(r) 1238 r = nod(OADDR, r.Left, nil) 1239 r = typecheck(r, Erv) 1240 n = r 1241 } else { 1242 n = callnew(n.Type.Elem()) 1243 } 1244 1245 case OCMPSTR: 1246 // s + "badgerbadgerbadger" == "badgerbadgerbadger" 1247 if (Op(n.Etype) == OEQ || Op(n.Etype) == ONE) && Isconst(n.Right, CTSTR) && n.Left.Op == OADDSTR && n.Left.List.Len() == 2 && Isconst(n.Left.List.Second(), CTSTR) && strlit(n.Right) == strlit(n.Left.List.Second()) { 1248 // TODO(marvin): Fix Node.EType type union. 1249 r := nod(Op(n.Etype), nod(OLEN, n.Left.List.First(), nil), nodintconst(0)) 1250 r = typecheck(r, Erv) 1251 r = walkexpr(r, init) 1252 r.Type = n.Type 1253 n = r 1254 break 1255 } 1256 1257 // Rewrite comparisons to short constant strings as length+byte-wise comparisons. 1258 var cs, ncs *Node // const string, non-const string 1259 switch { 1260 case Isconst(n.Left, CTSTR) && Isconst(n.Right, CTSTR): 1261 // ignore; will be constant evaluated 1262 case Isconst(n.Left, CTSTR): 1263 cs = n.Left 1264 ncs = n.Right 1265 case Isconst(n.Right, CTSTR): 1266 cs = n.Right 1267 ncs = n.Left 1268 } 1269 if cs != nil { 1270 cmp := Op(n.Etype) 1271 // maxRewriteLen was chosen empirically. 1272 // It is the value that minimizes cmd/go file size 1273 // across most architectures. 1274 // See the commit description for CL 26758 for details. 1275 maxRewriteLen := 6 1276 var and Op 1277 switch cmp { 1278 case OEQ: 1279 and = OANDAND 1280 case ONE: 1281 and = OOROR 1282 default: 1283 // Don't do byte-wise comparisons for <, <=, etc. 1284 // They're fairly complicated. 1285 // Length-only checks are ok, though. 1286 maxRewriteLen = 0 1287 } 1288 if s := cs.Val().U.(string); len(s) <= maxRewriteLen { 1289 if len(s) > 0 { 1290 ncs = safeexpr(ncs, init) 1291 } 1292 // TODO(marvin): Fix Node.EType type union. 
1293 r := nod(cmp, nod(OLEN, ncs, nil), nodintconst(int64(len(s)))) 1294 for i := 0; i < len(s); i++ { 1295 cb := nodintconst(int64(s[i])) 1296 ncb := nod(OINDEX, ncs, nodintconst(int64(i))) 1297 r = nod(and, r, nod(cmp, ncb, cb)) 1298 } 1299 r = typecheck(r, Erv) 1300 r = walkexpr(r, init) 1301 r.Type = n.Type 1302 n = r 1303 break 1304 } 1305 } 1306 1307 var r *Node 1308 // TODO(marvin): Fix Node.EType type union. 1309 if Op(n.Etype) == OEQ || Op(n.Etype) == ONE { 1310 // prepare for rewrite below 1311 n.Left = cheapexpr(n.Left, init) 1312 n.Right = cheapexpr(n.Right, init) 1313 1314 r = mkcall("eqstring", Types[TBOOL], init, conv(n.Left, Types[TSTRING]), conv(n.Right, Types[TSTRING])) 1315 1316 // quick check of len before full compare for == or != 1317 // eqstring assumes that the lengths are equal 1318 // TODO(marvin): Fix Node.EType type union. 1319 if Op(n.Etype) == OEQ { 1320 // len(left) == len(right) && eqstring(left, right) 1321 r = nod(OANDAND, nod(OEQ, nod(OLEN, n.Left, nil), nod(OLEN, n.Right, nil)), r) 1322 } else { 1323 // len(left) != len(right) || !eqstring(left, right) 1324 r = nod(ONOT, r, nil) 1325 r = nod(OOROR, nod(ONE, nod(OLEN, n.Left, nil), nod(OLEN, n.Right, nil)), r) 1326 } 1327 1328 r = typecheck(r, Erv) 1329 r = walkexpr(r, nil) 1330 } else { 1331 // sys_cmpstring(s1, s2) :: 0 1332 r = mkcall("cmpstring", Types[TINT], init, conv(n.Left, Types[TSTRING]), conv(n.Right, Types[TSTRING])) 1333 // TODO(marvin): Fix Node.EType type union. 1334 r = nod(Op(n.Etype), r, nodintconst(0)) 1335 } 1336 1337 r = typecheck(r, Erv) 1338 if !n.Type.IsBoolean() { 1339 Fatalf("cmp %v", n.Type) 1340 } 1341 r.Type = n.Type 1342 n = r 1343 1344 case OADDSTR: 1345 n = addstr(n, init) 1346 1347 case OAPPEND: 1348 // order should make sure we only see OAS(node, OAPPEND), which we handle above. 1349 Fatalf("append outside assignment") 1350 1351 case OCOPY: 1352 n = copyany(n, init, instrumenting && !compiling_runtime) 1353 1354 // cannot use chanfn - closechan takes any, not chan any 1355 case OCLOSE: 1356 fn := syslook("closechan") 1357 1358 fn = substArgTypes(fn, n.Left.Type) 1359 n = mkcall1(fn, nil, init, n.Left) 1360 1361 case OMAKECHAN: 1362 n = mkcall1(chanfn("makechan", 1, n.Type), n.Type, init, typename(n.Type), conv(n.Left, Types[TINT64])) 1363 1364 case OMAKEMAP: 1365 t := n.Type 1366 1367 a := nodnil() // hmap buffer 1368 r := nodnil() // bucket buffer 1369 if n.Esc == EscNone { 1370 // Allocate hmap buffer on stack. 1371 var_ := temp(hmap(t)) 1372 1373 a = nod(OAS, var_, nil) // zero temp 1374 a = typecheck(a, Etop) 1375 init.Append(a) 1376 a = nod(OADDR, var_, nil) 1377 1378 // Allocate one bucket on stack. 1379 // Maximum key/value size is 128 bytes, larger objects 1380 // are stored with an indirection. So max bucket size is 2048+eps. 
1381 var_ = temp(mapbucket(t)) 1382 1383 r = nod(OAS, var_, nil) // zero temp 1384 r = typecheck(r, Etop) 1385 init.Append(r) 1386 r = nod(OADDR, var_, nil) 1387 } 1388 1389 fn := syslook("makemap") 1390 fn = substArgTypes(fn, hmap(t), mapbucket(t), t.Key(), t.Val()) 1391 n = mkcall1(fn, n.Type, init, typename(n.Type), conv(n.Left, Types[TINT64]), a, r) 1392 1393 case OMAKESLICE: 1394 l := n.Left 1395 r := n.Right 1396 if r == nil { 1397 r = safeexpr(l, init) 1398 l = r 1399 } 1400 t := n.Type 1401 if n.Esc == EscNone { 1402 if !isSmallMakeSlice(n) { 1403 Fatalf("non-small OMAKESLICE with EscNone: %v", n) 1404 } 1405 // var arr [r]T 1406 // n = arr[:l] 1407 t = typArray(t.Elem(), nonnegintconst(r)) // [r]T 1408 var_ := temp(t) 1409 a := nod(OAS, var_, nil) // zero temp 1410 a = typecheck(a, Etop) 1411 init.Append(a) 1412 r := nod(OSLICE, var_, nil) // arr[:l] 1413 r.SetSliceBounds(nil, l, nil) 1414 r = conv(r, n.Type) // in case n.Type is named. 1415 r = typecheck(r, Erv) 1416 r = walkexpr(r, init) 1417 n = r 1418 } else { 1419 // n escapes; set up a call to makeslice. 1420 // When len and cap can fit into int, use makeslice instead of 1421 // makeslice64, which is faster and shorter on 32 bit platforms. 1422 1423 if t.Elem().NotInHeap { 1424 yyerror("%v is go:notinheap; heap allocation disallowed", t.Elem()) 1425 } 1426 1427 len, cap := l, r 1428 1429 fnname := "makeslice64" 1430 argtype := Types[TINT64] 1431 1432 // typechecking guarantees that TIDEAL len/cap are positive and fit in an int. 1433 // The case of len or cap overflow when converting TUINT or TUINTPTR to TINT 1434 // will be handled by the negative range checks in makeslice during runtime. 1435 if (len.Type.IsKind(TIDEAL) || maxintval[len.Type.Etype].Cmp(maxintval[TUINT]) <= 0) && 1436 (cap.Type.IsKind(TIDEAL) || maxintval[cap.Type.Etype].Cmp(maxintval[TUINT]) <= 0) { 1437 fnname = "makeslice" 1438 argtype = Types[TINT] 1439 } 1440 1441 fn := syslook(fnname) 1442 fn = substArgTypes(fn, t.Elem()) // any-1 1443 n = mkcall1(fn, t, init, typename(t.Elem()), conv(len, argtype), conv(cap, argtype)) 1444 } 1445 1446 case ORUNESTR: 1447 a := nodnil() 1448 if n.Esc == EscNone { 1449 t := typArray(Types[TUINT8], 4) 1450 var_ := temp(t) 1451 a = nod(OADDR, var_, nil) 1452 } 1453 1454 // intstring(*[4]byte, rune) 1455 n = mkcall("intstring", n.Type, init, a, conv(n.Left, Types[TINT64])) 1456 1457 case OARRAYBYTESTR: 1458 a := nodnil() 1459 if n.Esc == EscNone { 1460 // Create temporary buffer for string on stack. 1461 t := typArray(Types[TUINT8], tmpstringbufsize) 1462 1463 a = nod(OADDR, temp(t), nil) 1464 } 1465 1466 // slicebytetostring(*[32]byte, []byte) string; 1467 n = mkcall("slicebytetostring", n.Type, init, a, n.Left) 1468 1469 // slicebytetostringtmp([]byte) string; 1470 case OARRAYBYTESTRTMP: 1471 n.Left = walkexpr(n.Left, init) 1472 1473 if !instrumenting { 1474 // Let the backend handle OARRAYBYTESTRTMP directly 1475 // to avoid a function call to slicebytetostringtmp. 1476 break 1477 } 1478 1479 n = mkcall("slicebytetostringtmp", n.Type, init, n.Left) 1480 1481 // slicerunetostring(*[32]byte, []rune) string; 1482 case OARRAYRUNESTR: 1483 a := nodnil() 1484 1485 if n.Esc == EscNone { 1486 // Create temporary buffer for string on stack. 
1487 t := typArray(Types[TUINT8], tmpstringbufsize) 1488 1489 a = nod(OADDR, temp(t), nil) 1490 } 1491 1492 n = mkcall("slicerunetostring", n.Type, init, a, n.Left) 1493 1494 // stringtoslicebyte(*32[byte], string) []byte; 1495 case OSTRARRAYBYTE: 1496 a := nodnil() 1497 1498 if n.Esc == EscNone { 1499 // Create temporary buffer for slice on stack. 1500 t := typArray(Types[TUINT8], tmpstringbufsize) 1501 1502 a = nod(OADDR, temp(t), nil) 1503 } 1504 1505 n = mkcall("stringtoslicebyte", n.Type, init, a, conv(n.Left, Types[TSTRING])) 1506 1507 case OSTRARRAYBYTETMP: 1508 // []byte(string) conversion that creates a slice 1509 // referring to the actual string bytes. 1510 // This conversion is handled later by the backend and 1511 // is only for use by internal compiler optimizations 1512 // that know that the slice won't be mutated. 1513 // The only such case today is: 1514 // for i, c := range []byte(string) 1515 n.Left = walkexpr(n.Left, init) 1516 1517 // stringtoslicerune(*[32]rune, string) []rune 1518 case OSTRARRAYRUNE: 1519 a := nodnil() 1520 1521 if n.Esc == EscNone { 1522 // Create temporary buffer for slice on stack. 1523 t := typArray(Types[TINT32], tmpstringbufsize) 1524 1525 a = nod(OADDR, temp(t), nil) 1526 } 1527 1528 n = mkcall("stringtoslicerune", n.Type, init, a, n.Left) 1529 1530 // ifaceeq(i1 any-1, i2 any-2) (ret bool); 1531 case OCMPIFACE: 1532 if !eqtype(n.Left.Type, n.Right.Type) { 1533 Fatalf("ifaceeq %v %v %v", n.Op, n.Left.Type, n.Right.Type) 1534 } 1535 var fn *Node 1536 if n.Left.Type.IsEmptyInterface() { 1537 fn = syslook("efaceeq") 1538 } else { 1539 fn = syslook("ifaceeq") 1540 } 1541 1542 n.Right = cheapexpr(n.Right, init) 1543 n.Left = cheapexpr(n.Left, init) 1544 fn = substArgTypes(fn, n.Right.Type, n.Left.Type) 1545 r := mkcall1(fn, n.Type, init, n.Left, n.Right) 1546 // TODO(marvin): Fix Node.EType type union. 1547 if Op(n.Etype) == ONE { 1548 r = nod(ONOT, r, nil) 1549 } 1550 1551 // check itable/type before full compare. 1552 // TODO(marvin): Fix Node.EType type union. 1553 if Op(n.Etype) == OEQ { 1554 r = nod(OANDAND, nod(OEQ, nod(OITAB, n.Left, nil), nod(OITAB, n.Right, nil)), r) 1555 } else { 1556 r = nod(OOROR, nod(ONE, nod(OITAB, n.Left, nil), nod(OITAB, n.Right, nil)), r) 1557 } 1558 r = typecheck(r, Erv) 1559 r = walkexpr(r, init) 1560 r.Type = n.Type 1561 n = r 1562 1563 case OARRAYLIT, OSLICELIT, OMAPLIT, OSTRUCTLIT, OPTRLIT: 1564 if isStaticCompositeLiteral(n) { 1565 // n can be directly represented in the read-only data section. 1566 // Make direct reference to the static data. See issue 12841. 1567 vstat := staticname(n.Type) 1568 vstat.Name.Readonly = true 1569 fixedlit(inInitFunction, initKindStatic, n, vstat, init) 1570 n = vstat 1571 n = typecheck(n, Erv) 1572 break 1573 } 1574 var_ := temp(n.Type) 1575 anylit(n, var_, init) 1576 n = var_ 1577 1578 case OSEND: 1579 n1 := n.Right 1580 n1 = assignconv(n1, n.Left.Type.Elem(), "chan send") 1581 n1 = walkexpr(n1, init) 1582 n1 = nod(OADDR, n1, nil) 1583 n = mkcall1(chanfn("chansend1", 2, n.Left.Type), nil, init, typename(n.Left.Type), n.Left, n1) 1584 1585 case OCLOSURE: 1586 n = walkclosure(n, init) 1587 1588 case OCALLPART: 1589 n = walkpartialcall(n, init) 1590 } 1591 1592 // Expressions that are constant at run time but not 1593 // considered const by the language spec are not turned into 1594 // constants until walk. For example, if n is y%1 == 0, the 1595 // walk of y%1 may have replaced it by 0. 1596 // Check whether n with its updated args is itself now a constant. 
1597 t := n.Type 1598 1599 evconst(n) 1600 n.Type = t 1601 if n.Op == OLITERAL { 1602 n = typecheck(n, Erv) 1603 } 1604 1605 ullmancalc(n) 1606 1607 if Debug['w'] != 0 && n != nil { 1608 Dump("walk", n) 1609 } 1610 1611 lineno = lno 1612 return n 1613 } 1614 1615 // TODO(josharian): combine this with its caller and simplify 1616 func reduceSlice(n *Node) *Node { 1617 low, high, max := n.SliceBounds() 1618 if high != nil && high.Op == OLEN && samesafeexpr(n.Left, high.Left) { 1619 // Reduce x[i:len(x)] to x[i:]. 1620 high = nil 1621 } 1622 n.SetSliceBounds(low, high, max) 1623 if (n.Op == OSLICE || n.Op == OSLICESTR) && low == nil && high == nil { 1624 // Reduce x[:] to x. 1625 if Debug_slice > 0 { 1626 Warn("slice: omit slice operation") 1627 } 1628 return n.Left 1629 } 1630 return n 1631 } 1632 1633 func ascompatee1(op Op, l *Node, r *Node, init *Nodes) *Node { 1634 // convas will turn map assigns into function calls, 1635 // making it impossible for reorder3 to work. 1636 n := nod(OAS, l, r) 1637 1638 if l.Op == OINDEXMAP { 1639 return n 1640 } 1641 1642 return convas(n, init) 1643 } 1644 1645 func ascompatee(op Op, nl, nr []*Node, init *Nodes) []*Node { 1646 // check assign expression list to 1647 // a expression list. called in 1648 // expr-list = expr-list 1649 1650 // ensure order of evaluation for function calls 1651 for i := range nl { 1652 nl[i] = safeexpr(nl[i], init) 1653 } 1654 for i1 := range nr { 1655 nr[i1] = safeexpr(nr[i1], init) 1656 } 1657 1658 var nn []*Node 1659 i := 0 1660 for ; i < len(nl); i++ { 1661 if i >= len(nr) { 1662 break 1663 } 1664 // Do not generate 'x = x' during return. See issue 4014. 1665 if op == ORETURN && samesafeexpr(nl[i], nr[i]) { 1666 continue 1667 } 1668 nn = append(nn, ascompatee1(op, nl[i], nr[i], init)) 1669 } 1670 1671 // cannot happen: caller checked that lists had same length 1672 if i < len(nl) || i < len(nr) { 1673 var nln, nrn Nodes 1674 nln.Set(nl) 1675 nrn.Set(nr) 1676 yyerror("error in shape across %+v %v %+v / %d %d [%s]", nln, op, nrn, len(nl), len(nr), Curfn.Func.Nname.Sym.Name) 1677 } 1678 return nn 1679 } 1680 1681 // l is an lv and rt is the type of an rv 1682 // return 1 if this implies a function call 1683 // evaluating the lv or a function call 1684 // in the conversion of the types 1685 func fncall(l *Node, rt *Type) bool { 1686 if l.Ullman >= UINF || l.Op == OINDEXMAP { 1687 return true 1688 } 1689 var r Node 1690 if needwritebarrier(l, &r) { 1691 return true 1692 } 1693 if eqtype(l.Type, rt) { 1694 return false 1695 } 1696 return true 1697 } 1698 1699 // check assign type list to 1700 // a expression list. 
called in 1701 // expr-list = func() 1702 func ascompatet(op Op, nl Nodes, nr *Type) []*Node { 1703 r, saver := iterFields(nr) 1704 1705 var nn, mm Nodes 1706 var ullmanOverflow bool 1707 var i int 1708 for i = 0; i < nl.Len(); i++ { 1709 if r == nil { 1710 break 1711 } 1712 l := nl.Index(i) 1713 if isblank(l) { 1714 r = saver.Next() 1715 continue 1716 } 1717 1718 // any lv that causes a fn call must be 1719 // deferred until all the return arguments 1720 // have been pulled from the output arguments 1721 if fncall(l, r.Type) { 1722 tmp := temp(r.Type) 1723 tmp = typecheck(tmp, Erv) 1724 a := nod(OAS, l, tmp) 1725 a = convas(a, &mm) 1726 mm.Append(a) 1727 l = tmp 1728 } 1729 1730 a := nod(OAS, l, nodarg(r, 0)) 1731 a = convas(a, &nn) 1732 ullmancalc(a) 1733 if a.Ullman >= UINF { 1734 Dump("ascompatet ucount", a) 1735 ullmanOverflow = true 1736 } 1737 1738 nn.Append(a) 1739 r = saver.Next() 1740 } 1741 1742 if i < nl.Len() || r != nil { 1743 yyerror("ascompatet: assignment count mismatch: %d = %d", nl.Len(), nr.NumFields()) 1744 } 1745 1746 if ullmanOverflow { 1747 Fatalf("ascompatet: too many function calls evaluating parameters") 1748 } 1749 return append(nn.Slice(), mm.Slice()...) 1750 } 1751 1752 // package all the arguments that match a ... T parameter into a []T. 1753 func mkdotargslice(lr0, nn []*Node, l *Field, fp int, init *Nodes, ddd *Node) []*Node { 1754 esc := uint16(EscUnknown) 1755 if ddd != nil { 1756 esc = ddd.Esc 1757 } 1758 1759 tslice := typSlice(l.Type.Elem()) 1760 1761 var n *Node 1762 if len(lr0) == 0 { 1763 n = nodnil() 1764 n.Type = tslice 1765 } else { 1766 n = nod(OCOMPLIT, nil, typenod(tslice)) 1767 if ddd != nil && prealloc[ddd] != nil { 1768 prealloc[n] = prealloc[ddd] // temporary to use 1769 } 1770 n.List.Set(lr0) 1771 n.Esc = esc 1772 n = typecheck(n, Erv) 1773 if n.Type == nil { 1774 Fatalf("mkdotargslice: typecheck failed") 1775 } 1776 n = walkexpr(n, init) 1777 } 1778 1779 a := nod(OAS, nodarg(l, fp), n) 1780 nn = append(nn, convas(a, init)) 1781 return nn 1782 } 1783 1784 // helpers for shape errors 1785 func dumptypes(nl *Type, what string) string { 1786 s := "" 1787 for _, l := range nl.Fields().Slice() { 1788 if s != "" { 1789 s += ", " 1790 } 1791 s += fldconv(l, 0) 1792 } 1793 if s == "" { 1794 s = fmt.Sprintf("[no arguments %s]", what) 1795 } 1796 return s 1797 } 1798 1799 func dumpnodetypes(l []*Node, what string) string { 1800 s := "" 1801 for _, r := range l { 1802 if s != "" { 1803 s += ", " 1804 } 1805 s += r.Type.String() 1806 } 1807 if s == "" { 1808 s = fmt.Sprintf("[no arguments %s]", what) 1809 } 1810 return s 1811 } 1812 1813 // check assign expression list to 1814 // a type list. called in 1815 // return expr-list 1816 // func(expr-list) 1817 func ascompatte(op Op, call *Node, isddd bool, nl *Type, lr []*Node, fp int, init *Nodes) []*Node { 1818 lr0 := lr 1819 l, savel := iterFields(nl) 1820 var r *Node 1821 if len(lr) > 0 { 1822 r = lr[0] 1823 } 1824 var nn []*Node 1825 1826 // f(g()) where g has multiple return values 1827 if r != nil && len(lr) <= 1 && r.Type.IsFuncArgStruct() { 1828 // optimization - can do block copy 1829 if eqtypenoname(r.Type, nl) { 1830 arg := nodarg(nl, fp) 1831 r = nod(OCONVNOP, r, nil) 1832 r.Type = arg.Type 1833 nn = []*Node{convas(nod(OAS, arg, r), init)} 1834 goto ret 1835 } 1836 1837 // conversions involved. 1838 // copy into temporaries. 
1839 var alist []*Node 1840 1841 for _, l := range r.Type.Fields().Slice() { 1842 tmp := temp(l.Type) 1843 alist = append(alist, tmp) 1844 } 1845 1846 a := nod(OAS2, nil, nil) 1847 a.List.Set(alist) 1848 a.Rlist.Set(lr) 1849 a = typecheck(a, Etop) 1850 a = walkstmt(a) 1851 init.Append(a) 1852 lr = alist 1853 r = lr[0] 1854 l, savel = iterFields(nl) 1855 } 1856 1857 for { 1858 if l != nil && l.Isddd { 1859 // the ddd parameter must be last 1860 ll := savel.Next() 1861 1862 if ll != nil { 1863 yyerror("... must be last argument") 1864 } 1865 1866 // special case -- 1867 // only if we are assigning a single ddd 1868 // argument to a ddd parameter then it is 1869 // passed through unencapsulated 1870 if r != nil && len(lr) <= 1 && isddd && eqtype(l.Type, r.Type) { 1871 a := nod(OAS, nodarg(l, fp), r) 1872 a = convas(a, init) 1873 nn = append(nn, a) 1874 break 1875 } 1876 1877 // normal case -- make a slice of all 1878 // remaining arguments and pass it to 1879 // the ddd parameter. 1880 nn = mkdotargslice(lr, nn, l, fp, init, call.Right) 1881 1882 break 1883 } 1884 1885 if l == nil || r == nil { 1886 if l != nil || r != nil { 1887 l1 := dumptypes(nl, "expected") 1888 l2 := dumpnodetypes(lr0, "given") 1889 if l != nil { 1890 yyerror("not enough arguments to %v\n\t%s\n\t%s", op, l1, l2) 1891 } else { 1892 yyerror("too many arguments to %v\n\t%s\n\t%s", op, l1, l2) 1893 } 1894 } 1895 1896 break 1897 } 1898 1899 a := nod(OAS, nodarg(l, fp), r) 1900 a = convas(a, init) 1901 nn = append(nn, a) 1902 1903 l = savel.Next() 1904 r = nil 1905 lr = lr[1:] 1906 if len(lr) > 0 { 1907 r = lr[0] 1908 } 1909 } 1910 1911 ret: 1912 for _, n := range nn { 1913 n.Typecheck = 1 1914 } 1915 return nn 1916 } 1917 1918 // generate code for print 1919 func walkprint(nn *Node, init *Nodes) *Node { 1920 var r *Node 1921 var n *Node 1922 var on *Node 1923 var t *Type 1924 var et EType 1925 1926 op := nn.Op 1927 all := nn.List 1928 var calls []*Node 1929 notfirst := false 1930 1931 // Hoist all the argument evaluation up before the lock. 
1932 walkexprlistcheap(all.Slice(), init) 1933 1934 calls = append(calls, mkcall("printlock", nil, init)) 1935 for i1, n1 := range all.Slice() { 1936 if notfirst { 1937 calls = append(calls, mkcall("printsp", nil, init)) 1938 } 1939 1940 notfirst = op == OPRINTN 1941 1942 n = n1 1943 if n.Op == OLITERAL { 1944 switch n.Val().Ctype() { 1945 case CTRUNE: 1946 n = defaultlit(n, runetype) 1947 1948 case CTINT: 1949 n = defaultlit(n, Types[TINT64]) 1950 1951 case CTFLT: 1952 n = defaultlit(n, Types[TFLOAT64]) 1953 } 1954 } 1955 1956 if n.Op != OLITERAL && n.Type != nil && n.Type.Etype == TIDEAL { 1957 n = defaultlit(n, Types[TINT64]) 1958 } 1959 n = defaultlit(n, nil) 1960 all.SetIndex(i1, n) 1961 if n.Type == nil || n.Type.Etype == TFORW { 1962 continue 1963 } 1964 1965 t = n.Type 1966 et = n.Type.Etype 1967 if n.Type.IsInterface() { 1968 if n.Type.IsEmptyInterface() { 1969 on = syslook("printeface") 1970 } else { 1971 on = syslook("printiface") 1972 } 1973 on = substArgTypes(on, n.Type) // any-1 1974 } else if n.Type.IsPtr() || et == TCHAN || et == TMAP || et == TFUNC || et == TUNSAFEPTR { 1975 on = syslook("printpointer") 1976 on = substArgTypes(on, n.Type) // any-1 1977 } else if n.Type.IsSlice() { 1978 on = syslook("printslice") 1979 on = substArgTypes(on, n.Type) // any-1 1980 } else if isInt[et] { 1981 if et == TUINT64 { 1982 if (t.Sym.Pkg == Runtimepkg || compiling_runtime) && t.Sym.Name == "hex" { 1983 on = syslook("printhex") 1984 } else { 1985 on = syslook("printuint") 1986 } 1987 } else { 1988 on = syslook("printint") 1989 } 1990 } else if isFloat[et] { 1991 on = syslook("printfloat") 1992 } else if isComplex[et] { 1993 on = syslook("printcomplex") 1994 } else if et == TBOOL { 1995 on = syslook("printbool") 1996 } else if et == TSTRING { 1997 on = syslook("printstring") 1998 } else { 1999 badtype(OPRINT, n.Type, nil) 2000 continue 2001 } 2002 2003 t = on.Type.Params().Field(0).Type 2004 2005 if !eqtype(t, n.Type) { 2006 n = nod(OCONV, n, nil) 2007 n.Type = t 2008 } 2009 2010 r = nod(OCALL, on, nil) 2011 r.List.Append(n) 2012 calls = append(calls, r) 2013 } 2014 2015 if op == OPRINTN { 2016 calls = append(calls, mkcall("printnl", nil, nil)) 2017 } 2018 2019 calls = append(calls, mkcall("printunlock", nil, init)) 2020 2021 typecheckslice(calls, Etop) 2022 walkexprlist(calls, init) 2023 2024 r = nod(OEMPTY, nil, nil) 2025 r = typecheck(r, Etop) 2026 r = walkexpr(r, init) 2027 r.Ninit.Set(calls) 2028 return r 2029 } 2030 2031 func callnew(t *Type) *Node { 2032 if t.NotInHeap { 2033 yyerror("%v is go:notinheap; heap allocation disallowed", t) 2034 } 2035 dowidth(t) 2036 fn := syslook("newobject") 2037 fn = substArgTypes(fn, t) 2038 v := mkcall1(fn, ptrto(t), nil, typename(t)) 2039 v.NonNil = true 2040 return v 2041 } 2042 2043 func iscallret(n *Node) bool { 2044 n = outervalue(n) 2045 return n.Op == OINDREGSP 2046 } 2047 2048 func isstack(n *Node) bool { 2049 n = outervalue(n) 2050 2051 // If n is *autotmp and autotmp = &foo, replace n with foo. 2052 // We introduce such temps when initializing struct literals. 2053 if n.Op == OIND && n.Left.Op == ONAME && n.Left.IsAutoTmp() { 2054 defn := n.Left.Name.Defn 2055 if defn != nil && defn.Op == OAS && defn.Right.Op == OADDR { 2056 n = defn.Right.Left 2057 } 2058 } 2059 2060 switch n.Op { 2061 case OINDREGSP: 2062 return true 2063 2064 case ONAME: 2065 switch n.Class { 2066 case PAUTO, PPARAM, PPARAMOUT: 2067 return true 2068 } 2069 } 2070 2071 return false 2072 } 2073 2074 // Do we need a write barrier for the assignment l = r? 
2075 func needwritebarrier(l *Node, r *Node) bool { 2076 if !use_writebarrier { 2077 return false 2078 } 2079 2080 if l == nil || isblank(l) { 2081 return false 2082 } 2083 2084 // No write barrier for write of non-pointers. 2085 dowidth(l.Type) 2086 2087 if !haspointers(l.Type) { 2088 return false 2089 } 2090 2091 // No write barrier for write to stack. 2092 if isstack(l) { 2093 return false 2094 } 2095 2096 // No write barrier if this is a pointer to a go:notinheap 2097 // type, since the write barrier's inheap(ptr) check will fail. 2098 if l.Type.IsPtr() && l.Type.Elem().NotInHeap { 2099 return false 2100 } 2101 2102 // Implicit zeroing is still zeroing, so it needs write 2103 // barriers. In practice, these are all to stack variables 2104 // (even if isstack isn't smart enough to figure that out), so 2105 // they'll be eliminated by the backend. 2106 if r == nil { 2107 return true 2108 } 2109 2110 // Ignore no-op conversions when making decision. 2111 // Ensures that xp = unsafe.Pointer(&x) is treated 2112 // the same as xp = &x. 2113 for r.Op == OCONVNOP { 2114 r = r.Left 2115 } 2116 2117 // TODO: We can eliminate write barriers if we know *both* the 2118 // current and new content of the slot must already be shaded. 2119 // We know a pointer is shaded if it's nil, or points to 2120 // static data, a global (variable or function), or the stack. 2121 // The nil optimization could be particularly useful for 2122 // writes to just-allocated objects. Unfortunately, knowing 2123 // the "current" value of the slot requires flow analysis. 2124 2125 // No write barrier for storing address of stack values, 2126 // which are guaranteed only to be written to the stack. 2127 if r.Op == OADDR && isstack(r.Left) { 2128 return false 2129 } 2130 2131 // Otherwise, be conservative and use write barrier. 2132 return true 2133 } 2134 2135 // TODO(rsc): Perhaps componentgen should run before this. 2136 2137 func applywritebarrier(n *Node) *Node { 2138 if n.Left != nil && n.Right != nil && needwritebarrier(n.Left, n.Right) { 2139 if Debug_wb > 1 { 2140 Warnl(n.Lineno, "marking %v for barrier", n.Left) 2141 } 2142 n.Op = OASWB 2143 return n 2144 } 2145 return n 2146 } 2147 2148 func convas(n *Node, init *Nodes) *Node { 2149 if n.Op != OAS { 2150 Fatalf("convas: not OAS %v", n.Op) 2151 } 2152 2153 n.Typecheck = 1 2154 2155 var lt *Type 2156 var rt *Type 2157 if n.Left == nil || n.Right == nil { 2158 goto out 2159 } 2160 2161 lt = n.Left.Type 2162 rt = n.Right.Type 2163 if lt == nil || rt == nil { 2164 goto out 2165 } 2166 2167 if isblank(n.Left) { 2168 n.Right = defaultlit(n.Right, nil) 2169 goto out 2170 } 2171 2172 if !eqtype(lt, rt) { 2173 n.Right = assignconv(n.Right, lt, "assignment") 2174 n.Right = walkexpr(n.Right, init) 2175 } 2176 2177 out: 2178 ullmancalc(n) 2179 return n 2180 } 2181 2182 // from ascompat[te] 2183 // evaluating actual function arguments. 2184 // f(a,b) 2185 // if there is exactly one function expr, 2186 // then it is done first. 
otherwise must 2187 // make temp variables 2188 func reorder1(all []*Node) []*Node { 2189 c := 0 // function calls 2190 t := 0 // total parameters 2191 2192 for _, n := range all { 2193 t++ 2194 ullmancalc(n) 2195 if n.Ullman >= UINF { 2196 c++ 2197 } 2198 } 2199 2200 if c == 0 || t == 1 { 2201 return all 2202 } 2203 2204 var g []*Node // fncalls assigned to tempnames 2205 var f *Node // last fncall assigned to stack 2206 var r []*Node // non fncalls and tempnames assigned to stack 2207 d := 0 2208 var a *Node 2209 for _, n := range all { 2210 if n.Ullman < UINF { 2211 r = append(r, n) 2212 continue 2213 } 2214 2215 d++ 2216 if d == c { 2217 f = n 2218 continue 2219 } 2220 2221 // make assignment of fncall to tempname 2222 a = temp(n.Right.Type) 2223 2224 a = nod(OAS, a, n.Right) 2225 g = append(g, a) 2226 2227 // put normal arg assignment on list 2228 // with fncall replaced by tempname 2229 n.Right = a.Left 2230 2231 r = append(r, n) 2232 } 2233 2234 if f != nil { 2235 g = append(g, f) 2236 } 2237 return append(g, r...) 2238 } 2239 2240 // from ascompat[ee] 2241 // a,b = c,d 2242 // simultaneous assignment. there cannot 2243 // be later use of an earlier lvalue. 2244 // 2245 // function calls have been removed. 2246 func reorder3(all []*Node) []*Node { 2247 var l *Node 2248 2249 // If a needed expression may be affected by an 2250 // earlier assignment, make an early copy of that 2251 // expression and use the copy instead. 2252 var early []*Node 2253 2254 var mapinit Nodes 2255 for i, n := range all { 2256 l = n.Left 2257 2258 // Save subexpressions needed on left side. 2259 // Drill through non-dereferences. 2260 for { 2261 if l.Op == ODOT || l.Op == OPAREN { 2262 l = l.Left 2263 continue 2264 } 2265 2266 if l.Op == OINDEX && l.Left.Type.IsArray() { 2267 l.Right = reorder3save(l.Right, all, i, &early) 2268 l = l.Left 2269 continue 2270 } 2271 2272 break 2273 } 2274 2275 switch l.Op { 2276 default: 2277 Fatalf("reorder3 unexpected lvalue %#v", l.Op) 2278 2279 case ONAME: 2280 break 2281 2282 case OINDEX, OINDEXMAP: 2283 l.Left = reorder3save(l.Left, all, i, &early) 2284 l.Right = reorder3save(l.Right, all, i, &early) 2285 if l.Op == OINDEXMAP { 2286 all[i] = convas(all[i], &mapinit) 2287 } 2288 2289 case OIND, ODOTPTR: 2290 l.Left = reorder3save(l.Left, all, i, &early) 2291 } 2292 2293 // Save expression on right side. 2294 all[i].Right = reorder3save(all[i].Right, all, i, &early) 2295 } 2296 2297 early = append(mapinit.Slice(), early...) 2298 return append(early, all...) 2299 } 2300 2301 // if the evaluation of *np would be affected by the 2302 // assignments in all up to but not including the ith assignment, 2303 // copy into a temporary during *early and 2304 // replace *np with that temp. 2305 // The result of reorder3save MUST be assigned back to n, e.g. 2306 // n.Left = reorder3save(n.Left, all, i, early) 2307 func reorder3save(n *Node, all []*Node, i int, early *[]*Node) *Node { 2308 if !aliased(n, all, i) { 2309 return n 2310 } 2311 2312 q := temp(n.Type) 2313 q = nod(OAS, q, n) 2314 q = typecheck(q, Etop) 2315 *early = append(*early, q) 2316 return q.Left 2317 } 2318 2319 // what's the outer value that a write to n affects? 2320 // outer value means containing struct or array. 
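// For example (illustrative): writes to x.f, to x[2] with x an array, or to
// (x) all affect the variable x itself, so the node for x is returned;
// writes through a pointer or into a slice element (p.f with p a pointer,
// s[i] with s a slice) affect indirectly reached memory, so that node is
// returned unchanged.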
2321 func outervalue(n *Node) *Node { 2322 for { 2323 if n.Op == OXDOT { 2324 Fatalf("OXDOT in walk") 2325 } 2326 if n.Op == ODOT || n.Op == OPAREN || n.Op == OCONVNOP { 2327 n = n.Left 2328 continue 2329 } 2330 2331 if n.Op == OINDEX && n.Left.Type != nil && n.Left.Type.IsArray() { 2332 n = n.Left 2333 continue 2334 } 2335 2336 break 2337 } 2338 2339 return n 2340 } 2341 2342 // Is it possible that the computation of n might be 2343 // affected by writes in as up to but not including the ith element? 2344 func aliased(n *Node, all []*Node, i int) bool { 2345 if n == nil { 2346 return false 2347 } 2348 2349 // Treat all fields of a struct as referring to the whole struct. 2350 // We could do better but we would have to keep track of the fields. 2351 for n.Op == ODOT { 2352 n = n.Left 2353 } 2354 2355 // Look for obvious aliasing: a variable being assigned 2356 // during the all list and appearing in n. 2357 // Also record whether there are any writes to main memory. 2358 // Also record whether there are any writes to variables 2359 // whose addresses have been taken. 2360 memwrite := 0 2361 2362 varwrite := 0 2363 var a *Node 2364 for _, an := range all[:i] { 2365 a = outervalue(an.Left) 2366 2367 for a.Op == ODOT { 2368 a = a.Left 2369 } 2370 2371 if a.Op != ONAME { 2372 memwrite = 1 2373 continue 2374 } 2375 2376 switch n.Class { 2377 default: 2378 varwrite = 1 2379 continue 2380 2381 case PAUTO, PPARAM, PPARAMOUT: 2382 if n.Addrtaken { 2383 varwrite = 1 2384 continue 2385 } 2386 2387 if vmatch2(a, n) { 2388 // Direct hit. 2389 return true 2390 } 2391 } 2392 } 2393 2394 // The variables being written do not appear in n. 2395 // However, n might refer to computed addresses 2396 // that are being written. 2397 2398 // If no computed addresses are affected by the writes, no aliasing. 2399 if memwrite == 0 && varwrite == 0 { 2400 return false 2401 } 2402 2403 // If n does not refer to computed addresses 2404 // (that is, if n only refers to variables whose addresses 2405 // have not been taken), no aliasing. 2406 if varexpr(n) { 2407 return false 2408 } 2409 2410 // Otherwise, both the writes and n refer to computed memory addresses. 2411 // Assume that they might conflict. 2412 return true 2413 } 2414 2415 // does the evaluation of n only refer to variables 2416 // whose addresses have not been taken? 2417 // (and no other memory) 2418 func varexpr(n *Node) bool { 2419 if n == nil { 2420 return true 2421 } 2422 2423 switch n.Op { 2424 case OLITERAL: 2425 return true 2426 2427 case ONAME: 2428 switch n.Class { 2429 case PAUTO, PPARAM, PPARAMOUT: 2430 if !n.Addrtaken { 2431 return true 2432 } 2433 } 2434 2435 return false 2436 2437 case OADD, 2438 OSUB, 2439 OOR, 2440 OXOR, 2441 OMUL, 2442 ODIV, 2443 OMOD, 2444 OLSH, 2445 ORSH, 2446 OAND, 2447 OANDNOT, 2448 OPLUS, 2449 OMINUS, 2450 OCOM, 2451 OPAREN, 2452 OANDAND, 2453 OOROR, 2454 OCONV, 2455 OCONVNOP, 2456 OCONVIFACE, 2457 ODOTTYPE: 2458 return varexpr(n.Left) && varexpr(n.Right) 2459 2460 case ODOT: // but not ODOTPTR 2461 // Should have been handled in aliased. 2462 Fatalf("varexpr unexpected ODOT") 2463 } 2464 2465 // Be conservative. 2466 return false 2467 } 2468 2469 // is the name l mentioned in r? 
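// For example (illustrative): if l is the node for the variable x, then
// vmatch2 reports true when r represents x+1 or f(y, x), and false when
// r represents y+z.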
2470 func vmatch2(l *Node, r *Node) bool {
2471 if r == nil {
2472 return false
2473 }
2474 switch r.Op {
2475 // match each right given left
2476 case ONAME:
2477 return l == r
2478
2479 case OLITERAL:
2480 return false
2481 }
2482
2483 if vmatch2(l, r.Left) {
2484 return true
2485 }
2486 if vmatch2(l, r.Right) {
2487 return true
2488 }
2489 for _, n := range r.List.Slice() {
2490 if vmatch2(l, n) {
2491 return true
2492 }
2493 }
2494 return false
2495 }
2496
2497 // is any name mentioned in l also mentioned in r?
2498 // called by sinit.go
2499 func vmatch1(l *Node, r *Node) bool {
2500 // isolate all left sides
2501 if l == nil || r == nil {
2502 return false
2503 }
2504 switch l.Op {
2505 case ONAME:
2506 switch l.Class {
2507 case PPARAM, PAUTO:
2508 break
2509
2510 // assignment to non-stack variable
2511 // must be delayed if right has function calls.
2512 default:
2513 if r.Ullman >= UINF {
2514 return true
2515 }
2516 }
2517
2518 return vmatch2(l, r)
2519
2520 case OLITERAL:
2521 return false
2522 }
2523
2524 if vmatch1(l.Left, r) {
2525 return true
2526 }
2527 if vmatch1(l.Right, r) {
2528 return true
2529 }
2530 for _, n := range l.List.Slice() {
2531 if vmatch1(n, r) {
2532 return true
2533 }
2534 }
2535 return false
2536 }
2537
2538 // paramstoheap returns code to allocate memory for heap-escaped parameters
2539 // and to copy non-result parameters' values from the stack.
2540 // For result parameters, code is also produced to zero-initialize their
2541 // stack memory, since the garbage collector assumes results are always live.
2542 func paramstoheap(params *Type) []*Node {
2543 var nn []*Node
2544 for _, t := range params.Fields().Slice() {
2545 // For precise stacks, the garbage collector assumes results
2546 // are always live, so zero them always.
2547 if params.StructType().Funarg == FunargResults {
2548 // Defer might stop a panic and show the
2549 // return values as they exist at the time of panic.
2550 // Make sure to zero them on entry to the function.
2551 nn = append(nn, nod(OAS, nodarg(t, 1), nil))
2552 }
2553
2554 v := t.Nname
2555 if v != nil && v.Sym != nil && strings.HasPrefix(v.Sym.Name, "~r") { // unnamed result
2556 v = nil
2557 }
2558 if v == nil {
2559 continue
2560 }
2561
2562 if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil {
2563 nn = append(nn, walkstmt(nod(ODCL, v, nil)))
2564 if stackcopy.Class == PPARAM {
2565 nn = append(nn, walkstmt(typecheck(nod(OAS, v, stackcopy), Etop)))
2566 }
2567 }
2568 }
2569
2570 return nn
2571 }
2572
2573 // returnsfromheap returns code to copy values for heap-escaped parameters
2574 // back to the stack.
2575 func returnsfromheap(params *Type) []*Node {
2576 var nn []*Node
2577 for _, t := range params.Fields().Slice() {
2578 v := t.Nname
2579 if v == nil {
2580 continue
2581 }
2582 if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil && stackcopy.Class == PPARAMOUT {
2583 nn = append(nn, walkstmt(typecheck(nod(OAS, stackcopy, v), Etop)))
2584 }
2585 }
2586
2587 return nn
2588 }
2589
2590 // heapmoves generates code to handle migrating heap-escaped parameters
2591 // between the stack and the heap. The generated code is added to Curfn's
2592 // Enter and Exit lists.
2593 func heapmoves() {
2594 lno := lineno
2595 lineno = Curfn.Lineno
2596 nn := paramstoheap(Curfn.Type.Recvs())
2597 nn = append(nn, paramstoheap(Curfn.Type.Params())...)
2598 nn = append(nn, paramstoheap(Curfn.Type.Results())...)
2599 Curfn.Func.Enter.Append(nn...)
2600 lineno = Curfn.Func.Endlineno
2601 Curfn.Func.Exit.Append(returnsfromheap(Curfn.Type.Results())...)
2602 lineno = lno 2603 } 2604 2605 func vmkcall(fn *Node, t *Type, init *Nodes, va []*Node) *Node { 2606 if fn.Type == nil || fn.Type.Etype != TFUNC { 2607 Fatalf("mkcall %v %v", fn, fn.Type) 2608 } 2609 2610 n := fn.Type.Params().NumFields() 2611 2612 r := nod(OCALL, fn, nil) 2613 r.List.Set(va[:n]) 2614 if fn.Type.Results().NumFields() > 0 { 2615 r = typecheck(r, Erv|Efnstruct) 2616 } else { 2617 r = typecheck(r, Etop) 2618 } 2619 r = walkexpr(r, init) 2620 r.Type = t 2621 return r 2622 } 2623 2624 func mkcall(name string, t *Type, init *Nodes, args ...*Node) *Node { 2625 return vmkcall(syslook(name), t, init, args) 2626 } 2627 2628 func mkcall1(fn *Node, t *Type, init *Nodes, args ...*Node) *Node { 2629 return vmkcall(fn, t, init, args) 2630 } 2631 2632 func conv(n *Node, t *Type) *Node { 2633 if eqtype(n.Type, t) { 2634 return n 2635 } 2636 n = nod(OCONV, n, nil) 2637 n.Type = t 2638 n = typecheck(n, Erv) 2639 return n 2640 } 2641 2642 func chanfn(name string, n int, t *Type) *Node { 2643 if !t.IsChan() { 2644 Fatalf("chanfn %v", t) 2645 } 2646 fn := syslook(name) 2647 switch n { 2648 default: 2649 Fatalf("chanfn %d", n) 2650 case 1: 2651 fn = substArgTypes(fn, t.Elem()) 2652 case 2: 2653 fn = substArgTypes(fn, t.Elem(), t.Elem()) 2654 } 2655 return fn 2656 } 2657 2658 func mapfn(name string, t *Type) *Node { 2659 if !t.IsMap() { 2660 Fatalf("mapfn %v", t) 2661 } 2662 fn := syslook(name) 2663 fn = substArgTypes(fn, t.Key(), t.Val(), t.Key(), t.Val()) 2664 return fn 2665 } 2666 2667 func mapfndel(name string, t *Type) *Node { 2668 if !t.IsMap() { 2669 Fatalf("mapfn %v", t) 2670 } 2671 fn := syslook(name) 2672 fn = substArgTypes(fn, t.Key(), t.Val(), t.Key()) 2673 return fn 2674 } 2675 2676 func writebarrierfn(name string, l *Type, r *Type) *Node { 2677 fn := syslook(name) 2678 fn = substArgTypes(fn, l, r) 2679 return fn 2680 } 2681 2682 func addstr(n *Node, init *Nodes) *Node { 2683 // orderexpr rewrote OADDSTR to have a list of strings. 2684 c := n.List.Len() 2685 2686 if c < 2 { 2687 yyerror("addstr count %d too small", c) 2688 } 2689 2690 buf := nodnil() 2691 if n.Esc == EscNone { 2692 sz := int64(0) 2693 for _, n1 := range n.List.Slice() { 2694 if n1.Op == OLITERAL { 2695 sz += int64(len(n1.Val().U.(string))) 2696 } 2697 } 2698 2699 // Don't allocate the buffer if the result won't fit. 2700 if sz < tmpstringbufsize { 2701 // Create temporary buffer for result string on stack. 2702 t := typArray(Types[TUINT8], tmpstringbufsize) 2703 2704 buf = nod(OADDR, temp(t), nil) 2705 } 2706 } 2707 2708 // build list of string arguments 2709 args := []*Node{buf} 2710 for _, n2 := range n.List.Slice() { 2711 args = append(args, conv(n2, Types[TSTRING])) 2712 } 2713 2714 var fn string 2715 if c <= 5 { 2716 // small numbers of strings use direct runtime helpers. 2717 // note: orderexpr knows this cutoff too. 2718 fn = fmt.Sprintf("concatstring%d", c) 2719 } else { 2720 // large numbers of strings are passed to the runtime as a slice. 2721 fn = "concatstrings" 2722 2723 t := typSlice(Types[TSTRING]) 2724 slice := nod(OCOMPLIT, nil, typenod(t)) 2725 if prealloc[n] != nil { 2726 prealloc[slice] = prealloc[n] 2727 } 2728 slice.List.Set(args[1:]) // skip buf arg 2729 args = []*Node{buf, slice} 2730 slice.Esc = EscNone 2731 } 2732 2733 cat := syslook(fn) 2734 r := nod(OCALL, cat, nil) 2735 r.List.Set(args) 2736 r = typecheck(r, Erv) 2737 r = walkexpr(r, init) 2738 r.Type = n.Type 2739 2740 return r 2741 } 2742 2743 // expand append(l1, l2...) 
to 2744 // init { 2745 // s := l1 2746 // n := len(s) + len(l2) 2747 // // Compare as uint so growslice can panic on overflow. 2748 // if uint(n) > uint(cap(s)) { 2749 // s = growslice(s, n) 2750 // } 2751 // s = s[:n] 2752 // memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T)) 2753 // } 2754 // s 2755 // 2756 // l2 is allowed to be a string. 2757 func appendslice(n *Node, init *Nodes) *Node { 2758 walkexprlistsafe(n.List.Slice(), init) 2759 2760 // walkexprlistsafe will leave OINDEX (s[n]) alone if both s 2761 // and n are name or literal, but those may index the slice we're 2762 // modifying here. Fix explicitly. 2763 ls := n.List.Slice() 2764 for i1, n1 := range ls { 2765 ls[i1] = cheapexpr(n1, init) 2766 } 2767 2768 l1 := n.List.First() 2769 l2 := n.List.Second() 2770 2771 var l []*Node 2772 2773 // var s []T 2774 s := temp(l1.Type) 2775 l = append(l, nod(OAS, s, l1)) // s = l1 2776 2777 // n := len(s) + len(l2) 2778 nn := temp(Types[TINT]) 2779 l = append(l, nod(OAS, nn, nod(OADD, nod(OLEN, s, nil), nod(OLEN, l2, nil)))) 2780 2781 // if uint(n) > uint(cap(s)) 2782 nif := nod(OIF, nil, nil) 2783 nif.Left = nod(OGT, nod(OCONV, nn, nil), nod(OCONV, nod(OCAP, s, nil), nil)) 2784 nif.Left.Left.Type = Types[TUINT] 2785 nif.Left.Right.Type = Types[TUINT] 2786 2787 // instantiate growslice(Type*, []any, int) []any 2788 fn := syslook("growslice") 2789 fn = substArgTypes(fn, s.Type.Elem(), s.Type.Elem()) 2790 2791 // s = growslice(T, s, n) 2792 nif.Nbody.Set1(nod(OAS, s, mkcall1(fn, s.Type, &nif.Ninit, typename(s.Type.Elem()), s, nn))) 2793 l = append(l, nif) 2794 2795 // s = s[:n] 2796 nt := nod(OSLICE, s, nil) 2797 nt.SetSliceBounds(nil, nn, nil) 2798 nt.Etype = 1 2799 l = append(l, nod(OAS, s, nt)) 2800 2801 if haspointers(l1.Type.Elem()) { 2802 // copy(s[len(l1):], l2) 2803 nptr1 := nod(OSLICE, s, nil) 2804 nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil) 2805 nptr1.Etype = 1 2806 nptr2 := l2 2807 fn := syslook("typedslicecopy") 2808 fn = substArgTypes(fn, l1.Type, l2.Type) 2809 var ln Nodes 2810 ln.Set(l) 2811 nt := mkcall1(fn, Types[TINT], &ln, typename(l1.Type.Elem()), nptr1, nptr2) 2812 l = append(ln.Slice(), nt) 2813 } else if instrumenting && !compiling_runtime { 2814 // rely on runtime to instrument copy. 2815 // copy(s[len(l1):], l2) 2816 nptr1 := nod(OSLICE, s, nil) 2817 nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil) 2818 nptr1.Etype = 1 2819 nptr2 := l2 2820 var fn *Node 2821 if l2.Type.IsString() { 2822 fn = syslook("slicestringcopy") 2823 } else { 2824 fn = syslook("slicecopy") 2825 } 2826 fn = substArgTypes(fn, l1.Type, l2.Type) 2827 var ln Nodes 2828 ln.Set(l) 2829 nt := mkcall1(fn, Types[TINT], &ln, nptr1, nptr2, nodintconst(s.Type.Elem().Width)) 2830 l = append(ln.Slice(), nt) 2831 } else { 2832 // memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T)) 2833 nptr1 := nod(OINDEX, s, nod(OLEN, l1, nil)) 2834 nptr1.Bounded = true 2835 2836 nptr1 = nod(OADDR, nptr1, nil) 2837 2838 nptr2 := nod(OSPTR, l2, nil) 2839 2840 fn := syslook("memmove") 2841 fn = substArgTypes(fn, s.Type.Elem(), s.Type.Elem()) 2842 2843 var ln Nodes 2844 ln.Set(l) 2845 nwid := cheapexpr(conv(nod(OLEN, l2, nil), Types[TUINTPTR]), &ln) 2846 2847 nwid = nod(OMUL, nwid, nodintconst(s.Type.Elem().Width)) 2848 nt := mkcall1(fn, nil, &ln, nptr1, nptr2, nwid) 2849 l = append(ln.Slice(), nt) 2850 } 2851 2852 typecheckslice(l, Etop) 2853 walkstmtlist(l) 2854 init.Append(l...) 
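// At this point init has been extended with, in order: the assignments
// to s and n, the growslice guard, the reslicing s = s[:n], and the
// element copy; s now refers to the grown slice.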
2855 return s 2856 } 2857 2858 // Rewrite append(src, x, y, z) so that any side effects in 2859 // x, y, z (including runtime panics) are evaluated in 2860 // initialization statements before the append. 2861 // For normal code generation, stop there and leave the 2862 // rest to cgen_append. 2863 // 2864 // For race detector, expand append(src, a [, b]* ) to 2865 // 2866 // init { 2867 // s := src 2868 // const argc = len(args) - 1 2869 // if cap(s) - len(s) < argc { 2870 // s = growslice(s, len(s)+argc) 2871 // } 2872 // n := len(s) 2873 // s = s[:n+argc] 2874 // s[n] = a 2875 // s[n+1] = b 2876 // ... 2877 // } 2878 // s 2879 func walkappend(n *Node, init *Nodes, dst *Node) *Node { 2880 if !samesafeexpr(dst, n.List.First()) { 2881 n.List.SetIndex(0, safeexpr(n.List.Index(0), init)) 2882 n.List.SetIndex(0, walkexpr(n.List.Index(0), init)) 2883 } 2884 walkexprlistsafe(n.List.Slice()[1:], init) 2885 2886 // walkexprlistsafe will leave OINDEX (s[n]) alone if both s 2887 // and n are name or literal, but those may index the slice we're 2888 // modifying here. Fix explicitly. 2889 // Using cheapexpr also makes sure that the evaluation 2890 // of all arguments (and especially any panics) happen 2891 // before we begin to modify the slice in a visible way. 2892 ls := n.List.Slice()[1:] 2893 for i, n := range ls { 2894 ls[i] = cheapexpr(n, init) 2895 } 2896 2897 nsrc := n.List.First() 2898 2899 argc := n.List.Len() - 1 2900 if argc < 1 { 2901 return nsrc 2902 } 2903 2904 // General case, with no function calls left as arguments. 2905 // Leave for gen, except that instrumentation requires old form. 2906 if !instrumenting || compiling_runtime { 2907 return n 2908 } 2909 2910 var l []*Node 2911 2912 ns := temp(nsrc.Type) 2913 l = append(l, nod(OAS, ns, nsrc)) // s = src 2914 2915 na := nodintconst(int64(argc)) // const argc 2916 nx := nod(OIF, nil, nil) // if cap(s) - len(s) < argc 2917 nx.Left = nod(OLT, nod(OSUB, nod(OCAP, ns, nil), nod(OLEN, ns, nil)), na) 2918 2919 fn := syslook("growslice") // growslice(<type>, old []T, mincap int) (ret []T) 2920 fn = substArgTypes(fn, ns.Type.Elem(), ns.Type.Elem()) 2921 2922 nx.Nbody.Set1(nod(OAS, ns, 2923 mkcall1(fn, ns.Type, &nx.Ninit, typename(ns.Type.Elem()), ns, 2924 nod(OADD, nod(OLEN, ns, nil), na)))) 2925 2926 l = append(l, nx) 2927 2928 nn := temp(Types[TINT]) 2929 l = append(l, nod(OAS, nn, nod(OLEN, ns, nil))) // n = len(s) 2930 2931 nx = nod(OSLICE, ns, nil) // ...s[:n+argc] 2932 nx.SetSliceBounds(nil, nod(OADD, nn, na), nil) 2933 nx.Etype = 1 2934 l = append(l, nod(OAS, ns, nx)) // s = s[:n+argc] 2935 2936 ls = n.List.Slice()[1:] 2937 for i, n := range ls { 2938 nx = nod(OINDEX, ns, nn) // s[n] ... 2939 nx.Bounded = true 2940 l = append(l, nod(OAS, nx, n)) // s[n] = arg 2941 if i+1 < len(ls) { 2942 l = append(l, nod(OAS, nn, nod(OADD, nn, nodintconst(1)))) // n = n + 1 2943 } 2944 } 2945 2946 typecheckslice(l, Etop) 2947 walkstmtlist(l) 2948 init.Append(l...) 2949 return ns 2950 } 2951 2952 // Lower copy(a, b) to a memmove call or a runtime call. 2953 // 2954 // init { 2955 // n := len(a) 2956 // if n > len(b) { n = len(b) } 2957 // memmove(a.ptr, b.ptr, n*sizeof(elem(a))) 2958 // } 2959 // n; 2960 // 2961 // Also works if b is a string. 
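// In summary of the cases below (a sketch, not additional behavior):
// element types containing pointers are lowered to typedslicecopy, copies
// that must remain a single runtime call (runtimecall) use slicecopy or
// slicestringcopy, and the remaining case is expanded inline as above.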
2962 // 2963 func copyany(n *Node, init *Nodes, runtimecall bool) *Node { 2964 if haspointers(n.Left.Type.Elem()) { 2965 fn := writebarrierfn("typedslicecopy", n.Left.Type, n.Right.Type) 2966 return mkcall1(fn, n.Type, init, typename(n.Left.Type.Elem()), n.Left, n.Right) 2967 } 2968 2969 if runtimecall { 2970 var fn *Node 2971 if n.Right.Type.IsString() { 2972 fn = syslook("slicestringcopy") 2973 } else { 2974 fn = syslook("slicecopy") 2975 } 2976 fn = substArgTypes(fn, n.Left.Type, n.Right.Type) 2977 return mkcall1(fn, n.Type, init, n.Left, n.Right, nodintconst(n.Left.Type.Elem().Width)) 2978 } 2979 2980 n.Left = walkexpr(n.Left, init) 2981 n.Right = walkexpr(n.Right, init) 2982 nl := temp(n.Left.Type) 2983 nr := temp(n.Right.Type) 2984 var l []*Node 2985 l = append(l, nod(OAS, nl, n.Left)) 2986 l = append(l, nod(OAS, nr, n.Right)) 2987 2988 nfrm := nod(OSPTR, nr, nil) 2989 nto := nod(OSPTR, nl, nil) 2990 2991 nlen := temp(Types[TINT]) 2992 2993 // n = len(to) 2994 l = append(l, nod(OAS, nlen, nod(OLEN, nl, nil))) 2995 2996 // if n > len(frm) { n = len(frm) } 2997 nif := nod(OIF, nil, nil) 2998 2999 nif.Left = nod(OGT, nlen, nod(OLEN, nr, nil)) 3000 nif.Nbody.Append(nod(OAS, nlen, nod(OLEN, nr, nil))) 3001 l = append(l, nif) 3002 3003 // Call memmove. 3004 fn := syslook("memmove") 3005 3006 fn = substArgTypes(fn, nl.Type.Elem(), nl.Type.Elem()) 3007 nwid := temp(Types[TUINTPTR]) 3008 l = append(l, nod(OAS, nwid, conv(nlen, Types[TUINTPTR]))) 3009 nwid = nod(OMUL, nwid, nodintconst(nl.Type.Elem().Width)) 3010 l = append(l, mkcall1(fn, nil, init, nto, nfrm, nwid)) 3011 3012 typecheckslice(l, Etop) 3013 walkstmtlist(l) 3014 init.Append(l...) 3015 return nlen 3016 } 3017 3018 func eqfor(t *Type, needsize *int) *Node { 3019 // Should only arrive here with large memory or 3020 // a struct/array containing a non-memory field/element. 3021 // Small memory is handled inline, and single non-memory 3022 // is handled during type check (OCMPSTR etc). 3023 switch a, _ := algtype1(t); a { 3024 case AMEM: 3025 n := syslook("memequal") 3026 n = substArgTypes(n, t, t) 3027 *needsize = 1 3028 return n 3029 case ASPECIAL: 3030 sym := typesymprefix(".eq", t) 3031 n := newname(sym) 3032 n.Class = PFUNC 3033 ntype := nod(OTFUNC, nil, nil) 3034 ntype.List.Append(nod(ODCLFIELD, nil, typenod(ptrto(t)))) 3035 ntype.List.Append(nod(ODCLFIELD, nil, typenod(ptrto(t)))) 3036 ntype.Rlist.Append(nod(ODCLFIELD, nil, typenod(Types[TBOOL]))) 3037 ntype = typecheck(ntype, Etype) 3038 n.Type = ntype.Type 3039 *needsize = 0 3040 return n 3041 } 3042 Fatalf("eqfor %v", t) 3043 return nil 3044 } 3045 3046 // The result of walkcompare MUST be assigned back to n, e.g. 3047 // n.Left = walkcompare(n.Left, init) 3048 func walkcompare(n *Node, init *Nodes) *Node { 3049 // Given interface value l and concrete value r, rewrite 3050 // l == r 3051 // into types-equal && data-equal. 3052 // This is efficient, avoids allocations, and avoids runtime calls. 3053 var l, r *Node 3054 if n.Left.Type.IsInterface() && !n.Right.Type.IsInterface() { 3055 l = n.Left 3056 r = n.Right 3057 } else if !n.Left.Type.IsInterface() && n.Right.Type.IsInterface() { 3058 l = n.Right 3059 r = n.Left 3060 } 3061 3062 if l != nil { 3063 // Handle both == and !=. 3064 eq := n.Op 3065 var andor Op 3066 if eq == OEQ { 3067 andor = OANDAND 3068 } else { 3069 andor = OOROR 3070 } 3071 // Check for types equal. 
3072 // For empty interface, this is: 3073 // l.tab == type(r) 3074 // For non-empty interface, this is: 3075 // l.tab != nil && l.tab._type == type(r) 3076 var eqtype *Node 3077 tab := nod(OITAB, l, nil) 3078 rtyp := typename(r.Type) 3079 if l.Type.IsEmptyInterface() { 3080 tab.Type = ptrto(Types[TUINT8]) 3081 tab.Typecheck = 1 3082 eqtype = nod(eq, tab, rtyp) 3083 } else { 3084 nonnil := nod(brcom(eq), nodnil(), tab) 3085 match := nod(eq, itabType(tab), rtyp) 3086 eqtype = nod(andor, nonnil, match) 3087 } 3088 // Check for data equal. 3089 eqdata := nod(eq, ifaceData(l, r.Type), r) 3090 // Put it all together. 3091 expr := nod(andor, eqtype, eqdata) 3092 n = finishcompare(n, expr, init) 3093 return n 3094 } 3095 3096 // Must be comparison of array or struct. 3097 // Otherwise back end handles it. 3098 // While we're here, decide whether to 3099 // inline or call an eq alg. 3100 t := n.Left.Type 3101 var inline bool 3102 switch t.Etype { 3103 default: 3104 return n 3105 case TARRAY: 3106 inline = t.NumElem() <= 1 || (t.NumElem() <= 4 && issimple[t.Elem().Etype]) 3107 case TSTRUCT: 3108 inline = t.NumFields() <= 4 3109 } 3110 3111 cmpl := n.Left 3112 for cmpl != nil && cmpl.Op == OCONVNOP { 3113 cmpl = cmpl.Left 3114 } 3115 cmpr := n.Right 3116 for cmpr != nil && cmpr.Op == OCONVNOP { 3117 cmpr = cmpr.Left 3118 } 3119 3120 // Chose not to inline. Call equality function directly. 3121 if !inline { 3122 if !islvalue(cmpl) || !islvalue(cmpr) { 3123 Fatalf("arguments of comparison must be lvalues - %v %v", cmpl, cmpr) 3124 } 3125 3126 // eq algs take pointers 3127 pl := temp(ptrto(t)) 3128 al := nod(OAS, pl, nod(OADDR, cmpl, nil)) 3129 al.Right.Etype = 1 // addr does not escape 3130 al = typecheck(al, Etop) 3131 init.Append(al) 3132 3133 pr := temp(ptrto(t)) 3134 ar := nod(OAS, pr, nod(OADDR, cmpr, nil)) 3135 ar.Right.Etype = 1 // addr does not escape 3136 ar = typecheck(ar, Etop) 3137 init.Append(ar) 3138 3139 var needsize int 3140 call := nod(OCALL, eqfor(t, &needsize), nil) 3141 call.List.Append(pl) 3142 call.List.Append(pr) 3143 if needsize != 0 { 3144 call.List.Append(nodintconst(t.Width)) 3145 } 3146 res := call 3147 if n.Op != OEQ { 3148 res = nod(ONOT, res, nil) 3149 } 3150 n = finishcompare(n, res, init) 3151 return n 3152 } 3153 3154 // inline: build boolean expression comparing element by element 3155 andor := OANDAND 3156 if n.Op == ONE { 3157 andor = OOROR 3158 } 3159 var expr *Node 3160 compare := func(el, er *Node) { 3161 a := nod(n.Op, el, er) 3162 if expr == nil { 3163 expr = a 3164 } else { 3165 expr = nod(andor, expr, a) 3166 } 3167 } 3168 cmpl = safeexpr(cmpl, init) 3169 cmpr = safeexpr(cmpr, init) 3170 if t.IsStruct() { 3171 for _, f := range t.Fields().Slice() { 3172 sym := f.Sym 3173 if isblanksym(sym) { 3174 continue 3175 } 3176 compare( 3177 nodSym(OXDOT, cmpl, sym), 3178 nodSym(OXDOT, cmpr, sym), 3179 ) 3180 } 3181 } else { 3182 for i := 0; int64(i) < t.NumElem(); i++ { 3183 compare( 3184 nod(OINDEX, cmpl, nodintconst(int64(i))), 3185 nod(OINDEX, cmpr, nodintconst(int64(i))), 3186 ) 3187 } 3188 } 3189 if expr == nil { 3190 expr = nodbool(n.Op == OEQ) 3191 } 3192 n = finishcompare(n, expr, init) 3193 return n 3194 } 3195 3196 // The result of finishcompare MUST be assigned back to n, e.g. 3197 // n.Left = finishcompare(n.Left, x, r, init) 3198 func finishcompare(n, r *Node, init *Nodes) *Node { 3199 // Use nn here to avoid passing r to typecheck. 
3200 nn := r
3201 nn = typecheck(nn, Erv)
3202 nn = walkexpr(nn, init)
3203 r = nn
3204 if r.Type != n.Type {
3205 r = nod(OCONVNOP, r, nil)
3206 r.Type = n.Type
3207 r.Typecheck = 1
3208 nn = r
3209 }
3210 return nn
3211 }
3212
3213 func samecheap(a *Node, b *Node) bool {
3214 var ar *Node
3215 var br *Node
3216 for a != nil && b != nil && a.Op == b.Op {
3217 switch a.Op {
3218 default:
3219 return false
3220
3221 case ONAME:
3222 return a == b
3223
3224 case ODOT, ODOTPTR:
3225 if a.Sym != b.Sym {
3226 return false
3227 }
3228
3229 case OINDEX:
3230 ar = a.Right
3231 br = b.Right
3232 if !Isconst(ar, CTINT) || !Isconst(br, CTINT) || ar.Val().U.(*Mpint).Cmp(br.Val().U.(*Mpint)) != 0 {
3233 return false
3234 }
3235 }
3236
3237 a = a.Left
3238 b = b.Left
3239 }
3240
3241 return false
3242 }
3243
3244 // The result of walkrotate MUST be assigned back to n, e.g.
3245 // n.Left = walkrotate(n.Left)
3246 func walkrotate(n *Node) *Node {
3247 if Thearch.LinkArch.InFamily(sys.MIPS, sys.MIPS64, sys.PPC64) {
3248 return n
3249 }
3250
3251 // Want << | >> or >> | << or << ^ >> or >> ^ << on unsigned value.
3252 l := n.Left
3253
3254 r := n.Right
3255 if (n.Op != OOR && n.Op != OXOR) || (l.Op != OLSH && l.Op != ORSH) || (r.Op != OLSH && r.Op != ORSH) || n.Type == nil || n.Type.IsSigned() || l.Op == r.Op {
3256 return n
3257 }
3258
3259 // Want same, side effect-free expression on lhs of both shifts.
3260 if !samecheap(l.Left, r.Left) {
3261 return n
3262 }
3263
3264 // Constants adding to width?
3265 w := int(l.Type.Width * 8)
3266
3267 if Thearch.LinkArch.Family == sys.S390X && w != 32 && w != 64 {
3268 // only supports 32-bit and 64-bit rotates
3269 return n
3270 }
3271
3272 if smallintconst(l.Right) && smallintconst(r.Right) {
3273 sl := int(l.Right.Int64())
3274 if sl >= 0 {
3275 sr := int(r.Right.Int64())
3276 if sr >= 0 && sl+sr == w {
3277 // Rewrite left shift half to left rotate.
3278 if l.Op == OLSH {
3279 n = l
3280 } else {
3281 n = r
3282 }
3283 n.Op = OLROT
3284
3285 // Remove rotate 0 and rotate w.
3286 s := int(n.Right.Int64())
3287
3288 if s == 0 || s == w {
3289 n = n.Left
3290 }
3291 return n
3292 }
3293 }
3294 return n
3295 }
3296
3297 // TODO: Could allow s and 32-s if s is bounded (maybe s&31 and 32-s&31).
3298 return n
3299 }
3300
3301 // isIntOrdering reports whether n is a <, ≤, >, or ≥ ordering between integers.
3302 func (n *Node) isIntOrdering() bool {
3303 switch n.Op {
3304 case OLE, OLT, OGE, OGT:
3305 default:
3306 return false
3307 }
3308 return n.Left.Type.IsInteger() && n.Right.Type.IsInteger()
3309 }
3310
3311 // walkinrange optimizes integer-in-range checks, such as 4 <= x && x < 10.
3312 // n must be an OANDAND or OOROR node.
3313 // The result of walkinrange MUST be assigned back to n, e.g.
3314 // n.Left = walkinrange(n.Left)
3315 func walkinrange(n *Node, init *Nodes) *Node {
3316 // We are looking for something equivalent to a opl b OP b opr c, where:
3317 // * a, b, and c have integer type
3318 // * b is side-effect-free
3319 // * opl and opr are each < or ≤
3320 // * OP is &&
3321 l := n.Left
3322 r := n.Right
3323 if !l.isIntOrdering() || !r.isIntOrdering() {
3324 return n
3325 }
3326
3327 // Find b, if it exists, and rename appropriately.
3328 // Input is: l.Left l.Op l.Right ANDAND/OROR r.Left r.Op r.Right
3329 // Output is: a opl b(==x) ANDAND/OROR b(==x) opr c
3330 a, opl, b := l.Left, l.Op, l.Right
3331 x, opr, c := r.Left, r.Op, r.Right
3332 for i := 0; ; i++ {
3333 if samesafeexpr(b, x) {
3334 break
3335 }
3336 if i == 3 {
3337 // Tried all permutations and couldn't find an appropriate b == x.
3338 return n
3339 }
3340 if i&1 == 0 {
3341 a, opl, b = b, brrev(opl), a
3342 } else {
3343 x, opr, c = c, brrev(opr), x
3344 }
3345 }
3346
3347 // If n.Op is ||, apply de Morgan.
3348 // Negate the internal ops now; we'll negate the top level op at the end.
3349 // Henceforth assume &&.
3350 negateResult := n.Op == OOROR
3351 if negateResult {
3352 opl = brcom(opl)
3353 opr = brcom(opr)
3354 }
3355
3356 cmpdir := func(o Op) int {
3357 switch o {
3358 case OLE, OLT:
3359 return -1
3360 case OGE, OGT:
3361 return +1
3362 }
3363 Fatalf("walkinrange cmpdir %v", o)
3364 return 0
3365 }
3366 if cmpdir(opl) != cmpdir(opr) {
3367 // Not a range check; something like b < a && b < c.
3368 return n
3369 }
3370
3371 switch opl {
3372 case OGE, OGT:
3373 // We have something like a > b && b ≥ c.
3374 // Switch and reverse ops and rename constants,
3375 // to make it look like a ≤ b && b < c.
3376 a, c = c, a
3377 opl, opr = brrev(opr), brrev(opl)
3378 }
3379
3380 // We must ensure that c-a is non-negative.
3381 // For now, require a and c to be constants.
3382 // In the future, we could also support a == 0 and c == len/cap(...).
3383 // Unfortunately, by this point, most len/cap expressions have been
3384 // stored into temporary variables.
3385 if !Isconst(a, CTINT) || !Isconst(c, CTINT) {
3386 return n
3387 }
3388
3389 if opl == OLT {
3390 // We have a < b && ...
3391 // We need a ≤ b && ... to safely use unsigned comparison tricks.
3392 // If a is not the maximum constant for b's type,
3393 // we can increment a and switch to ≤.
3394 if a.Int64() >= maxintval[b.Type.Etype].Int64() {
3395 return n
3396 }
3397 a = nodintconst(a.Int64() + 1)
3398 opl = OLE
3399 }
3400
3401 bound := c.Int64() - a.Int64()
3402 if bound < 0 {
3403 // Bad news. Something like 5 <= x && x < 3.
3404 // Rare in practice, and we still need to generate side-effects,
3405 // so just leave it alone.
3406 return n
3407 }
3408
3409 // We have a ≤ b && b < c (or a ≤ b && b ≤ c).
3410 // This is equivalent to (a-a) ≤ (b-a) && (b-a) < (c-a),
3411 // which is equivalent to 0 ≤ (b-a) && (b-a) < (c-a),
3412 // which is equivalent to uint(b-a) < uint(c-a).
3413 ut := b.Type.toUnsigned()
3414 lhs := conv(nod(OSUB, b, a), ut)
3415 rhs := nodintconst(bound)
3416 if negateResult {
3417 // Negate top level.
3418 opr = brcom(opr)
3419 }
3420 cmp := nod(opr, lhs, rhs)
3421 cmp.Lineno = n.Lineno
3422 cmp = addinit(cmp, l.Ninit.Slice())
3423 cmp = addinit(cmp, r.Ninit.Slice())
3424 // Typecheck the AST rooted at cmp...
3425 cmp = typecheck(cmp, Erv)
3426 // ...but then reset cmp's type to match n's type.
3427 cmp.Type = n.Type
3428 cmp = walkexpr(cmp, init)
3429 return cmp
3430 }
3431
3432 // walkmul rewrites integer multiplication by powers of two as shifts.
3433 // The result of walkmul MUST be assigned back to n, e.g.
3434 // n.Left = walkmul(n.Left, init) 3435 func walkmul(n *Node, init *Nodes) *Node { 3436 if !n.Type.IsInteger() { 3437 return n 3438 } 3439 3440 var nr *Node 3441 var nl *Node 3442 if n.Right.Op == OLITERAL { 3443 nl = n.Left 3444 nr = n.Right 3445 } else if n.Left.Op == OLITERAL { 3446 nl = n.Right 3447 nr = n.Left 3448 } else { 3449 return n 3450 } 3451 3452 neg := 0 3453 3454 // x*0 is 0 (and side effects of x). 3455 var pow int 3456 var w int 3457 if nr.Int64() == 0 { 3458 cheapexpr(nl, init) 3459 Nodconst(n, n.Type, 0) 3460 goto ret 3461 } 3462 3463 // nr is a constant. 3464 pow = powtwo(nr) 3465 3466 if pow < 0 { 3467 return n 3468 } 3469 if pow >= 1000 { 3470 // negative power of 2, like -16 3471 neg = 1 3472 3473 pow -= 1000 3474 } 3475 3476 w = int(nl.Type.Width * 8) 3477 if pow+1 >= w { // too big, shouldn't happen 3478 return n 3479 } 3480 3481 nl = cheapexpr(nl, init) 3482 3483 if pow == 0 { 3484 // x*1 is x 3485 n = nl 3486 3487 goto ret 3488 } 3489 3490 n = nod(OLSH, nl, nodintconst(int64(pow))) 3491 3492 ret: 3493 if neg != 0 { 3494 n = nod(OMINUS, n, nil) 3495 } 3496 3497 n = typecheck(n, Erv) 3498 n = walkexpr(n, init) 3499 return n 3500 } 3501 3502 // walkdiv rewrites division by a constant as less expensive 3503 // operations. 3504 // The result of walkdiv MUST be assigned back to n, e.g. 3505 // n.Left = walkdiv(n.Left, init) 3506 func walkdiv(n *Node, init *Nodes) *Node { 3507 // if >= 0, nr is 1<<pow // 1 if nr is negative. 3508 3509 if n.Right.Op != OLITERAL { 3510 return n 3511 } 3512 3513 // nr is a constant. 3514 nl := cheapexpr(n.Left, init) 3515 3516 nr := n.Right 3517 3518 // special cases of mod/div 3519 // by a constant 3520 w := int(nl.Type.Width * 8) 3521 3522 s := 0 // 1 if nr is negative. 3523 pow := powtwo(nr) // if >= 0, nr is 1<<pow 3524 if pow >= 1000 { 3525 // negative power of 2 3526 s = 1 3527 3528 pow -= 1000 3529 } 3530 3531 if pow+1 >= w { 3532 // divisor too large. 3533 return n 3534 } 3535 3536 if pow < 0 { 3537 // try to do division by multiply by (2^w)/d 3538 // see hacker's delight chapter 10 3539 // TODO: support 64-bit magic multiply here. 3540 var m Magic 3541 m.W = w 3542 3543 if nl.Type.IsSigned() { 3544 m.Sd = nr.Int64() 3545 smagic(&m) 3546 } else { 3547 m.Ud = uint64(nr.Int64()) 3548 umagic(&m) 3549 } 3550 3551 if m.Bad != 0 { 3552 return n 3553 } 3554 3555 // We have a quick division method so use it 3556 // for modulo too. 3557 if n.Op == OMOD { 3558 // rewrite as A%B = A - (A/B*B). 3559 n1 := nod(ODIV, nl, nr) 3560 3561 n2 := nod(OMUL, n1, nr) 3562 n = nod(OSUB, nl, n2) 3563 goto ret 3564 } 3565 3566 switch simtype[nl.Type.Etype] { 3567 default: 3568 return n 3569 3570 // n1 = nl * magic >> w (HMUL) 3571 case TUINT8, TUINT16, TUINT32: 3572 var nc Node 3573 3574 Nodconst(&nc, nl.Type, int64(m.Um)) 3575 n1 := nod(OHMUL, nl, &nc) 3576 n1 = typecheck(n1, Erv) 3577 if m.Ua != 0 { 3578 // Select a Go type with (at least) twice the width. 3579 var twide *Type 3580 switch simtype[nl.Type.Etype] { 3581 default: 3582 return n 3583 3584 case TUINT8, TUINT16: 3585 twide = Types[TUINT32] 3586 3587 case TUINT32: 3588 twide = Types[TUINT64] 3589 3590 case TINT8, TINT16: 3591 twide = Types[TINT32] 3592 3593 case TINT32: 3594 twide = Types[TINT64] 3595 } 3596 3597 // add numerator (might overflow). 
3598 // n2 = (n1 + nl)
3599 n2 := nod(OADD, conv(n1, twide), conv(nl, twide))
3600
3601 // shift by m.s
3602 var nc Node
3603
3604 Nodconst(&nc, Types[TUINT], int64(m.S))
3605 n = conv(nod(ORSH, n2, &nc), nl.Type)
3606 } else {
3607 // n = n1 >> m.s
3608 var nc Node
3609
3610 Nodconst(&nc, Types[TUINT], int64(m.S))
3611 n = nod(ORSH, n1, &nc)
3612 }
3613
3614 // n1 = nl * magic >> w
3615 case TINT8, TINT16, TINT32:
3616 var nc Node
3617
3618 Nodconst(&nc, nl.Type, m.Sm)
3619 n1 := nod(OHMUL, nl, &nc)
3620 n1 = typecheck(n1, Erv)
3621 if m.Sm < 0 {
3622 // add the numerator.
3623 n1 = nod(OADD, n1, nl)
3624 }
3625
3626 // shift by m.s
3627 var ns Node
3628
3629 Nodconst(&ns, Types[TUINT], int64(m.S))
3630 n2 := conv(nod(ORSH, n1, &ns), nl.Type)
3631
3632 // add 1 iff n1 is negative.
3633 var nneg Node
3634
3635 Nodconst(&nneg, Types[TUINT], int64(w)-1)
3636 n3 := nod(ORSH, nl, &nneg) // n3 = -1 iff nl is negative.
3637 n = nod(OSUB, n2, n3)
3638
3639 // apply sign.
3640 if m.Sd < 0 {
3641 n = nod(OMINUS, n, nil)
3642 }
3643 }
3644
3645 goto ret
3646 }
3647
3648 switch pow {
3649 case 0:
3650 if n.Op == OMOD {
3651 // nl % 1 is zero.
3652 Nodconst(n, n.Type, 0)
3653 } else if s != 0 {
3654 // divide by -1
3655 n.Op = OMINUS
3656
3657 n.Right = nil
3658 } else {
3659 // divide by 1
3660 n = nl
3661 }
3662
3663 default:
3664 if n.Type.IsSigned() {
3665 if n.Op == OMOD {
3666 // signed modulo 2^pow is like ANDing
3667 // with the last pow bits, but if nl < 0,
3668 // nl & (2^pow-1) is (nl+1)%2^pow - 1.
3669 var nc Node
3670
3671 Nodconst(&nc, Types[simtype[TUINT]], int64(w)-1)
3672 n1 := nod(ORSH, nl, &nc) // n1 = -1 iff nl < 0.
3673 if pow == 1 {
3674 n1 = typecheck(n1, Erv)
3675 n1 = cheapexpr(n1, init)
3676
3677 // n = (nl+ε)&1 - ε where ε=1 iff nl<0.
3678 n2 := nod(OSUB, nl, n1)
3679
3680 var nc Node
3681 Nodconst(&nc, nl.Type, 1)
3682 n3 := nod(OAND, n2, &nc)
3683 n = nod(OADD, n3, n1)
3684 } else {
3685 // n = (nl+ε)&(nr-1) - ε where ε=2^pow-1 iff nl<0.
3686 var nc Node
3687
3688 Nodconst(&nc, nl.Type, (1<<uint(pow))-1)
3689 n2 := nod(OAND, n1, &nc) // n2 = 2^pow-1 iff nl<0.
3690 n2 = typecheck(n2, Erv)
3691 n2 = cheapexpr(n2, init)
3692
3693 n3 := nod(OADD, nl, n2)
3694 n4 := nod(OAND, n3, &nc)
3695 n = nod(OSUB, n4, n2)
3696 }
3697
3698 break
3699 } else {
3700 // arithmetic right shift does not give the correct rounding.
3701 // if nl >= 0, nl >> n == nl / nr
3702 // if nl < 0, we want to add 2^n-1 first.
3703 var nc Node
3704
3705 Nodconst(&nc, Types[simtype[TUINT]], int64(w)-1)
3706 n1 := nod(ORSH, nl, &nc) // n1 = -1 iff nl < 0.
3707 if pow == 1 {
3708 // nl+1 is nl-(-1)
3709 n.Left = nod(OSUB, nl, n1)
3710 } else {
3711 // Do a logical right shift on -1 to keep pow bits.
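// For example (illustrative, with w=32 and pow=3): for nl = -1, n1 is -1,
// the logical shift right by w-pow = 29 yields 7 = 2^pow-1, and
// (-1+7)>>3 = 0 is the correct truncated quotient, whereas the plain
// arithmetic shift -1>>3 would give -1.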
3712 var nc Node 3713 3714 Nodconst(&nc, Types[simtype[TUINT]], int64(w)-int64(pow)) 3715 n2 := nod(ORSH, conv(n1, nl.Type.toUnsigned()), &nc) 3716 n.Left = nod(OADD, nl, conv(n2, nl.Type)) 3717 } 3718 3719 // n = (nl + 2^pow-1) >> pow 3720 n.Op = ORSH 3721 3722 var n2 Node 3723 Nodconst(&n2, Types[simtype[TUINT]], int64(pow)) 3724 n.Right = &n2 3725 n.Typecheck = 0 3726 } 3727 3728 if s != 0 { 3729 n = nod(OMINUS, n, nil) 3730 } 3731 break 3732 } 3733 3734 var nc Node 3735 if n.Op == OMOD { 3736 // n = nl & (nr-1) 3737 n.Op = OAND 3738 3739 Nodconst(&nc, nl.Type, nr.Int64()-1) 3740 } else { 3741 // n = nl >> pow 3742 n.Op = ORSH 3743 3744 Nodconst(&nc, Types[simtype[TUINT]], int64(pow)) 3745 } 3746 3747 n.Typecheck = 0 3748 n.Right = &nc 3749 } 3750 3751 goto ret 3752 3753 ret: 3754 n = typecheck(n, Erv) 3755 n = walkexpr(n, init) 3756 return n 3757 } 3758 3759 // return 1 if integer n must be in range [0, max), 0 otherwise 3760 func bounded(n *Node, max int64) bool { 3761 if n.Type == nil || !n.Type.IsInteger() { 3762 return false 3763 } 3764 3765 sign := n.Type.IsSigned() 3766 bits := int32(8 * n.Type.Width) 3767 3768 if smallintconst(n) { 3769 v := n.Int64() 3770 return 0 <= v && v < max 3771 } 3772 3773 switch n.Op { 3774 case OAND: 3775 v := int64(-1) 3776 if smallintconst(n.Left) { 3777 v = n.Left.Int64() 3778 } else if smallintconst(n.Right) { 3779 v = n.Right.Int64() 3780 } 3781 3782 if 0 <= v && v < max { 3783 return true 3784 } 3785 3786 case OMOD: 3787 if !sign && smallintconst(n.Right) { 3788 v := n.Right.Int64() 3789 if 0 <= v && v <= max { 3790 return true 3791 } 3792 } 3793 3794 case ODIV: 3795 if !sign && smallintconst(n.Right) { 3796 v := n.Right.Int64() 3797 for bits > 0 && v >= 2 { 3798 bits-- 3799 v >>= 1 3800 } 3801 } 3802 3803 case ORSH: 3804 if !sign && smallintconst(n.Right) { 3805 v := n.Right.Int64() 3806 if v > int64(bits) { 3807 return true 3808 } 3809 bits -= int32(v) 3810 } 3811 } 3812 3813 if !sign && bits <= 62 && 1<<uint(bits) <= max { 3814 return true 3815 } 3816 3817 return false 3818 } 3819 3820 // usemethod check interface method calls for uses of reflect.Type.Method. 3821 func usemethod(n *Node) { 3822 t := n.Left.Type 3823 3824 // Looking for either of: 3825 // Method(int) reflect.Method 3826 // MethodByName(string) (reflect.Method, bool) 3827 // 3828 // TODO(crawshaw): improve precision of match by working out 3829 // how to check the method name. 3830 if n := t.Params().NumFields(); n != 1 { 3831 return 3832 } 3833 if n := t.Results().NumFields(); n != 1 && n != 2 { 3834 return 3835 } 3836 p0 := t.Params().Field(0) 3837 res0 := t.Results().Field(0) 3838 var res1 *Field 3839 if t.Results().NumFields() == 2 { 3840 res1 = t.Results().Field(1) 3841 } 3842 3843 if res1 == nil { 3844 if p0.Type.Etype != TINT { 3845 return 3846 } 3847 } else { 3848 if !p0.Type.IsString() { 3849 return 3850 } 3851 if !res1.Type.IsBoolean() { 3852 return 3853 } 3854 } 3855 if res0.Type.String() != "reflect.Method" { 3856 return 3857 } 3858 3859 Curfn.Func.ReflectMethod = true 3860 } 3861 3862 func usefield(n *Node) { 3863 if obj.Fieldtrack_enabled == 0 { 3864 return 3865 } 3866 3867 switch n.Op { 3868 default: 3869 Fatalf("usefield %v", n.Op) 3870 3871 case ODOT, ODOTPTR: 3872 break 3873 } 3874 if n.Sym == nil { 3875 // No field name. This DOTPTR was built by the compiler for access 3876 // to runtime data structures. Ignore. 
3877 return 3878 } 3879 3880 t := n.Left.Type 3881 if t.IsPtr() { 3882 t = t.Elem() 3883 } 3884 field := dotField[typeSym{t.Orig, n.Sym}] 3885 if field == nil { 3886 Fatalf("usefield %v %v without paramfld", n.Left.Type, n.Sym) 3887 } 3888 if !strings.Contains(field.Note, "go:\"track\"") { 3889 return 3890 } 3891 3892 outer := n.Left.Type 3893 if outer.IsPtr() { 3894 outer = outer.Elem() 3895 } 3896 if outer.Sym == nil { 3897 yyerror("tracked field must be in named struct type") 3898 } 3899 if !exportname(field.Sym.Name) { 3900 yyerror("tracked field must be exported (upper case)") 3901 } 3902 3903 sym := tracksym(outer, field) 3904 if Curfn.Func.FieldTrack == nil { 3905 Curfn.Func.FieldTrack = make(map[*Sym]struct{}) 3906 } 3907 Curfn.Func.FieldTrack[sym] = struct{}{} 3908 } 3909 3910 func candiscardlist(l Nodes) bool { 3911 for _, n := range l.Slice() { 3912 if !candiscard(n) { 3913 return false 3914 } 3915 } 3916 return true 3917 } 3918 3919 func candiscard(n *Node) bool { 3920 if n == nil { 3921 return true 3922 } 3923 3924 switch n.Op { 3925 default: 3926 return false 3927 3928 // Discardable as long as the subpieces are. 3929 case ONAME, 3930 ONONAME, 3931 OTYPE, 3932 OPACK, 3933 OLITERAL, 3934 OADD, 3935 OSUB, 3936 OOR, 3937 OXOR, 3938 OADDSTR, 3939 OADDR, 3940 OANDAND, 3941 OARRAYBYTESTR, 3942 OARRAYRUNESTR, 3943 OSTRARRAYBYTE, 3944 OSTRARRAYRUNE, 3945 OCAP, 3946 OCMPIFACE, 3947 OCMPSTR, 3948 OCOMPLIT, 3949 OMAPLIT, 3950 OSTRUCTLIT, 3951 OARRAYLIT, 3952 OSLICELIT, 3953 OPTRLIT, 3954 OCONV, 3955 OCONVIFACE, 3956 OCONVNOP, 3957 ODOT, 3958 OEQ, 3959 ONE, 3960 OLT, 3961 OLE, 3962 OGT, 3963 OGE, 3964 OKEY, 3965 OSTRUCTKEY, 3966 OLEN, 3967 OMUL, 3968 OLSH, 3969 ORSH, 3970 OAND, 3971 OANDNOT, 3972 ONEW, 3973 ONOT, 3974 OCOM, 3975 OPLUS, 3976 OMINUS, 3977 OOROR, 3978 OPAREN, 3979 ORUNESTR, 3980 OREAL, 3981 OIMAG, 3982 OCOMPLEX: 3983 break 3984 3985 // Discardable as long as we know it's not division by zero. 3986 case ODIV, OMOD: 3987 if Isconst(n.Right, CTINT) && n.Right.Val().U.(*Mpint).CmpInt64(0) != 0 { 3988 break 3989 } 3990 if Isconst(n.Right, CTFLT) && n.Right.Val().U.(*Mpflt).CmpFloat64(0) != 0 { 3991 break 3992 } 3993 return false 3994 3995 // Discardable as long as we know it won't fail because of a bad size. 3996 case OMAKECHAN, OMAKEMAP: 3997 if Isconst(n.Left, CTINT) && n.Left.Val().U.(*Mpint).CmpInt64(0) == 0 { 3998 break 3999 } 4000 return false 4001 4002 // Difficult to tell what sizes are okay. 4003 case OMAKESLICE: 4004 return false 4005 } 4006 4007 if !candiscard(n.Left) || !candiscard(n.Right) || !candiscardlist(n.Ninit) || !candiscardlist(n.Nbody) || !candiscardlist(n.List) || !candiscardlist(n.Rlist) { 4008 return false 4009 } 4010 4011 return true 4012 } 4013 4014 // rewrite 4015 // print(x, y, z) 4016 // into 4017 // func(a1, a2, a3) { 4018 // print(a1, a2, a3) 4019 // }(x, y, z) 4020 // and same for println. 4021 4022 var walkprintfunc_prgen int 4023 4024 // The result of walkprintfunc MUST be assigned back to n, e.g. 
4025 // n.Left = walkprintfunc(n.Left, init) 4026 func walkprintfunc(n *Node, init *Nodes) *Node { 4027 if n.Ninit.Len() != 0 { 4028 walkstmtlist(n.Ninit.Slice()) 4029 init.AppendNodes(&n.Ninit) 4030 } 4031 4032 t := nod(OTFUNC, nil, nil) 4033 num := 0 4034 var printargs []*Node 4035 var a *Node 4036 var buf string 4037 for _, n1 := range n.List.Slice() { 4038 buf = fmt.Sprintf("a%d", num) 4039 num++ 4040 a = nod(ODCLFIELD, newname(lookup(buf)), typenod(n1.Type)) 4041 t.List.Append(a) 4042 printargs = append(printargs, a.Left) 4043 } 4044 4045 fn := nod(ODCLFUNC, nil, nil) 4046 walkprintfunc_prgen++ 4047 buf = fmt.Sprintf("print%d", walkprintfunc_prgen) 4048 fn.Func.Nname = newname(lookup(buf)) 4049 fn.Func.Nname.Name.Defn = fn 4050 fn.Func.Nname.Name.Param.Ntype = t 4051 declare(fn.Func.Nname, PFUNC) 4052 4053 oldfn := Curfn 4054 Curfn = nil 4055 funchdr(fn) 4056 4057 a = nod(n.Op, nil, nil) 4058 a.List.Set(printargs) 4059 a = typecheck(a, Etop) 4060 a = walkstmt(a) 4061 4062 fn.Nbody.Set1(a) 4063 4064 funcbody(fn) 4065 4066 fn = typecheck(fn, Etop) 4067 typecheckslice(fn.Nbody.Slice(), Etop) 4068 xtop = append(xtop, fn) 4069 Curfn = oldfn 4070 4071 a = nod(OCALL, nil, nil) 4072 a.Left = fn.Func.Nname 4073 a.List.Set(n.List.Slice()) 4074 a = typecheck(a, Etop) 4075 a = walkexpr(a, init) 4076 return a 4077 } 4078
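// Note on walkprintfunc above: print and println are otherwise expanded
// into a sequence of runtime calls (see walkprint), but go and defer
// statements need a single call expression, so the print is wrapped in a
// generated function of the form shown in the comment preceding it.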