Home | History | Annotate | Download | only in gc
      1 // Copyright 2015 The Go Authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style
      3 // license that can be found in the LICENSE file.
      4 
      5 package gc
      6 
      7 import (
      8 	"bytes"
      9 	"encoding/binary"
     10 	"fmt"
     11 	"html"
     12 	"os"
     13 	"sort"
     14 
     15 	"cmd/compile/internal/ssa"
     16 	"cmd/internal/obj"
     17 	"cmd/internal/sys"
     18 )
     19 
// ssaConfig is the shared SSA backend configuration, created lazily by
// initssa and reused across all functions in this compilation.
var ssaConfig *ssa.Config

// ssaExp is the exporter handed to ssa.NewConfig; buildssa toggles its
// log field per function to control SSA dumping (GOSSAFUNC).
var ssaExp ssaExport
     22 
// initssa creates the shared SSA configuration on first use and returns it.
// The HTML writer is cleared on every call; buildssa re-attaches one per
// function when GOSSAFUNC matches that function's name.
func initssa() *ssa.Config {
	if ssaConfig == nil {
		ssaConfig = ssa.NewConfig(Thearch.LinkArch.Name, &ssaExp, Ctxt, Debug['N'] == 0)
		if Thearch.LinkArch.Name == "386" {
			// On 386 the backend needs to know whether to use the x87 FPU.
			ssaConfig.Set387(Thearch.Use387)
		}
	}
	ssaConfig.HTML = nil
	return ssaConfig
}
     33 
     34 // buildssa builds an SSA function.
     35 func buildssa(fn *Node) *ssa.Func {
     36 	name := fn.Func.Nname.Sym.Name
     37 	printssa := name == os.Getenv("GOSSAFUNC")
     38 	if printssa {
     39 		fmt.Println("generating SSA for", name)
     40 		dumplist("buildssa-enter", fn.Func.Enter)
     41 		dumplist("buildssa-body", fn.Nbody)
     42 		dumplist("buildssa-exit", fn.Func.Exit)
     43 	}
     44 
     45 	var s state
     46 	s.pushLine(fn.Lineno)
     47 	defer s.popLine()
     48 
     49 	if fn.Func.Pragma&CgoUnsafeArgs != 0 {
     50 		s.cgoUnsafeArgs = true
     51 	}
     52 	if fn.Func.Pragma&Nowritebarrier != 0 {
     53 		s.noWB = true
     54 	}
     55 	defer func() {
     56 		if s.WBLineno != 0 {
     57 			fn.Func.WBLineno = s.WBLineno
     58 		}
     59 	}()
     60 	// TODO(khr): build config just once at the start of the compiler binary
     61 
     62 	ssaExp.log = printssa
     63 
     64 	s.config = initssa()
     65 	s.f = s.config.NewFunc()
     66 	s.f.Name = name
     67 	if fn.Func.Pragma&Nosplit != 0 {
     68 		s.f.NoSplit = true
     69 	}
     70 	s.exitCode = fn.Func.Exit
     71 	s.panics = map[funcLine]*ssa.Block{}
     72 	s.config.DebugTest = s.config.DebugHashMatch("GOSSAHASH", name)
     73 
     74 	if name == os.Getenv("GOSSAFUNC") {
     75 		// TODO: tempfile? it is handy to have the location
     76 		// of this file be stable, so you can just reload in the browser.
     77 		s.config.HTML = ssa.NewHTMLWriter("ssa.html", s.config, name)
     78 		// TODO: generate and print a mapping from nodes to values and blocks
     79 	}
     80 
     81 	// Allocate starting block
     82 	s.f.Entry = s.f.NewBlock(ssa.BlockPlain)
     83 
     84 	// Allocate starting values
     85 	s.labels = map[string]*ssaLabel{}
     86 	s.labeledNodes = map[*Node]*ssaLabel{}
     87 	s.fwdVars = map[*Node]*ssa.Value{}
     88 	s.startmem = s.entryNewValue0(ssa.OpInitMem, ssa.TypeMem)
     89 	s.sp = s.entryNewValue0(ssa.OpSP, Types[TUINTPTR]) // TODO: use generic pointer type (unsafe.Pointer?) instead
     90 	s.sb = s.entryNewValue0(ssa.OpSB, Types[TUINTPTR])
     91 
     92 	s.startBlock(s.f.Entry)
     93 	s.vars[&memVar] = s.startmem
     94 
     95 	s.varsyms = map[*Node]interface{}{}
     96 
     97 	// Generate addresses of local declarations
     98 	s.decladdrs = map[*Node]*ssa.Value{}
     99 	for _, n := range fn.Func.Dcl {
    100 		switch n.Class {
    101 		case PPARAM, PPARAMOUT:
    102 			aux := s.lookupSymbol(n, &ssa.ArgSymbol{Typ: n.Type, Node: n})
    103 			s.decladdrs[n] = s.entryNewValue1A(ssa.OpAddr, ptrto(n.Type), aux, s.sp)
    104 			if n.Class == PPARAMOUT && s.canSSA(n) {
    105 				// Save ssa-able PPARAMOUT variables so we can
    106 				// store them back to the stack at the end of
    107 				// the function.
    108 				s.returns = append(s.returns, n)
    109 			}
    110 		case PAUTO:
    111 			// processed at each use, to prevent Addr coming
    112 			// before the decl.
    113 		case PAUTOHEAP:
    114 			// moved to heap - already handled by frontend
    115 		case PFUNC:
    116 			// local function - already handled by frontend
    117 		default:
    118 			s.Fatalf("local variable with class %s unimplemented", classnames[n.Class])
    119 		}
    120 	}
    121 
    122 	// Populate arguments.
    123 	for _, n := range fn.Func.Dcl {
    124 		if n.Class != PPARAM {
    125 			continue
    126 		}
    127 		var v *ssa.Value
    128 		if s.canSSA(n) {
    129 			v = s.newValue0A(ssa.OpArg, n.Type, n)
    130 		} else {
    131 			// Not SSAable. Load it.
    132 			v = s.newValue2(ssa.OpLoad, n.Type, s.decladdrs[n], s.startmem)
    133 		}
    134 		s.vars[n] = v
    135 	}
    136 
    137 	// Convert the AST-based IR to the SSA-based IR
    138 	s.stmtList(fn.Func.Enter)
    139 	s.stmtList(fn.Nbody)
    140 
    141 	// fallthrough to exit
    142 	if s.curBlock != nil {
    143 		s.pushLine(fn.Func.Endlineno)
    144 		s.exit()
    145 		s.popLine()
    146 	}
    147 
    148 	// Check that we used all labels
    149 	for name, lab := range s.labels {
    150 		if !lab.used() && !lab.reported && !lab.defNode.Used {
    151 			yyerrorl(lab.defNode.Lineno, "label %v defined and not used", name)
    152 			lab.reported = true
    153 		}
    154 		if lab.used() && !lab.defined() && !lab.reported {
    155 			yyerrorl(lab.useNode.Lineno, "label %v not defined", name)
    156 			lab.reported = true
    157 		}
    158 	}
    159 
    160 	// Check any forward gotos. Non-forward gotos have already been checked.
    161 	for _, n := range s.fwdGotos {
    162 		lab := s.labels[n.Left.Sym.Name]
    163 		// If the label is undefined, we have already have printed an error.
    164 		if lab.defined() {
    165 			s.checkgoto(n, lab.defNode)
    166 		}
    167 	}
    168 
    169 	if nerrors > 0 {
    170 		s.f.Free()
    171 		return nil
    172 	}
    173 
    174 	s.insertPhis()
    175 
    176 	// Don't carry reference this around longer than necessary
    177 	s.exitCode = Nodes{}
    178 
    179 	// Main call to ssa package to compile function
    180 	ssa.Compile(s.f)
    181 
    182 	return s.f
    183 }
    184 
// state holds the per-function working state used while converting a
// function's AST to SSA form (see buildssa).
type state struct {
	// configuration (arch) information
	config *ssa.Config

	// function we're building
	f *ssa.Func

	// labels and labeled control flow nodes (OFOR, OSWITCH, OSELECT) in f
	labels       map[string]*ssaLabel
	labeledNodes map[*Node]*ssaLabel

	// gotos that jump forward; required for deferred checkgoto calls
	fwdGotos []*Node
	// Code that must precede any return
	// (e.g., copying value of heap-escaped paramout back to true paramout)
	exitCode Nodes

	// unlabeled break and continue statement tracking
	breakTo    *ssa.Block // current target for plain break statement
	continueTo *ssa.Block // current target for plain continue statement

	// current location where we're interpreting the AST
	curBlock *ssa.Block

	// variable assignments in the current block (map from variable symbol to ssa value)
	// *Node is the unique identifier (an ONAME Node) for the variable.
	// TODO: keep a single varnum map, then make all of these maps slices instead?
	vars map[*Node]*ssa.Value

	// fwdVars are variables that are used before they are defined in the current block.
	// This map exists just to coalesce multiple references into a single FwdRef op.
	// *Node is the unique identifier (an ONAME Node) for the variable.
	fwdVars map[*Node]*ssa.Value

	// all defined variables at the end of each block. Indexed by block ID.
	defvars []map[*Node]*ssa.Value

	// addresses of PPARAM and PPARAMOUT variables.
	decladdrs map[*Node]*ssa.Value

	// symbols for PEXTERN, PAUTO and PPARAMOUT variables so they can be reused.
	varsyms map[*Node]interface{}

	// starting values. Memory, stack pointer, and globals pointer
	startmem *ssa.Value
	sp       *ssa.Value
	sb       *ssa.Value

	// line number stack. The current line number is top of stack
	line []int32

	// list of panic calls by function name and line number.
	// Used to deduplicate panic calls.
	panics map[funcLine]*ssa.Block

	// list of PPARAMOUT (return) variables.
	returns []*Node

	// A dummy value used during phi construction.
	placeholder *ssa.Value

	cgoUnsafeArgs bool  // set from the CgoUnsafeArgs function pragma
	noWB          bool  // set from the Nowritebarrier function pragma
	WBLineno      int32 // line number of first write barrier. 0=no write barriers
}
    250 
// funcLine identifies a panic call by the function being called and the
// line number of the call. It is the key of state.panics, which is used
// to deduplicate panic calls.
type funcLine struct {
	f    *Node
	line int32
}
    255 
// An ssaLabel records everything known about one label in the function
// being compiled: its target block(s) and the AST nodes that define and
// use it (the latter kept for error detection and reporting).
type ssaLabel struct {
	target         *ssa.Block // block identified by this label
	breakTarget    *ssa.Block // block to break to in control flow node identified by this label
	continueTarget *ssa.Block // block to continue to in control flow node identified by this label
	defNode        *Node      // label definition Node (OLABEL)
	// Label use Node (OGOTO, OBREAK, OCONTINUE).
	// Used only for error detection and reporting.
	// There might be multiple uses, but we only need to track one.
	useNode  *Node
	reported bool // reported indicates whether an error has already been reported for this label
}
    267 
    268 // defined reports whether the label has a definition (OLABEL node).
    269 func (l *ssaLabel) defined() bool { return l.defNode != nil }
    270 
    271 // used reports whether the label has a use (OGOTO, OBREAK, or OCONTINUE node).
    272 func (l *ssaLabel) used() bool { return l.useNode != nil }
    273 
    274 // label returns the label associated with sym, creating it if necessary.
    275 func (s *state) label(sym *Sym) *ssaLabel {
    276 	lab := s.labels[sym.Name]
    277 	if lab == nil {
    278 		lab = new(ssaLabel)
    279 		s.labels[sym.Name] = lab
    280 	}
    281 	return lab
    282 }
    283 
// Logging and diagnostic helpers, delegating to the shared ssa.Config.
// Fatalf attaches the current source line (see peekLine).
func (s *state) Logf(msg string, args ...interface{})              { s.config.Logf(msg, args...) }
func (s *state) Log() bool                                         { return s.config.Log() }
func (s *state) Fatalf(msg string, args ...interface{})            { s.config.Fatalf(s.peekLine(), msg, args...) }
func (s *state) Warnl(line int32, msg string, args ...interface{}) { s.config.Warnl(line, msg, args...) }
func (s *state) Debug_checknil() bool                              { return s.config.Debug_checknil() }
    289 
// Dummy ONAME nodes used only as unique map keys for pseudo-variables
// in s.vars/s.fwdVars; their identity, not their content, matters.
var (
	// dummy node for the memory variable
	memVar = Node{Op: ONAME, Class: Pxxx, Sym: &Sym{Name: "mem"}}

	// dummy nodes for temporary variables
	ptrVar    = Node{Op: ONAME, Class: Pxxx, Sym: &Sym{Name: "ptr"}}
	lenVar    = Node{Op: ONAME, Class: Pxxx, Sym: &Sym{Name: "len"}}
	newlenVar = Node{Op: ONAME, Class: Pxxx, Sym: &Sym{Name: "newlen"}}
	capVar    = Node{Op: ONAME, Class: Pxxx, Sym: &Sym{Name: "cap"}}
	typVar    = Node{Op: ONAME, Class: Pxxx, Sym: &Sym{Name: "typ"}}
	okVar     = Node{Op: ONAME, Class: Pxxx, Sym: &Sym{Name: "ok"}}
)
    302 
    303 // startBlock sets the current block we're generating code in to b.
    304 func (s *state) startBlock(b *ssa.Block) {
    305 	if s.curBlock != nil {
    306 		s.Fatalf("starting block %v when block %v has not ended", b, s.curBlock)
    307 	}
    308 	s.curBlock = b
    309 	s.vars = map[*Node]*ssa.Value{}
    310 	for n := range s.fwdVars {
    311 		delete(s.fwdVars, n)
    312 	}
    313 }
    314 
    315 // endBlock marks the end of generating code for the current block.
    316 // Returns the (former) current block. Returns nil if there is no current
    317 // block, i.e. if no code flows to the current execution point.
    318 func (s *state) endBlock() *ssa.Block {
    319 	b := s.curBlock
    320 	if b == nil {
    321 		return nil
    322 	}
    323 	for len(s.defvars) <= int(b.ID) {
    324 		s.defvars = append(s.defvars, nil)
    325 	}
    326 	s.defvars[b.ID] = s.vars
    327 	s.curBlock = nil
    328 	s.vars = nil
    329 	b.Line = s.peekLine()
    330 	return b
    331 }
    332 
    333 // pushLine pushes a line number on the line number stack.
    334 func (s *state) pushLine(line int32) {
    335 	if line == 0 {
    336 		// the frontend may emit node with line number missing,
    337 		// use the parent line number in this case.
    338 		line = s.peekLine()
    339 		if Debug['K'] != 0 {
    340 			Warn("buildssa: line 0")
    341 		}
    342 	}
    343 	s.line = append(s.line, line)
    344 }
    345 
    346 // popLine pops the top of the line number stack.
    347 func (s *state) popLine() {
    348 	s.line = s.line[:len(s.line)-1]
    349 }
    350 
    351 // peekLine peek the top of the line number stack.
    352 func (s *state) peekLine() int32 {
    353 	return s.line[len(s.line)-1]
    354 }
    355 
// Error reports a compile-time error at the current source line.
func (s *state) Error(msg string, args ...interface{}) {
	yyerrorl(s.peekLine(), msg, args...)
}
    359 
    360 // newValue0 adds a new value with no arguments to the current block.
    361 func (s *state) newValue0(op ssa.Op, t ssa.Type) *ssa.Value {
    362 	return s.curBlock.NewValue0(s.peekLine(), op, t)
    363 }
    364 
    365 // newValue0A adds a new value with no arguments and an aux value to the current block.
    366 func (s *state) newValue0A(op ssa.Op, t ssa.Type, aux interface{}) *ssa.Value {
    367 	return s.curBlock.NewValue0A(s.peekLine(), op, t, aux)
    368 }
    369 
    370 // newValue0I adds a new value with no arguments and an auxint value to the current block.
    371 func (s *state) newValue0I(op ssa.Op, t ssa.Type, auxint int64) *ssa.Value {
    372 	return s.curBlock.NewValue0I(s.peekLine(), op, t, auxint)
    373 }
    374 
    375 // newValue1 adds a new value with one argument to the current block.
    376 func (s *state) newValue1(op ssa.Op, t ssa.Type, arg *ssa.Value) *ssa.Value {
    377 	return s.curBlock.NewValue1(s.peekLine(), op, t, arg)
    378 }
    379 
    380 // newValue1A adds a new value with one argument and an aux value to the current block.
    381 func (s *state) newValue1A(op ssa.Op, t ssa.Type, aux interface{}, arg *ssa.Value) *ssa.Value {
    382 	return s.curBlock.NewValue1A(s.peekLine(), op, t, aux, arg)
    383 }
    384 
    385 // newValue1I adds a new value with one argument and an auxint value to the current block.
    386 func (s *state) newValue1I(op ssa.Op, t ssa.Type, aux int64, arg *ssa.Value) *ssa.Value {
    387 	return s.curBlock.NewValue1I(s.peekLine(), op, t, aux, arg)
    388 }
    389 
    390 // newValue2 adds a new value with two arguments to the current block.
    391 func (s *state) newValue2(op ssa.Op, t ssa.Type, arg0, arg1 *ssa.Value) *ssa.Value {
    392 	return s.curBlock.NewValue2(s.peekLine(), op, t, arg0, arg1)
    393 }
    394 
    395 // newValue2I adds a new value with two arguments and an auxint value to the current block.
    396 func (s *state) newValue2I(op ssa.Op, t ssa.Type, aux int64, arg0, arg1 *ssa.Value) *ssa.Value {
    397 	return s.curBlock.NewValue2I(s.peekLine(), op, t, aux, arg0, arg1)
    398 }
    399 
    400 // newValue3 adds a new value with three arguments to the current block.
    401 func (s *state) newValue3(op ssa.Op, t ssa.Type, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
    402 	return s.curBlock.NewValue3(s.peekLine(), op, t, arg0, arg1, arg2)
    403 }
    404 
    405 // newValue3I adds a new value with three arguments and an auxint value to the current block.
    406 func (s *state) newValue3I(op ssa.Op, t ssa.Type, aux int64, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
    407 	return s.curBlock.NewValue3I(s.peekLine(), op, t, aux, arg0, arg1, arg2)
    408 }
    409 
    410 // newValue4 adds a new value with four arguments to the current block.
    411 func (s *state) newValue4(op ssa.Op, t ssa.Type, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
    412 	return s.curBlock.NewValue4(s.peekLine(), op, t, arg0, arg1, arg2, arg3)
    413 }
    414 
// entryNewValue0 adds a new value with no arguments to the entry block.
func (s *state) entryNewValue0(op ssa.Op, t ssa.Type) *ssa.Value {
	return s.f.Entry.NewValue0(s.peekLine(), op, t)
}

// entryNewValue0A adds a new value with no arguments and an aux value to the entry block.
func (s *state) entryNewValue0A(op ssa.Op, t ssa.Type, aux interface{}) *ssa.Value {
	return s.f.Entry.NewValue0A(s.peekLine(), op, t, aux)
}

// entryNewValue0I adds a new value with no arguments and an auxint value to the entry block.
func (s *state) entryNewValue0I(op ssa.Op, t ssa.Type, auxint int64) *ssa.Value {
	return s.f.Entry.NewValue0I(s.peekLine(), op, t, auxint)
}

// entryNewValue1 adds a new value with one argument to the entry block.
func (s *state) entryNewValue1(op ssa.Op, t ssa.Type, arg *ssa.Value) *ssa.Value {
	return s.f.Entry.NewValue1(s.peekLine(), op, t, arg)
}

// entryNewValue1I adds a new value with one argument and an auxint value to the entry block.
func (s *state) entryNewValue1I(op ssa.Op, t ssa.Type, auxint int64, arg *ssa.Value) *ssa.Value {
	return s.f.Entry.NewValue1I(s.peekLine(), op, t, auxint, arg)
}

// entryNewValue1A adds a new value with one argument and an aux value to the entry block.
func (s *state) entryNewValue1A(op ssa.Op, t ssa.Type, aux interface{}, arg *ssa.Value) *ssa.Value {
	return s.f.Entry.NewValue1A(s.peekLine(), op, t, aux, arg)
}

// entryNewValue2 adds a new value with two arguments to the entry block.
func (s *state) entryNewValue2(op ssa.Op, t ssa.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.f.Entry.NewValue2(s.peekLine(), op, t, arg0, arg1)
}
    449 
    450 // const* routines add a new const value to the entry block.
    451 func (s *state) constSlice(t ssa.Type) *ssa.Value       { return s.f.ConstSlice(s.peekLine(), t) }
    452 func (s *state) constInterface(t ssa.Type) *ssa.Value   { return s.f.ConstInterface(s.peekLine(), t) }
    453 func (s *state) constNil(t ssa.Type) *ssa.Value         { return s.f.ConstNil(s.peekLine(), t) }
    454 func (s *state) constEmptyString(t ssa.Type) *ssa.Value { return s.f.ConstEmptyString(s.peekLine(), t) }
    455 func (s *state) constBool(c bool) *ssa.Value {
    456 	return s.f.ConstBool(s.peekLine(), Types[TBOOL], c)
    457 }
    458 func (s *state) constInt8(t ssa.Type, c int8) *ssa.Value {
    459 	return s.f.ConstInt8(s.peekLine(), t, c)
    460 }
    461 func (s *state) constInt16(t ssa.Type, c int16) *ssa.Value {
    462 	return s.f.ConstInt16(s.peekLine(), t, c)
    463 }
    464 func (s *state) constInt32(t ssa.Type, c int32) *ssa.Value {
    465 	return s.f.ConstInt32(s.peekLine(), t, c)
    466 }
    467 func (s *state) constInt64(t ssa.Type, c int64) *ssa.Value {
    468 	return s.f.ConstInt64(s.peekLine(), t, c)
    469 }
    470 func (s *state) constFloat32(t ssa.Type, c float64) *ssa.Value {
    471 	return s.f.ConstFloat32(s.peekLine(), t, c)
    472 }
    473 func (s *state) constFloat64(t ssa.Type, c float64) *ssa.Value {
    474 	return s.f.ConstFloat64(s.peekLine(), t, c)
    475 }
    476 func (s *state) constInt(t ssa.Type, c int64) *ssa.Value {
    477 	if s.config.IntSize == 8 {
    478 		return s.constInt64(t, c)
    479 	}
    480 	if int64(int32(c)) != c {
    481 		s.Fatalf("integer constant too big %d", c)
    482 	}
    483 	return s.constInt32(t, int32(c))
    484 }
    485 
    486 // stmtList converts the statement list n to SSA and adds it to s.
    487 func (s *state) stmtList(l Nodes) {
    488 	for _, n := range l.Slice() {
    489 		s.stmt(n)
    490 	}
    491 }
    492 
    493 // stmt converts the statement n to SSA and adds it to s.
    494 func (s *state) stmt(n *Node) {
    495 	s.pushLine(n.Lineno)
    496 	defer s.popLine()
    497 
    498 	// If s.curBlock is nil, then we're about to generate dead code.
    499 	// We can't just short-circuit here, though,
    500 	// because we check labels and gotos as part of SSA generation.
    501 	// Provide a block for the dead code so that we don't have
    502 	// to add special cases everywhere else.
    503 	if s.curBlock == nil {
    504 		dead := s.f.NewBlock(ssa.BlockPlain)
    505 		s.startBlock(dead)
    506 	}
    507 
    508 	s.stmtList(n.Ninit)
    509 	switch n.Op {
    510 
    511 	case OBLOCK:
    512 		s.stmtList(n.List)
    513 
    514 	// No-ops
    515 	case OEMPTY, ODCLCONST, ODCLTYPE, OFALL:
    516 
    517 	// Expression statements
    518 	case OCALLFUNC:
    519 		if isIntrinsicCall(n) {
    520 			s.intrinsicCall(n)
    521 			return
    522 		}
    523 		fallthrough
    524 
    525 	case OCALLMETH, OCALLINTER:
    526 		s.call(n, callNormal)
    527 		if n.Op == OCALLFUNC && n.Left.Op == ONAME && n.Left.Class == PFUNC {
    528 			if fn := n.Left.Sym.Name; compiling_runtime && fn == "throw" ||
    529 				n.Left.Sym.Pkg == Runtimepkg && (fn == "throwinit" || fn == "gopanic" || fn == "panicwrap" || fn == "selectgo" || fn == "block") {
    530 				m := s.mem()
    531 				b := s.endBlock()
    532 				b.Kind = ssa.BlockExit
    533 				b.SetControl(m)
    534 				// TODO: never rewrite OPANIC to OCALLFUNC in the
    535 				// first place. Need to wait until all backends
    536 				// go through SSA.
    537 			}
    538 		}
    539 	case ODEFER:
    540 		s.call(n.Left, callDefer)
    541 	case OPROC:
    542 		s.call(n.Left, callGo)
    543 
    544 	case OAS2DOTTYPE:
    545 		res, resok := s.dottype(n.Rlist.First(), true)
    546 		deref := false
    547 		if !canSSAType(n.Rlist.First().Type) {
    548 			if res.Op != ssa.OpLoad {
    549 				s.Fatalf("dottype of non-load")
    550 			}
    551 			mem := s.mem()
    552 			if mem.Op == ssa.OpVarKill {
    553 				mem = mem.Args[0]
    554 			}
    555 			if res.Args[1] != mem {
    556 				s.Fatalf("memory no longer live from 2-result dottype load")
    557 			}
    558 			deref = true
    559 			res = res.Args[0]
    560 		}
    561 		s.assign(n.List.First(), res, needwritebarrier(n.List.First(), n.Rlist.First()), deref, n.Lineno, 0, false)
    562 		s.assign(n.List.Second(), resok, false, false, n.Lineno, 0, false)
    563 		return
    564 
    565 	case OAS2FUNC:
    566 		// We come here only when it is an intrinsic call returning two values.
    567 		if !isIntrinsicCall(n.Rlist.First()) {
    568 			s.Fatalf("non-intrinsic AS2FUNC not expanded %v", n.Rlist.First())
    569 		}
    570 		v := s.intrinsicCall(n.Rlist.First())
    571 		v1 := s.newValue1(ssa.OpSelect0, n.List.First().Type, v)
    572 		v2 := s.newValue1(ssa.OpSelect1, n.List.Second().Type, v)
    573 		// Make a fake node to mimic loading return value, ONLY for write barrier test.
    574 		// This is future-proofing against non-scalar 2-result intrinsics.
    575 		// Currently we only have scalar ones, which result in no write barrier.
    576 		fakeret := &Node{Op: OINDREGSP}
    577 		s.assign(n.List.First(), v1, needwritebarrier(n.List.First(), fakeret), false, n.Lineno, 0, false)
    578 		s.assign(n.List.Second(), v2, needwritebarrier(n.List.Second(), fakeret), false, n.Lineno, 0, false)
    579 		return
    580 
    581 	case ODCL:
    582 		if n.Left.Class == PAUTOHEAP {
    583 			Fatalf("DCL %v", n)
    584 		}
    585 
    586 	case OLABEL:
    587 		sym := n.Left.Sym
    588 
    589 		if isblanksym(sym) {
    590 			// Empty identifier is valid but useless.
    591 			// See issues 11589, 11593.
    592 			return
    593 		}
    594 
    595 		lab := s.label(sym)
    596 
    597 		// Associate label with its control flow node, if any
    598 		if ctl := n.Name.Defn; ctl != nil {
    599 			switch ctl.Op {
    600 			case OFOR, OSWITCH, OSELECT:
    601 				s.labeledNodes[ctl] = lab
    602 			}
    603 		}
    604 
    605 		if !lab.defined() {
    606 			lab.defNode = n
    607 		} else {
    608 			s.Error("label %v already defined at %v", sym, linestr(lab.defNode.Lineno))
    609 			lab.reported = true
    610 		}
    611 		// The label might already have a target block via a goto.
    612 		if lab.target == nil {
    613 			lab.target = s.f.NewBlock(ssa.BlockPlain)
    614 		}
    615 
    616 		// go to that label (we pretend "label:" is preceded by "goto label")
    617 		b := s.endBlock()
    618 		b.AddEdgeTo(lab.target)
    619 		s.startBlock(lab.target)
    620 
    621 	case OGOTO:
    622 		sym := n.Left.Sym
    623 
    624 		lab := s.label(sym)
    625 		if lab.target == nil {
    626 			lab.target = s.f.NewBlock(ssa.BlockPlain)
    627 		}
    628 		if !lab.used() {
    629 			lab.useNode = n
    630 		}
    631 
    632 		if lab.defined() {
    633 			s.checkgoto(n, lab.defNode)
    634 		} else {
    635 			s.fwdGotos = append(s.fwdGotos, n)
    636 		}
    637 
    638 		b := s.endBlock()
    639 		b.AddEdgeTo(lab.target)
    640 
    641 	case OAS, OASWB:
    642 		// Check whether we can generate static data rather than code.
    643 		// If so, ignore n and defer data generation until codegen.
    644 		// Failure to do this causes writes to readonly symbols.
    645 		if gen_as_init(n, true) {
    646 			var data []*Node
    647 			if s.f.StaticData != nil {
    648 				data = s.f.StaticData.([]*Node)
    649 			}
    650 			s.f.StaticData = append(data, n)
    651 			return
    652 		}
    653 
    654 		if n.Left == n.Right && n.Left.Op == ONAME {
    655 			// An x=x assignment. No point in doing anything
    656 			// here. In addition, skipping this assignment
    657 			// prevents generating:
    658 			//   VARDEF x
    659 			//   COPY x -> x
    660 			// which is bad because x is incorrectly considered
    661 			// dead before the vardef. See issue #14904.
    662 			return
    663 		}
    664 
    665 		var t *Type
    666 		if n.Right != nil {
    667 			t = n.Right.Type
    668 		} else {
    669 			t = n.Left.Type
    670 		}
    671 
    672 		// Evaluate RHS.
    673 		rhs := n.Right
    674 		if rhs != nil {
    675 			switch rhs.Op {
    676 			case OSTRUCTLIT, OARRAYLIT, OSLICELIT:
    677 				// All literals with nonzero fields have already been
    678 				// rewritten during walk. Any that remain are just T{}
    679 				// or equivalents. Use the zero value.
    680 				if !iszero(rhs) {
    681 					Fatalf("literal with nonzero value in SSA: %v", rhs)
    682 				}
    683 				rhs = nil
    684 			case OAPPEND:
    685 				// If we're writing the result of an append back to the same slice,
    686 				// handle it specially to avoid write barriers on the fast (non-growth) path.
    687 				// If the slice can be SSA'd, it'll be on the stack,
    688 				// so there will be no write barriers,
    689 				// so there's no need to attempt to prevent them.
    690 				if samesafeexpr(n.Left, rhs.List.First()) {
    691 					if !s.canSSA(n.Left) {
    692 						if Debug_append > 0 {
    693 							Warnl(n.Lineno, "append: len-only update")
    694 						}
    695 						s.append(rhs, true)
    696 						return
    697 					} else {
    698 						if Debug_append > 0 { // replicating old diagnostic message
    699 							Warnl(n.Lineno, "append: len-only update (in local slice)")
    700 						}
    701 					}
    702 				}
    703 			}
    704 		}
    705 		var r *ssa.Value
    706 		var isVolatile bool
    707 		needwb := n.Op == OASWB
    708 		deref := !canSSAType(t)
    709 		if deref {
    710 			if rhs == nil {
    711 				r = nil // Signal assign to use OpZero.
    712 			} else {
    713 				r, isVolatile = s.addr(rhs, false)
    714 			}
    715 		} else {
    716 			if rhs == nil {
    717 				r = s.zeroVal(t)
    718 			} else {
    719 				r = s.expr(rhs)
    720 			}
    721 		}
    722 		if rhs != nil && rhs.Op == OAPPEND && needwritebarrier(n.Left, rhs) {
    723 			// The frontend gets rid of the write barrier to enable the special OAPPEND
    724 			// handling above, but since this is not a special case, we need it.
    725 			// TODO: just add a ptr graying to the end of growslice?
    726 			// TODO: check whether we need to provide special handling and a write barrier
    727 			// for ODOTTYPE and ORECV also.
    728 			// They get similar wb-removal treatment in walk.go:OAS.
    729 			needwb = true
    730 		}
    731 
    732 		var skip skipMask
    733 		if rhs != nil && (rhs.Op == OSLICE || rhs.Op == OSLICE3 || rhs.Op == OSLICESTR) && samesafeexpr(rhs.Left, n.Left) {
    734 			// We're assigning a slicing operation back to its source.
    735 			// Don't write back fields we aren't changing. See issue #14855.
    736 			i, j, k := rhs.SliceBounds()
    737 			if i != nil && (i.Op == OLITERAL && i.Val().Ctype() == CTINT && i.Int64() == 0) {
    738 				// [0:...] is the same as [:...]
    739 				i = nil
    740 			}
    741 			// TODO: detect defaults for len/cap also.
    742 			// Currently doesn't really work because (*p)[:len(*p)] appears here as:
    743 			//    tmp = len(*p)
    744 			//    (*p)[:tmp]
    745 			//if j != nil && (j.Op == OLEN && samesafeexpr(j.Left, n.Left)) {
    746 			//      j = nil
    747 			//}
    748 			//if k != nil && (k.Op == OCAP && samesafeexpr(k.Left, n.Left)) {
    749 			//      k = nil
    750 			//}
    751 			if i == nil {
    752 				skip |= skipPtr
    753 				if j == nil {
    754 					skip |= skipLen
    755 				}
    756 				if k == nil {
    757 					skip |= skipCap
    758 				}
    759 			}
    760 		}
    761 
    762 		s.assign(n.Left, r, needwb, deref, n.Lineno, skip, isVolatile)
    763 
    764 	case OIF:
    765 		bThen := s.f.NewBlock(ssa.BlockPlain)
    766 		bEnd := s.f.NewBlock(ssa.BlockPlain)
    767 		var bElse *ssa.Block
    768 		if n.Rlist.Len() != 0 {
    769 			bElse = s.f.NewBlock(ssa.BlockPlain)
    770 			s.condBranch(n.Left, bThen, bElse, n.Likely)
    771 		} else {
    772 			s.condBranch(n.Left, bThen, bEnd, n.Likely)
    773 		}
    774 
    775 		s.startBlock(bThen)
    776 		s.stmtList(n.Nbody)
    777 		if b := s.endBlock(); b != nil {
    778 			b.AddEdgeTo(bEnd)
    779 		}
    780 
    781 		if n.Rlist.Len() != 0 {
    782 			s.startBlock(bElse)
    783 			s.stmtList(n.Rlist)
    784 			if b := s.endBlock(); b != nil {
    785 				b.AddEdgeTo(bEnd)
    786 			}
    787 		}
    788 		s.startBlock(bEnd)
    789 
    790 	case ORETURN:
    791 		s.stmtList(n.List)
    792 		s.exit()
    793 	case ORETJMP:
    794 		s.stmtList(n.List)
    795 		b := s.exit()
    796 		b.Kind = ssa.BlockRetJmp // override BlockRet
    797 		b.Aux = n.Left.Sym
    798 
    799 	case OCONTINUE, OBREAK:
    800 		var op string
    801 		var to *ssa.Block
    802 		switch n.Op {
    803 		case OCONTINUE:
    804 			op = "continue"
    805 			to = s.continueTo
    806 		case OBREAK:
    807 			op = "break"
    808 			to = s.breakTo
    809 		}
    810 		if n.Left == nil {
    811 			// plain break/continue
    812 			if to == nil {
    813 				s.Error("%s is not in a loop", op)
    814 				return
    815 			}
    816 			// nothing to do; "to" is already the correct target
    817 		} else {
    818 			// labeled break/continue; look up the target
    819 			sym := n.Left.Sym
    820 			lab := s.label(sym)
    821 			if !lab.used() {
    822 				lab.useNode = n.Left
    823 			}
    824 			if !lab.defined() {
    825 				s.Error("%s label not defined: %v", op, sym)
    826 				lab.reported = true
    827 				return
    828 			}
    829 			switch n.Op {
    830 			case OCONTINUE:
    831 				to = lab.continueTarget
    832 			case OBREAK:
    833 				to = lab.breakTarget
    834 			}
    835 			if to == nil {
    836 				// Valid label but not usable with a break/continue here, e.g.:
    837 				// for {
    838 				// 	continue abc
    839 				// }
    840 				// abc:
    841 				// for {}
    842 				s.Error("invalid %s label %v", op, sym)
    843 				lab.reported = true
    844 				return
    845 			}
    846 		}
    847 
    848 		b := s.endBlock()
    849 		b.AddEdgeTo(to)
    850 
    851 	case OFOR:
    852 		// OFOR: for Ninit; Left; Right { Nbody }
    853 		bCond := s.f.NewBlock(ssa.BlockPlain)
    854 		bBody := s.f.NewBlock(ssa.BlockPlain)
    855 		bIncr := s.f.NewBlock(ssa.BlockPlain)
    856 		bEnd := s.f.NewBlock(ssa.BlockPlain)
    857 
    858 		// first, jump to condition test
    859 		b := s.endBlock()
    860 		b.AddEdgeTo(bCond)
    861 
    862 		// generate code to test condition
    863 		s.startBlock(bCond)
    864 		if n.Left != nil {
    865 			s.condBranch(n.Left, bBody, bEnd, 1)
    866 		} else {
    867 			b := s.endBlock()
    868 			b.Kind = ssa.BlockPlain
    869 			b.AddEdgeTo(bBody)
    870 		}
    871 
    872 		// set up for continue/break in body
    873 		prevContinue := s.continueTo
    874 		prevBreak := s.breakTo
    875 		s.continueTo = bIncr
    876 		s.breakTo = bEnd
    877 		lab := s.labeledNodes[n]
    878 		if lab != nil {
    879 			// labeled for loop
    880 			lab.continueTarget = bIncr
    881 			lab.breakTarget = bEnd
    882 		}
    883 
    884 		// generate body
    885 		s.startBlock(bBody)
    886 		s.stmtList(n.Nbody)
    887 
    888 		// tear down continue/break
    889 		s.continueTo = prevContinue
    890 		s.breakTo = prevBreak
    891 		if lab != nil {
    892 			lab.continueTarget = nil
    893 			lab.breakTarget = nil
    894 		}
    895 
    896 		// done with body, goto incr
    897 		if b := s.endBlock(); b != nil {
    898 			b.AddEdgeTo(bIncr)
    899 		}
    900 
    901 		// generate incr
    902 		s.startBlock(bIncr)
    903 		if n.Right != nil {
    904 			s.stmt(n.Right)
    905 		}
    906 		if b := s.endBlock(); b != nil {
    907 			b.AddEdgeTo(bCond)
    908 		}
    909 		s.startBlock(bEnd)
    910 
    911 	case OSWITCH, OSELECT:
    912 		// These have been mostly rewritten by the front end into their Nbody fields.
    913 		// Our main task is to correctly hook up any break statements.
    914 		bEnd := s.f.NewBlock(ssa.BlockPlain)
    915 
    916 		prevBreak := s.breakTo
    917 		s.breakTo = bEnd
    918 		lab := s.labeledNodes[n]
    919 		if lab != nil {
    920 			// labeled
    921 			lab.breakTarget = bEnd
    922 		}
    923 
    924 		// generate body code
    925 		s.stmtList(n.Nbody)
    926 
    927 		s.breakTo = prevBreak
    928 		if lab != nil {
    929 			lab.breakTarget = nil
    930 		}
    931 
    932 		// OSWITCH never falls through (s.curBlock == nil here).
    933 		// OSELECT does not fall through if we're calling selectgo.
    934 		// OSELECT does fall through if we're calling selectnb{send,recv}[2].
    935 		// In those latter cases, go to the code after the select.
    936 		if b := s.endBlock(); b != nil {
    937 			b.AddEdgeTo(bEnd)
    938 		}
    939 		s.startBlock(bEnd)
    940 
    941 	case OVARKILL:
    942 		// Insert a varkill op to record that a variable is no longer live.
    943 		// We only care about liveness info at call sites, so putting the
    944 		// varkill in the store chain is enough to keep it correctly ordered
    945 		// with respect to call ops.
    946 		if !s.canSSA(n.Left) {
    947 			s.vars[&memVar] = s.newValue1A(ssa.OpVarKill, ssa.TypeMem, n.Left, s.mem())
    948 		}
    949 
    950 	case OVARLIVE:
    951 		// Insert a varlive op to record that a variable is still live.
    952 		if !n.Left.Addrtaken {
    953 			s.Fatalf("VARLIVE variable %v must have Addrtaken set", n.Left)
    954 		}
    955 		s.vars[&memVar] = s.newValue1A(ssa.OpVarLive, ssa.TypeMem, n.Left, s.mem())
    956 
    957 	case OCHECKNIL:
    958 		p := s.expr(n.Left)
    959 		s.nilCheck(p)
    960 
    961 	case OSQRT:
    962 		s.expr(n.Left)
    963 
    964 	default:
    965 		s.Fatalf("unhandled stmt %v", n.Op)
    966 	}
    967 }
    968 
    969 // exit processes any code that needs to be generated just before returning.
    970 // It returns a BlockRet block that ends the control flow. Its control value
    971 // will be set to the final memory state.
    972 func (s *state) exit() *ssa.Block {
    973 	if hasdefer {
    974 		s.rtcall(Deferreturn, true, nil)
    975 	}
    976 
    977 	// Run exit code. Typically, this code copies heap-allocated PPARAMOUT
    978 	// variables back to the stack.
    979 	s.stmtList(s.exitCode)
    980 
    981 	// Store SSAable PPARAMOUT variables back to stack locations.
    982 	for _, n := range s.returns {
    983 		addr := s.decladdrs[n]
    984 		val := s.variable(n, n.Type)
    985 		s.vars[&memVar] = s.newValue1A(ssa.OpVarDef, ssa.TypeMem, n, s.mem())
    986 		s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, n.Type.Size(), addr, val, s.mem())
    987 		// TODO: if val is ever spilled, we'd like to use the
    988 		// PPARAMOUT slot for spilling it. That won't happen
    989 		// currently.
    990 	}
    991 
    992 	// Do actual return.
    993 	m := s.mem()
    994 	b := s.endBlock()
    995 	b.Kind = ssa.BlockRet
    996 	b.SetControl(m)
    997 	return b
    998 }
    999 
// opAndType is the key type of opToSSA: a front-end operation paired
// with the concrete element type it operates on (see concreteEtype).
type opAndType struct {
	op    Op
	etype EType
}
   1004 
// opToSSA maps a front-end operation on a given concrete element type
// to the SSA opcode implementing it. Keys use types normalized by
// concreteEtype; lookups happen in ssaOp and ssaRotateOp.
var opToSSA = map[opAndType]ssa.Op{
	opAndType{OADD, TINT8}:    ssa.OpAdd8,
	opAndType{OADD, TUINT8}:   ssa.OpAdd8,
	opAndType{OADD, TINT16}:   ssa.OpAdd16,
	opAndType{OADD, TUINT16}:  ssa.OpAdd16,
	opAndType{OADD, TINT32}:   ssa.OpAdd32,
	opAndType{OADD, TUINT32}:  ssa.OpAdd32,
	opAndType{OADD, TPTR32}:   ssa.OpAdd32,
	opAndType{OADD, TINT64}:   ssa.OpAdd64,
	opAndType{OADD, TUINT64}:  ssa.OpAdd64,
	opAndType{OADD, TPTR64}:   ssa.OpAdd64,
	opAndType{OADD, TFLOAT32}: ssa.OpAdd32F,
	opAndType{OADD, TFLOAT64}: ssa.OpAdd64F,

	opAndType{OSUB, TINT8}:    ssa.OpSub8,
	opAndType{OSUB, TUINT8}:   ssa.OpSub8,
	opAndType{OSUB, TINT16}:   ssa.OpSub16,
	opAndType{OSUB, TUINT16}:  ssa.OpSub16,
	opAndType{OSUB, TINT32}:   ssa.OpSub32,
	opAndType{OSUB, TUINT32}:  ssa.OpSub32,
	opAndType{OSUB, TINT64}:   ssa.OpSub64,
	opAndType{OSUB, TUINT64}:  ssa.OpSub64,
	opAndType{OSUB, TFLOAT32}: ssa.OpSub32F,
	opAndType{OSUB, TFLOAT64}: ssa.OpSub64F,

	opAndType{ONOT, TBOOL}: ssa.OpNot,

	opAndType{OMINUS, TINT8}:    ssa.OpNeg8,
	opAndType{OMINUS, TUINT8}:   ssa.OpNeg8,
	opAndType{OMINUS, TINT16}:   ssa.OpNeg16,
	opAndType{OMINUS, TUINT16}:  ssa.OpNeg16,
	opAndType{OMINUS, TINT32}:   ssa.OpNeg32,
	opAndType{OMINUS, TUINT32}:  ssa.OpNeg32,
	opAndType{OMINUS, TINT64}:   ssa.OpNeg64,
	opAndType{OMINUS, TUINT64}:  ssa.OpNeg64,
	opAndType{OMINUS, TFLOAT32}: ssa.OpNeg32F,
	opAndType{OMINUS, TFLOAT64}: ssa.OpNeg64F,

	opAndType{OCOM, TINT8}:   ssa.OpCom8,
	opAndType{OCOM, TUINT8}:  ssa.OpCom8,
	opAndType{OCOM, TINT16}:  ssa.OpCom16,
	opAndType{OCOM, TUINT16}: ssa.OpCom16,
	opAndType{OCOM, TINT32}:  ssa.OpCom32,
	opAndType{OCOM, TUINT32}: ssa.OpCom32,
	opAndType{OCOM, TINT64}:  ssa.OpCom64,
	opAndType{OCOM, TUINT64}: ssa.OpCom64,

	opAndType{OIMAG, TCOMPLEX64}:  ssa.OpComplexImag,
	opAndType{OIMAG, TCOMPLEX128}: ssa.OpComplexImag,
	opAndType{OREAL, TCOMPLEX64}:  ssa.OpComplexReal,
	opAndType{OREAL, TCOMPLEX128}: ssa.OpComplexReal,

	opAndType{OMUL, TINT8}:    ssa.OpMul8,
	opAndType{OMUL, TUINT8}:   ssa.OpMul8,
	opAndType{OMUL, TINT16}:   ssa.OpMul16,
	opAndType{OMUL, TUINT16}:  ssa.OpMul16,
	opAndType{OMUL, TINT32}:   ssa.OpMul32,
	opAndType{OMUL, TUINT32}:  ssa.OpMul32,
	opAndType{OMUL, TINT64}:   ssa.OpMul64,
	opAndType{OMUL, TUINT64}:  ssa.OpMul64,
	opAndType{OMUL, TFLOAT32}: ssa.OpMul32F,
	opAndType{OMUL, TFLOAT64}: ssa.OpMul64F,

	opAndType{ODIV, TFLOAT32}: ssa.OpDiv32F,
	opAndType{ODIV, TFLOAT64}: ssa.OpDiv64F,

	opAndType{OHMUL, TINT8}:   ssa.OpHmul8,
	opAndType{OHMUL, TUINT8}:  ssa.OpHmul8u,
	opAndType{OHMUL, TINT16}:  ssa.OpHmul16,
	opAndType{OHMUL, TUINT16}: ssa.OpHmul16u,
	opAndType{OHMUL, TINT32}:  ssa.OpHmul32,
	opAndType{OHMUL, TUINT32}: ssa.OpHmul32u,

	opAndType{ODIV, TINT8}:   ssa.OpDiv8,
	opAndType{ODIV, TUINT8}:  ssa.OpDiv8u,
	opAndType{ODIV, TINT16}:  ssa.OpDiv16,
	opAndType{ODIV, TUINT16}: ssa.OpDiv16u,
	opAndType{ODIV, TINT32}:  ssa.OpDiv32,
	opAndType{ODIV, TUINT32}: ssa.OpDiv32u,
	opAndType{ODIV, TINT64}:  ssa.OpDiv64,
	opAndType{ODIV, TUINT64}: ssa.OpDiv64u,

	opAndType{OMOD, TINT8}:   ssa.OpMod8,
	opAndType{OMOD, TUINT8}:  ssa.OpMod8u,
	opAndType{OMOD, TINT16}:  ssa.OpMod16,
	opAndType{OMOD, TUINT16}: ssa.OpMod16u,
	opAndType{OMOD, TINT32}:  ssa.OpMod32,
	opAndType{OMOD, TUINT32}: ssa.OpMod32u,
	opAndType{OMOD, TINT64}:  ssa.OpMod64,
	opAndType{OMOD, TUINT64}: ssa.OpMod64u,

	opAndType{OAND, TINT8}:   ssa.OpAnd8,
	opAndType{OAND, TUINT8}:  ssa.OpAnd8,
	opAndType{OAND, TINT16}:  ssa.OpAnd16,
	opAndType{OAND, TUINT16}: ssa.OpAnd16,
	opAndType{OAND, TINT32}:  ssa.OpAnd32,
	opAndType{OAND, TUINT32}: ssa.OpAnd32,
	opAndType{OAND, TINT64}:  ssa.OpAnd64,
	opAndType{OAND, TUINT64}: ssa.OpAnd64,

	opAndType{OOR, TINT8}:   ssa.OpOr8,
	opAndType{OOR, TUINT8}:  ssa.OpOr8,
	opAndType{OOR, TINT16}:  ssa.OpOr16,
	opAndType{OOR, TUINT16}: ssa.OpOr16,
	opAndType{OOR, TINT32}:  ssa.OpOr32,
	opAndType{OOR, TUINT32}: ssa.OpOr32,
	opAndType{OOR, TINT64}:  ssa.OpOr64,
	opAndType{OOR, TUINT64}: ssa.OpOr64,

	opAndType{OXOR, TINT8}:   ssa.OpXor8,
	opAndType{OXOR, TUINT8}:  ssa.OpXor8,
	opAndType{OXOR, TINT16}:  ssa.OpXor16,
	opAndType{OXOR, TUINT16}: ssa.OpXor16,
	opAndType{OXOR, TINT32}:  ssa.OpXor32,
	opAndType{OXOR, TUINT32}: ssa.OpXor32,
	opAndType{OXOR, TINT64}:  ssa.OpXor64,
	opAndType{OXOR, TUINT64}: ssa.OpXor64,

	opAndType{OEQ, TBOOL}:      ssa.OpEqB,
	opAndType{OEQ, TINT8}:      ssa.OpEq8,
	opAndType{OEQ, TUINT8}:     ssa.OpEq8,
	opAndType{OEQ, TINT16}:     ssa.OpEq16,
	opAndType{OEQ, TUINT16}:    ssa.OpEq16,
	opAndType{OEQ, TINT32}:     ssa.OpEq32,
	opAndType{OEQ, TUINT32}:    ssa.OpEq32,
	opAndType{OEQ, TINT64}:     ssa.OpEq64,
	opAndType{OEQ, TUINT64}:    ssa.OpEq64,
	opAndType{OEQ, TINTER}:     ssa.OpEqInter,
	opAndType{OEQ, TSLICE}:     ssa.OpEqSlice,
	opAndType{OEQ, TFUNC}:      ssa.OpEqPtr,
	opAndType{OEQ, TMAP}:       ssa.OpEqPtr,
	opAndType{OEQ, TCHAN}:      ssa.OpEqPtr,
	opAndType{OEQ, TPTR32}:     ssa.OpEqPtr,
	opAndType{OEQ, TPTR64}:     ssa.OpEqPtr,
	opAndType{OEQ, TUINTPTR}:   ssa.OpEqPtr,
	opAndType{OEQ, TUNSAFEPTR}: ssa.OpEqPtr,
	opAndType{OEQ, TFLOAT64}:   ssa.OpEq64F,
	opAndType{OEQ, TFLOAT32}:   ssa.OpEq32F,

	opAndType{ONE, TBOOL}:      ssa.OpNeqB,
	opAndType{ONE, TINT8}:      ssa.OpNeq8,
	opAndType{ONE, TUINT8}:     ssa.OpNeq8,
	opAndType{ONE, TINT16}:     ssa.OpNeq16,
	opAndType{ONE, TUINT16}:    ssa.OpNeq16,
	opAndType{ONE, TINT32}:     ssa.OpNeq32,
	opAndType{ONE, TUINT32}:    ssa.OpNeq32,
	opAndType{ONE, TINT64}:     ssa.OpNeq64,
	opAndType{ONE, TUINT64}:    ssa.OpNeq64,
	opAndType{ONE, TINTER}:     ssa.OpNeqInter,
	opAndType{ONE, TSLICE}:     ssa.OpNeqSlice,
	opAndType{ONE, TFUNC}:      ssa.OpNeqPtr,
	opAndType{ONE, TMAP}:       ssa.OpNeqPtr,
	opAndType{ONE, TCHAN}:      ssa.OpNeqPtr,
	opAndType{ONE, TPTR32}:     ssa.OpNeqPtr,
	opAndType{ONE, TPTR64}:     ssa.OpNeqPtr,
	opAndType{ONE, TUINTPTR}:   ssa.OpNeqPtr,
	opAndType{ONE, TUNSAFEPTR}: ssa.OpNeqPtr,
	opAndType{ONE, TFLOAT64}:   ssa.OpNeq64F,
	opAndType{ONE, TFLOAT32}:   ssa.OpNeq32F,

	opAndType{OLT, TINT8}:    ssa.OpLess8,
	opAndType{OLT, TUINT8}:   ssa.OpLess8U,
	opAndType{OLT, TINT16}:   ssa.OpLess16,
	opAndType{OLT, TUINT16}:  ssa.OpLess16U,
	opAndType{OLT, TINT32}:   ssa.OpLess32,
	opAndType{OLT, TUINT32}:  ssa.OpLess32U,
	opAndType{OLT, TINT64}:   ssa.OpLess64,
	opAndType{OLT, TUINT64}:  ssa.OpLess64U,
	opAndType{OLT, TFLOAT64}: ssa.OpLess64F,
	opAndType{OLT, TFLOAT32}: ssa.OpLess32F,

	opAndType{OGT, TINT8}:    ssa.OpGreater8,
	opAndType{OGT, TUINT8}:   ssa.OpGreater8U,
	opAndType{OGT, TINT16}:   ssa.OpGreater16,
	opAndType{OGT, TUINT16}:  ssa.OpGreater16U,
	opAndType{OGT, TINT32}:   ssa.OpGreater32,
	opAndType{OGT, TUINT32}:  ssa.OpGreater32U,
	opAndType{OGT, TINT64}:   ssa.OpGreater64,
	opAndType{OGT, TUINT64}:  ssa.OpGreater64U,
	opAndType{OGT, TFLOAT64}: ssa.OpGreater64F,
	opAndType{OGT, TFLOAT32}: ssa.OpGreater32F,

	opAndType{OLE, TINT8}:    ssa.OpLeq8,
	opAndType{OLE, TUINT8}:   ssa.OpLeq8U,
	opAndType{OLE, TINT16}:   ssa.OpLeq16,
	opAndType{OLE, TUINT16}:  ssa.OpLeq16U,
	opAndType{OLE, TINT32}:   ssa.OpLeq32,
	opAndType{OLE, TUINT32}:  ssa.OpLeq32U,
	opAndType{OLE, TINT64}:   ssa.OpLeq64,
	opAndType{OLE, TUINT64}:  ssa.OpLeq64U,
	opAndType{OLE, TFLOAT64}: ssa.OpLeq64F,
	opAndType{OLE, TFLOAT32}: ssa.OpLeq32F,

	opAndType{OGE, TINT8}:    ssa.OpGeq8,
	opAndType{OGE, TUINT8}:   ssa.OpGeq8U,
	opAndType{OGE, TINT16}:   ssa.OpGeq16,
	opAndType{OGE, TUINT16}:  ssa.OpGeq16U,
	opAndType{OGE, TINT32}:   ssa.OpGeq32,
	opAndType{OGE, TUINT32}:  ssa.OpGeq32U,
	opAndType{OGE, TINT64}:   ssa.OpGeq64,
	opAndType{OGE, TUINT64}:  ssa.OpGeq64U,
	opAndType{OGE, TFLOAT64}: ssa.OpGeq64F,
	opAndType{OGE, TFLOAT32}: ssa.OpGeq32F,

	opAndType{OLROT, TUINT8}:  ssa.OpLrot8,
	opAndType{OLROT, TUINT16}: ssa.OpLrot16,
	opAndType{OLROT, TUINT32}: ssa.OpLrot32,
	opAndType{OLROT, TUINT64}: ssa.OpLrot64,

	opAndType{OSQRT, TFLOAT64}: ssa.OpSqrt,
}
   1216 
   1217 func (s *state) concreteEtype(t *Type) EType {
   1218 	e := t.Etype
   1219 	switch e {
   1220 	default:
   1221 		return e
   1222 	case TINT:
   1223 		if s.config.IntSize == 8 {
   1224 			return TINT64
   1225 		}
   1226 		return TINT32
   1227 	case TUINT:
   1228 		if s.config.IntSize == 8 {
   1229 			return TUINT64
   1230 		}
   1231 		return TUINT32
   1232 	case TUINTPTR:
   1233 		if s.config.PtrSize == 8 {
   1234 			return TUINT64
   1235 		}
   1236 		return TUINT32
   1237 	}
   1238 }
   1239 
   1240 func (s *state) ssaOp(op Op, t *Type) ssa.Op {
   1241 	etype := s.concreteEtype(t)
   1242 	x, ok := opToSSA[opAndType{op, etype}]
   1243 	if !ok {
   1244 		s.Fatalf("unhandled binary op %v %s", op, etype)
   1245 	}
   1246 	return x
   1247 }
   1248 
   1249 func floatForComplex(t *Type) *Type {
   1250 	if t.Size() == 8 {
   1251 		return Types[TFLOAT32]
   1252 	} else {
   1253 		return Types[TFLOAT64]
   1254 	}
   1255 }
   1256 
// opAndTwoTypes is the key type of shiftOpToSSA: a shift operation
// together with the concrete types of the shifted value (etype1) and
// the shift count (etype2).
type opAndTwoTypes struct {
	op     Op
	etype1 EType
	etype2 EType
}
   1262 
// twoTypes is the key type of the float conversion maps: the source
// (etype1) and destination (etype2) types of a conversion.
type twoTypes struct {
	etype1 EType
	etype2 EType
}
   1267 
// twoOpsAndType describes how to lower a conversion as two SSA ops in
// sequence: apply op1, then op2, with intermediateType as the type of
// the value produced between them.
type twoOpsAndType struct {
	op1              ssa.Op
	op2              ssa.Op
	intermediateType EType
}
   1273 
// fpConvOpToSSA maps a (from, to) type pair for integer<->float and
// float<->float conversions to the two SSA ops that implement it and
// the intermediate type between them. Entries with ssa.OpInvalid mark
// conversions that need branchy code expansion instead of a direct op.
var fpConvOpToSSA = map[twoTypes]twoOpsAndType{

	twoTypes{TINT8, TFLOAT32}:  twoOpsAndType{ssa.OpSignExt8to32, ssa.OpCvt32to32F, TINT32},
	twoTypes{TINT16, TFLOAT32}: twoOpsAndType{ssa.OpSignExt16to32, ssa.OpCvt32to32F, TINT32},
	twoTypes{TINT32, TFLOAT32}: twoOpsAndType{ssa.OpCopy, ssa.OpCvt32to32F, TINT32},
	twoTypes{TINT64, TFLOAT32}: twoOpsAndType{ssa.OpCopy, ssa.OpCvt64to32F, TINT64},

	twoTypes{TINT8, TFLOAT64}:  twoOpsAndType{ssa.OpSignExt8to32, ssa.OpCvt32to64F, TINT32},
	twoTypes{TINT16, TFLOAT64}: twoOpsAndType{ssa.OpSignExt16to32, ssa.OpCvt32to64F, TINT32},
	twoTypes{TINT32, TFLOAT64}: twoOpsAndType{ssa.OpCopy, ssa.OpCvt32to64F, TINT32},
	twoTypes{TINT64, TFLOAT64}: twoOpsAndType{ssa.OpCopy, ssa.OpCvt64to64F, TINT64},

	twoTypes{TFLOAT32, TINT8}:  twoOpsAndType{ssa.OpCvt32Fto32, ssa.OpTrunc32to8, TINT32},
	twoTypes{TFLOAT32, TINT16}: twoOpsAndType{ssa.OpCvt32Fto32, ssa.OpTrunc32to16, TINT32},
	twoTypes{TFLOAT32, TINT32}: twoOpsAndType{ssa.OpCvt32Fto32, ssa.OpCopy, TINT32},
	twoTypes{TFLOAT32, TINT64}: twoOpsAndType{ssa.OpCvt32Fto64, ssa.OpCopy, TINT64},

	twoTypes{TFLOAT64, TINT8}:  twoOpsAndType{ssa.OpCvt64Fto32, ssa.OpTrunc32to8, TINT32},
	twoTypes{TFLOAT64, TINT16}: twoOpsAndType{ssa.OpCvt64Fto32, ssa.OpTrunc32to16, TINT32},
	twoTypes{TFLOAT64, TINT32}: twoOpsAndType{ssa.OpCvt64Fto32, ssa.OpCopy, TINT32},
	twoTypes{TFLOAT64, TINT64}: twoOpsAndType{ssa.OpCvt64Fto64, ssa.OpCopy, TINT64},
	// unsigned
	twoTypes{TUINT8, TFLOAT32}:  twoOpsAndType{ssa.OpZeroExt8to32, ssa.OpCvt32to32F, TINT32},
	twoTypes{TUINT16, TFLOAT32}: twoOpsAndType{ssa.OpZeroExt16to32, ssa.OpCvt32to32F, TINT32},
	twoTypes{TUINT32, TFLOAT32}: twoOpsAndType{ssa.OpZeroExt32to64, ssa.OpCvt64to32F, TINT64}, // go wide to dodge unsigned
	twoTypes{TUINT64, TFLOAT32}: twoOpsAndType{ssa.OpCopy, ssa.OpInvalid, TUINT64},            // Cvt64Uto32F, branchy code expansion instead

	twoTypes{TUINT8, TFLOAT64}:  twoOpsAndType{ssa.OpZeroExt8to32, ssa.OpCvt32to64F, TINT32},
	twoTypes{TUINT16, TFLOAT64}: twoOpsAndType{ssa.OpZeroExt16to32, ssa.OpCvt32to64F, TINT32},
	twoTypes{TUINT32, TFLOAT64}: twoOpsAndType{ssa.OpZeroExt32to64, ssa.OpCvt64to64F, TINT64}, // go wide to dodge unsigned
	twoTypes{TUINT64, TFLOAT64}: twoOpsAndType{ssa.OpCopy, ssa.OpInvalid, TUINT64},            // Cvt64Uto64F, branchy code expansion instead

	twoTypes{TFLOAT32, TUINT8}:  twoOpsAndType{ssa.OpCvt32Fto32, ssa.OpTrunc32to8, TINT32},
	twoTypes{TFLOAT32, TUINT16}: twoOpsAndType{ssa.OpCvt32Fto32, ssa.OpTrunc32to16, TINT32},
	twoTypes{TFLOAT32, TUINT32}: twoOpsAndType{ssa.OpCvt32Fto64, ssa.OpTrunc64to32, TINT64}, // go wide to dodge unsigned
	twoTypes{TFLOAT32, TUINT64}: twoOpsAndType{ssa.OpInvalid, ssa.OpCopy, TUINT64},          // Cvt32Fto64U, branchy code expansion instead

	twoTypes{TFLOAT64, TUINT8}:  twoOpsAndType{ssa.OpCvt64Fto32, ssa.OpTrunc32to8, TINT32},
	twoTypes{TFLOAT64, TUINT16}: twoOpsAndType{ssa.OpCvt64Fto32, ssa.OpTrunc32to16, TINT32},
	twoTypes{TFLOAT64, TUINT32}: twoOpsAndType{ssa.OpCvt64Fto64, ssa.OpTrunc64to32, TINT64}, // go wide to dodge unsigned
	twoTypes{TFLOAT64, TUINT64}: twoOpsAndType{ssa.OpInvalid, ssa.OpCopy, TUINT64},          // Cvt64Fto64U, branchy code expansion instead

	// float
	twoTypes{TFLOAT64, TFLOAT32}: twoOpsAndType{ssa.OpCvt64Fto32F, ssa.OpCopy, TFLOAT32},
	twoTypes{TFLOAT64, TFLOAT64}: twoOpsAndType{ssa.OpCopy, ssa.OpCopy, TFLOAT64},
	twoTypes{TFLOAT32, TFLOAT32}: twoOpsAndType{ssa.OpCopy, ssa.OpCopy, TFLOAT32},
	twoTypes{TFLOAT32, TFLOAT64}: twoOpsAndType{ssa.OpCvt32Fto64F, ssa.OpCopy, TFLOAT64},
}
   1322 
   1323 // this map is used only for 32-bit arch, and only includes the difference
   1324 // on 32-bit arch, don't use int64<->float conversion for uint32
   1325 var fpConvOpToSSA32 = map[twoTypes]twoOpsAndType{
   1326 	twoTypes{TUINT32, TFLOAT32}: twoOpsAndType{ssa.OpCopy, ssa.OpCvt32Uto32F, TUINT32},
   1327 	twoTypes{TUINT32, TFLOAT64}: twoOpsAndType{ssa.OpCopy, ssa.OpCvt32Uto64F, TUINT32},
   1328 	twoTypes{TFLOAT32, TUINT32}: twoOpsAndType{ssa.OpCvt32Fto32U, ssa.OpCopy, TUINT32},
   1329 	twoTypes{TFLOAT64, TUINT32}: twoOpsAndType{ssa.OpCvt64Fto32U, ssa.OpCopy, TUINT32},
   1330 }
   1331 
   1332 // uint64<->float conversions, only on machines that have intructions for that
   1333 var uint64fpConvOpToSSA = map[twoTypes]twoOpsAndType{
   1334 	twoTypes{TUINT64, TFLOAT32}: twoOpsAndType{ssa.OpCopy, ssa.OpCvt64Uto32F, TUINT64},
   1335 	twoTypes{TUINT64, TFLOAT64}: twoOpsAndType{ssa.OpCopy, ssa.OpCvt64Uto64F, TUINT64},
   1336 	twoTypes{TFLOAT32, TUINT64}: twoOpsAndType{ssa.OpCvt32Fto64U, ssa.OpCopy, TUINT64},
   1337 	twoTypes{TFLOAT64, TUINT64}: twoOpsAndType{ssa.OpCvt64Fto64U, ssa.OpCopy, TUINT64},
   1338 }
   1339 
// shiftOpToSSA maps a shift operation, the concrete type of the value
// being shifted, and the (unsigned) concrete type of the shift count
// to the SSA opcode implementing it. Looked up via ssaShiftOp.
var shiftOpToSSA = map[opAndTwoTypes]ssa.Op{
	opAndTwoTypes{OLSH, TINT8, TUINT8}:   ssa.OpLsh8x8,
	opAndTwoTypes{OLSH, TUINT8, TUINT8}:  ssa.OpLsh8x8,
	opAndTwoTypes{OLSH, TINT8, TUINT16}:  ssa.OpLsh8x16,
	opAndTwoTypes{OLSH, TUINT8, TUINT16}: ssa.OpLsh8x16,
	opAndTwoTypes{OLSH, TINT8, TUINT32}:  ssa.OpLsh8x32,
	opAndTwoTypes{OLSH, TUINT8, TUINT32}: ssa.OpLsh8x32,
	opAndTwoTypes{OLSH, TINT8, TUINT64}:  ssa.OpLsh8x64,
	opAndTwoTypes{OLSH, TUINT8, TUINT64}: ssa.OpLsh8x64,

	opAndTwoTypes{OLSH, TINT16, TUINT8}:   ssa.OpLsh16x8,
	opAndTwoTypes{OLSH, TUINT16, TUINT8}:  ssa.OpLsh16x8,
	opAndTwoTypes{OLSH, TINT16, TUINT16}:  ssa.OpLsh16x16,
	opAndTwoTypes{OLSH, TUINT16, TUINT16}: ssa.OpLsh16x16,
	opAndTwoTypes{OLSH, TINT16, TUINT32}:  ssa.OpLsh16x32,
	opAndTwoTypes{OLSH, TUINT16, TUINT32}: ssa.OpLsh16x32,
	opAndTwoTypes{OLSH, TINT16, TUINT64}:  ssa.OpLsh16x64,
	opAndTwoTypes{OLSH, TUINT16, TUINT64}: ssa.OpLsh16x64,

	opAndTwoTypes{OLSH, TINT32, TUINT8}:   ssa.OpLsh32x8,
	opAndTwoTypes{OLSH, TUINT32, TUINT8}:  ssa.OpLsh32x8,
	opAndTwoTypes{OLSH, TINT32, TUINT16}:  ssa.OpLsh32x16,
	opAndTwoTypes{OLSH, TUINT32, TUINT16}: ssa.OpLsh32x16,
	opAndTwoTypes{OLSH, TINT32, TUINT32}:  ssa.OpLsh32x32,
	opAndTwoTypes{OLSH, TUINT32, TUINT32}: ssa.OpLsh32x32,
	opAndTwoTypes{OLSH, TINT32, TUINT64}:  ssa.OpLsh32x64,
	opAndTwoTypes{OLSH, TUINT32, TUINT64}: ssa.OpLsh32x64,

	opAndTwoTypes{OLSH, TINT64, TUINT8}:   ssa.OpLsh64x8,
	opAndTwoTypes{OLSH, TUINT64, TUINT8}:  ssa.OpLsh64x8,
	opAndTwoTypes{OLSH, TINT64, TUINT16}:  ssa.OpLsh64x16,
	opAndTwoTypes{OLSH, TUINT64, TUINT16}: ssa.OpLsh64x16,
	opAndTwoTypes{OLSH, TINT64, TUINT32}:  ssa.OpLsh64x32,
	opAndTwoTypes{OLSH, TUINT64, TUINT32}: ssa.OpLsh64x32,
	opAndTwoTypes{OLSH, TINT64, TUINT64}:  ssa.OpLsh64x64,
	opAndTwoTypes{OLSH, TUINT64, TUINT64}: ssa.OpLsh64x64,

	opAndTwoTypes{ORSH, TINT8, TUINT8}:   ssa.OpRsh8x8,
	opAndTwoTypes{ORSH, TUINT8, TUINT8}:  ssa.OpRsh8Ux8,
	opAndTwoTypes{ORSH, TINT8, TUINT16}:  ssa.OpRsh8x16,
	opAndTwoTypes{ORSH, TUINT8, TUINT16}: ssa.OpRsh8Ux16,
	opAndTwoTypes{ORSH, TINT8, TUINT32}:  ssa.OpRsh8x32,
	opAndTwoTypes{ORSH, TUINT8, TUINT32}: ssa.OpRsh8Ux32,
	opAndTwoTypes{ORSH, TINT8, TUINT64}:  ssa.OpRsh8x64,
	opAndTwoTypes{ORSH, TUINT8, TUINT64}: ssa.OpRsh8Ux64,

	opAndTwoTypes{ORSH, TINT16, TUINT8}:   ssa.OpRsh16x8,
	opAndTwoTypes{ORSH, TUINT16, TUINT8}:  ssa.OpRsh16Ux8,
	opAndTwoTypes{ORSH, TINT16, TUINT16}:  ssa.OpRsh16x16,
	opAndTwoTypes{ORSH, TUINT16, TUINT16}: ssa.OpRsh16Ux16,
	opAndTwoTypes{ORSH, TINT16, TUINT32}:  ssa.OpRsh16x32,
	opAndTwoTypes{ORSH, TUINT16, TUINT32}: ssa.OpRsh16Ux32,
	opAndTwoTypes{ORSH, TINT16, TUINT64}:  ssa.OpRsh16x64,
	opAndTwoTypes{ORSH, TUINT16, TUINT64}: ssa.OpRsh16Ux64,

	opAndTwoTypes{ORSH, TINT32, TUINT8}:   ssa.OpRsh32x8,
	opAndTwoTypes{ORSH, TUINT32, TUINT8}:  ssa.OpRsh32Ux8,
	opAndTwoTypes{ORSH, TINT32, TUINT16}:  ssa.OpRsh32x16,
	opAndTwoTypes{ORSH, TUINT32, TUINT16}: ssa.OpRsh32Ux16,
	opAndTwoTypes{ORSH, TINT32, TUINT32}:  ssa.OpRsh32x32,
	opAndTwoTypes{ORSH, TUINT32, TUINT32}: ssa.OpRsh32Ux32,
	opAndTwoTypes{ORSH, TINT32, TUINT64}:  ssa.OpRsh32x64,
	opAndTwoTypes{ORSH, TUINT32, TUINT64}: ssa.OpRsh32Ux64,

	opAndTwoTypes{ORSH, TINT64, TUINT8}:   ssa.OpRsh64x8,
	opAndTwoTypes{ORSH, TUINT64, TUINT8}:  ssa.OpRsh64Ux8,
	opAndTwoTypes{ORSH, TINT64, TUINT16}:  ssa.OpRsh64x16,
	opAndTwoTypes{ORSH, TUINT64, TUINT16}: ssa.OpRsh64Ux16,
	opAndTwoTypes{ORSH, TINT64, TUINT32}:  ssa.OpRsh64x32,
	opAndTwoTypes{ORSH, TUINT64, TUINT32}: ssa.OpRsh64Ux32,
	opAndTwoTypes{ORSH, TINT64, TUINT64}:  ssa.OpRsh64x64,
	opAndTwoTypes{ORSH, TUINT64, TUINT64}: ssa.OpRsh64Ux64,
}
   1413 
   1414 func (s *state) ssaShiftOp(op Op, t *Type, u *Type) ssa.Op {
   1415 	etype1 := s.concreteEtype(t)
   1416 	etype2 := s.concreteEtype(u)
   1417 	x, ok := shiftOpToSSA[opAndTwoTypes{op, etype1, etype2}]
   1418 	if !ok {
   1419 		s.Fatalf("unhandled shift op %v etype=%s/%s", op, etype1, etype2)
   1420 	}
   1421 	return x
   1422 }
   1423 
   1424 func (s *state) ssaRotateOp(op Op, t *Type) ssa.Op {
   1425 	etype1 := s.concreteEtype(t)
   1426 	x, ok := opToSSA[opAndType{op, etype1}]
   1427 	if !ok {
   1428 		s.Fatalf("unhandled rotate op %v etype=%s", op, etype1)
   1429 	}
   1430 	return x
   1431 }
   1432 
   1433 // expr converts the expression n to ssa, adds it to s and returns the ssa result.
   1434 func (s *state) expr(n *Node) *ssa.Value {
   1435 	if !(n.Op == ONAME || n.Op == OLITERAL && n.Sym != nil) {
   1436 		// ONAMEs and named OLITERALs have the line number
   1437 		// of the decl, not the use. See issue 14742.
   1438 		s.pushLine(n.Lineno)
   1439 		defer s.popLine()
   1440 	}
   1441 
   1442 	s.stmtList(n.Ninit)
   1443 	switch n.Op {
   1444 	case OARRAYBYTESTRTMP:
   1445 		slice := s.expr(n.Left)
   1446 		ptr := s.newValue1(ssa.OpSlicePtr, ptrto(Types[TUINT8]), slice)
   1447 		len := s.newValue1(ssa.OpSliceLen, Types[TINT], slice)
   1448 		return s.newValue2(ssa.OpStringMake, n.Type, ptr, len)
   1449 	case OSTRARRAYBYTETMP:
   1450 		str := s.expr(n.Left)
   1451 		ptr := s.newValue1(ssa.OpStringPtr, ptrto(Types[TUINT8]), str)
   1452 		len := s.newValue1(ssa.OpStringLen, Types[TINT], str)
   1453 		return s.newValue3(ssa.OpSliceMake, n.Type, ptr, len, len)
   1454 	case OCFUNC:
   1455 		aux := s.lookupSymbol(n, &ssa.ExternSymbol{Typ: n.Type, Sym: n.Left.Sym})
   1456 		return s.entryNewValue1A(ssa.OpAddr, n.Type, aux, s.sb)
   1457 	case ONAME:
   1458 		if n.Class == PFUNC {
   1459 			// "value" of a function is the address of the function's closure
   1460 			sym := funcsym(n.Sym)
   1461 			aux := &ssa.ExternSymbol{Typ: n.Type, Sym: sym}
   1462 			return s.entryNewValue1A(ssa.OpAddr, ptrto(n.Type), aux, s.sb)
   1463 		}
   1464 		if s.canSSA(n) {
   1465 			return s.variable(n, n.Type)
   1466 		}
   1467 		addr, _ := s.addr(n, false)
   1468 		return s.newValue2(ssa.OpLoad, n.Type, addr, s.mem())
   1469 	case OCLOSUREVAR:
   1470 		addr, _ := s.addr(n, false)
   1471 		return s.newValue2(ssa.OpLoad, n.Type, addr, s.mem())
   1472 	case OLITERAL:
   1473 		switch u := n.Val().U.(type) {
   1474 		case *Mpint:
   1475 			i := u.Int64()
   1476 			switch n.Type.Size() {
   1477 			case 1:
   1478 				return s.constInt8(n.Type, int8(i))
   1479 			case 2:
   1480 				return s.constInt16(n.Type, int16(i))
   1481 			case 4:
   1482 				return s.constInt32(n.Type, int32(i))
   1483 			case 8:
   1484 				return s.constInt64(n.Type, i)
   1485 			default:
   1486 				s.Fatalf("bad integer size %d", n.Type.Size())
   1487 				return nil
   1488 			}
   1489 		case string:
   1490 			if u == "" {
   1491 				return s.constEmptyString(n.Type)
   1492 			}
   1493 			return s.entryNewValue0A(ssa.OpConstString, n.Type, u)
   1494 		case bool:
   1495 			return s.constBool(u)
   1496 		case *NilVal:
   1497 			t := n.Type
   1498 			switch {
   1499 			case t.IsSlice():
   1500 				return s.constSlice(t)
   1501 			case t.IsInterface():
   1502 				return s.constInterface(t)
   1503 			default:
   1504 				return s.constNil(t)
   1505 			}
   1506 		case *Mpflt:
   1507 			switch n.Type.Size() {
   1508 			case 4:
   1509 				return s.constFloat32(n.Type, u.Float32())
   1510 			case 8:
   1511 				return s.constFloat64(n.Type, u.Float64())
   1512 			default:
   1513 				s.Fatalf("bad float size %d", n.Type.Size())
   1514 				return nil
   1515 			}
   1516 		case *Mpcplx:
   1517 			r := &u.Real
   1518 			i := &u.Imag
   1519 			switch n.Type.Size() {
   1520 			case 8:
   1521 				pt := Types[TFLOAT32]
   1522 				return s.newValue2(ssa.OpComplexMake, n.Type,
   1523 					s.constFloat32(pt, r.Float32()),
   1524 					s.constFloat32(pt, i.Float32()))
   1525 			case 16:
   1526 				pt := Types[TFLOAT64]
   1527 				return s.newValue2(ssa.OpComplexMake, n.Type,
   1528 					s.constFloat64(pt, r.Float64()),
   1529 					s.constFloat64(pt, i.Float64()))
   1530 			default:
   1531 				s.Fatalf("bad float size %d", n.Type.Size())
   1532 				return nil
   1533 			}
   1534 
   1535 		default:
   1536 			s.Fatalf("unhandled OLITERAL %v", n.Val().Ctype())
   1537 			return nil
   1538 		}
   1539 	case OCONVNOP:
   1540 		to := n.Type
   1541 		from := n.Left.Type
   1542 
   1543 		// Assume everything will work out, so set up our return value.
   1544 		// Anything interesting that happens from here is a fatal.
   1545 		x := s.expr(n.Left)
   1546 
   1547 		// Special case for not confusing GC and liveness.
   1548 		// We don't want pointers accidentally classified
   1549 		// as not-pointers or vice-versa because of copy
   1550 		// elision.
   1551 		if to.IsPtrShaped() != from.IsPtrShaped() {
   1552 			return s.newValue2(ssa.OpConvert, to, x, s.mem())
   1553 		}
   1554 
   1555 		v := s.newValue1(ssa.OpCopy, to, x) // ensure that v has the right type
   1556 
   1557 		// CONVNOP closure
   1558 		if to.Etype == TFUNC && from.IsPtrShaped() {
   1559 			return v
   1560 		}
   1561 
   1562 		// named <--> unnamed type or typed <--> untyped const
   1563 		if from.Etype == to.Etype {
   1564 			return v
   1565 		}
   1566 
   1567 		// unsafe.Pointer <--> *T
   1568 		if to.Etype == TUNSAFEPTR && from.IsPtr() || from.Etype == TUNSAFEPTR && to.IsPtr() {
   1569 			return v
   1570 		}
   1571 
   1572 		dowidth(from)
   1573 		dowidth(to)
   1574 		if from.Width != to.Width {
   1575 			s.Fatalf("CONVNOP width mismatch %v (%d) -> %v (%d)\n", from, from.Width, to, to.Width)
   1576 			return nil
   1577 		}
   1578 		if etypesign(from.Etype) != etypesign(to.Etype) {
   1579 			s.Fatalf("CONVNOP sign mismatch %v (%s) -> %v (%s)\n", from, from.Etype, to, to.Etype)
   1580 			return nil
   1581 		}
   1582 
   1583 		if instrumenting {
   1584 			// These appear to be fine, but they fail the
   1585 			// integer constraint below, so okay them here.
   1586 			// Sample non-integer conversion: map[string]string -> *uint8
   1587 			return v
   1588 		}
   1589 
   1590 		if etypesign(from.Etype) == 0 {
   1591 			s.Fatalf("CONVNOP unrecognized non-integer %v -> %v\n", from, to)
   1592 			return nil
   1593 		}
   1594 
   1595 		// integer, same width, same sign
   1596 		return v
   1597 
   1598 	case OCONV:
   1599 		x := s.expr(n.Left)
   1600 		ft := n.Left.Type // from type
   1601 		tt := n.Type      // to type
   1602 		if ft.IsInteger() && tt.IsInteger() {
   1603 			var op ssa.Op
   1604 			if tt.Size() == ft.Size() {
   1605 				op = ssa.OpCopy
   1606 			} else if tt.Size() < ft.Size() {
   1607 				// truncation
   1608 				switch 10*ft.Size() + tt.Size() {
   1609 				case 21:
   1610 					op = ssa.OpTrunc16to8
   1611 				case 41:
   1612 					op = ssa.OpTrunc32to8
   1613 				case 42:
   1614 					op = ssa.OpTrunc32to16
   1615 				case 81:
   1616 					op = ssa.OpTrunc64to8
   1617 				case 82:
   1618 					op = ssa.OpTrunc64to16
   1619 				case 84:
   1620 					op = ssa.OpTrunc64to32
   1621 				default:
   1622 					s.Fatalf("weird integer truncation %v -> %v", ft, tt)
   1623 				}
   1624 			} else if ft.IsSigned() {
   1625 				// sign extension
   1626 				switch 10*ft.Size() + tt.Size() {
   1627 				case 12:
   1628 					op = ssa.OpSignExt8to16
   1629 				case 14:
   1630 					op = ssa.OpSignExt8to32
   1631 				case 18:
   1632 					op = ssa.OpSignExt8to64
   1633 				case 24:
   1634 					op = ssa.OpSignExt16to32
   1635 				case 28:
   1636 					op = ssa.OpSignExt16to64
   1637 				case 48:
   1638 					op = ssa.OpSignExt32to64
   1639 				default:
   1640 					s.Fatalf("bad integer sign extension %v -> %v", ft, tt)
   1641 				}
   1642 			} else {
   1643 				// zero extension
   1644 				switch 10*ft.Size() + tt.Size() {
   1645 				case 12:
   1646 					op = ssa.OpZeroExt8to16
   1647 				case 14:
   1648 					op = ssa.OpZeroExt8to32
   1649 				case 18:
   1650 					op = ssa.OpZeroExt8to64
   1651 				case 24:
   1652 					op = ssa.OpZeroExt16to32
   1653 				case 28:
   1654 					op = ssa.OpZeroExt16to64
   1655 				case 48:
   1656 					op = ssa.OpZeroExt32to64
   1657 				default:
   1658 					s.Fatalf("weird integer sign extension %v -> %v", ft, tt)
   1659 				}
   1660 			}
   1661 			return s.newValue1(op, n.Type, x)
   1662 		}
   1663 
   1664 		if ft.IsFloat() || tt.IsFloat() {
   1665 			conv, ok := fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]
   1666 			if s.config.IntSize == 4 && Thearch.LinkArch.Name != "amd64p32" && Thearch.LinkArch.Family != sys.MIPS {
   1667 				if conv1, ok1 := fpConvOpToSSA32[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
   1668 					conv = conv1
   1669 				}
   1670 			}
   1671 			if Thearch.LinkArch.Name == "arm64" {
   1672 				if conv1, ok1 := uint64fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
   1673 					conv = conv1
   1674 				}
   1675 			}
   1676 
   1677 			if Thearch.LinkArch.Family == sys.MIPS {
   1678 				if ft.Size() == 4 && ft.IsInteger() && !ft.IsSigned() {
   1679 					// tt is float32 or float64, and ft is also unsigned
   1680 					if tt.Size() == 4 {
   1681 						return s.uint32Tofloat32(n, x, ft, tt)
   1682 					}
   1683 					if tt.Size() == 8 {
   1684 						return s.uint32Tofloat64(n, x, ft, tt)
   1685 					}
   1686 				} else if tt.Size() == 4 && tt.IsInteger() && !tt.IsSigned() {
   1687 					// ft is float32 or float64, and tt is unsigned integer
   1688 					if ft.Size() == 4 {
   1689 						return s.float32ToUint32(n, x, ft, tt)
   1690 					}
   1691 					if ft.Size() == 8 {
   1692 						return s.float64ToUint32(n, x, ft, tt)
   1693 					}
   1694 				}
   1695 			}
   1696 
   1697 			if !ok {
   1698 				s.Fatalf("weird float conversion %v -> %v", ft, tt)
   1699 			}
   1700 			op1, op2, it := conv.op1, conv.op2, conv.intermediateType
   1701 
   1702 			if op1 != ssa.OpInvalid && op2 != ssa.OpInvalid {
   1703 				// normal case, not tripping over unsigned 64
   1704 				if op1 == ssa.OpCopy {
   1705 					if op2 == ssa.OpCopy {
   1706 						return x
   1707 					}
   1708 					return s.newValue1(op2, n.Type, x)
   1709 				}
   1710 				if op2 == ssa.OpCopy {
   1711 					return s.newValue1(op1, n.Type, x)
   1712 				}
   1713 				return s.newValue1(op2, n.Type, s.newValue1(op1, Types[it], x))
   1714 			}
   1715 			// Tricky 64-bit unsigned cases.
   1716 			if ft.IsInteger() {
   1717 				// tt is float32 or float64, and ft is also unsigned
   1718 				if tt.Size() == 4 {
   1719 					return s.uint64Tofloat32(n, x, ft, tt)
   1720 				}
   1721 				if tt.Size() == 8 {
   1722 					return s.uint64Tofloat64(n, x, ft, tt)
   1723 				}
   1724 				s.Fatalf("weird unsigned integer to float conversion %v -> %v", ft, tt)
   1725 			}
   1726 			// ft is float32 or float64, and tt is unsigned integer
   1727 			if ft.Size() == 4 {
   1728 				return s.float32ToUint64(n, x, ft, tt)
   1729 			}
   1730 			if ft.Size() == 8 {
   1731 				return s.float64ToUint64(n, x, ft, tt)
   1732 			}
   1733 			s.Fatalf("weird float to unsigned integer conversion %v -> %v", ft, tt)
   1734 			return nil
   1735 		}
   1736 
   1737 		if ft.IsComplex() && tt.IsComplex() {
   1738 			var op ssa.Op
   1739 			if ft.Size() == tt.Size() {
   1740 				op = ssa.OpCopy
   1741 			} else if ft.Size() == 8 && tt.Size() == 16 {
   1742 				op = ssa.OpCvt32Fto64F
   1743 			} else if ft.Size() == 16 && tt.Size() == 8 {
   1744 				op = ssa.OpCvt64Fto32F
   1745 			} else {
   1746 				s.Fatalf("weird complex conversion %v -> %v", ft, tt)
   1747 			}
   1748 			ftp := floatForComplex(ft)
   1749 			ttp := floatForComplex(tt)
   1750 			return s.newValue2(ssa.OpComplexMake, tt,
   1751 				s.newValue1(op, ttp, s.newValue1(ssa.OpComplexReal, ftp, x)),
   1752 				s.newValue1(op, ttp, s.newValue1(ssa.OpComplexImag, ftp, x)))
   1753 		}
   1754 
   1755 		s.Fatalf("unhandled OCONV %s -> %s", n.Left.Type.Etype, n.Type.Etype)
   1756 		return nil
   1757 
   1758 	case ODOTTYPE:
   1759 		res, _ := s.dottype(n, false)
   1760 		return res
   1761 
   1762 	// binary ops
   1763 	case OLT, OEQ, ONE, OLE, OGE, OGT:
   1764 		a := s.expr(n.Left)
   1765 		b := s.expr(n.Right)
   1766 		if n.Left.Type.IsComplex() {
   1767 			pt := floatForComplex(n.Left.Type)
   1768 			op := s.ssaOp(OEQ, pt)
   1769 			r := s.newValue2(op, Types[TBOOL], s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b))
   1770 			i := s.newValue2(op, Types[TBOOL], s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b))
   1771 			c := s.newValue2(ssa.OpAndB, Types[TBOOL], r, i)
   1772 			switch n.Op {
   1773 			case OEQ:
   1774 				return c
   1775 			case ONE:
   1776 				return s.newValue1(ssa.OpNot, Types[TBOOL], c)
   1777 			default:
   1778 				s.Fatalf("ordered complex compare %v", n.Op)
   1779 			}
   1780 		}
   1781 		return s.newValue2(s.ssaOp(n.Op, n.Left.Type), Types[TBOOL], a, b)
   1782 	case OMUL:
   1783 		a := s.expr(n.Left)
   1784 		b := s.expr(n.Right)
   1785 		if n.Type.IsComplex() {
   1786 			mulop := ssa.OpMul64F
   1787 			addop := ssa.OpAdd64F
   1788 			subop := ssa.OpSub64F
   1789 			pt := floatForComplex(n.Type) // Could be Float32 or Float64
   1790 			wt := Types[TFLOAT64]         // Compute in Float64 to minimize cancelation error
   1791 
   1792 			areal := s.newValue1(ssa.OpComplexReal, pt, a)
   1793 			breal := s.newValue1(ssa.OpComplexReal, pt, b)
   1794 			aimag := s.newValue1(ssa.OpComplexImag, pt, a)
   1795 			bimag := s.newValue1(ssa.OpComplexImag, pt, b)
   1796 
   1797 			if pt != wt { // Widen for calculation
   1798 				areal = s.newValue1(ssa.OpCvt32Fto64F, wt, areal)
   1799 				breal = s.newValue1(ssa.OpCvt32Fto64F, wt, breal)
   1800 				aimag = s.newValue1(ssa.OpCvt32Fto64F, wt, aimag)
   1801 				bimag = s.newValue1(ssa.OpCvt32Fto64F, wt, bimag)
   1802 			}
   1803 
   1804 			xreal := s.newValue2(subop, wt, s.newValue2(mulop, wt, areal, breal), s.newValue2(mulop, wt, aimag, bimag))
   1805 			ximag := s.newValue2(addop, wt, s.newValue2(mulop, wt, areal, bimag), s.newValue2(mulop, wt, aimag, breal))
   1806 
   1807 			if pt != wt { // Narrow to store back
   1808 				xreal = s.newValue1(ssa.OpCvt64Fto32F, pt, xreal)
   1809 				ximag = s.newValue1(ssa.OpCvt64Fto32F, pt, ximag)
   1810 			}
   1811 
   1812 			return s.newValue2(ssa.OpComplexMake, n.Type, xreal, ximag)
   1813 		}
   1814 		return s.newValue2(s.ssaOp(n.Op, n.Type), a.Type, a, b)
   1815 
   1816 	case ODIV:
   1817 		a := s.expr(n.Left)
   1818 		b := s.expr(n.Right)
   1819 		if n.Type.IsComplex() {
   1820 			// TODO this is not executed because the front-end substitutes a runtime call.
   1821 			// That probably ought to change; with modest optimization the widen/narrow
   1822 			// conversions could all be elided in larger expression trees.
   1823 			mulop := ssa.OpMul64F
   1824 			addop := ssa.OpAdd64F
   1825 			subop := ssa.OpSub64F
   1826 			divop := ssa.OpDiv64F
   1827 			pt := floatForComplex(n.Type) // Could be Float32 or Float64
   1828 			wt := Types[TFLOAT64]         // Compute in Float64 to minimize cancelation error
   1829 
   1830 			areal := s.newValue1(ssa.OpComplexReal, pt, a)
   1831 			breal := s.newValue1(ssa.OpComplexReal, pt, b)
   1832 			aimag := s.newValue1(ssa.OpComplexImag, pt, a)
   1833 			bimag := s.newValue1(ssa.OpComplexImag, pt, b)
   1834 
   1835 			if pt != wt { // Widen for calculation
   1836 				areal = s.newValue1(ssa.OpCvt32Fto64F, wt, areal)
   1837 				breal = s.newValue1(ssa.OpCvt32Fto64F, wt, breal)
   1838 				aimag = s.newValue1(ssa.OpCvt32Fto64F, wt, aimag)
   1839 				bimag = s.newValue1(ssa.OpCvt32Fto64F, wt, bimag)
   1840 			}
   1841 
   1842 			denom := s.newValue2(addop, wt, s.newValue2(mulop, wt, breal, breal), s.newValue2(mulop, wt, bimag, bimag))
   1843 			xreal := s.newValue2(addop, wt, s.newValue2(mulop, wt, areal, breal), s.newValue2(mulop, wt, aimag, bimag))
   1844 			ximag := s.newValue2(subop, wt, s.newValue2(mulop, wt, aimag, breal), s.newValue2(mulop, wt, areal, bimag))
   1845 
   1846 			// TODO not sure if this is best done in wide precision or narrow
   1847 			// Double-rounding might be an issue.
   1848 			// Note that the pre-SSA implementation does the entire calculation
   1849 			// in wide format, so wide is compatible.
   1850 			xreal = s.newValue2(divop, wt, xreal, denom)
   1851 			ximag = s.newValue2(divop, wt, ximag, denom)
   1852 
   1853 			if pt != wt { // Narrow to store back
   1854 				xreal = s.newValue1(ssa.OpCvt64Fto32F, pt, xreal)
   1855 				ximag = s.newValue1(ssa.OpCvt64Fto32F, pt, ximag)
   1856 			}
   1857 			return s.newValue2(ssa.OpComplexMake, n.Type, xreal, ximag)
   1858 		}
   1859 		if n.Type.IsFloat() {
   1860 			return s.newValue2(s.ssaOp(n.Op, n.Type), a.Type, a, b)
   1861 		}
   1862 		return s.intDivide(n, a, b)
   1863 	case OMOD:
   1864 		a := s.expr(n.Left)
   1865 		b := s.expr(n.Right)
   1866 		return s.intDivide(n, a, b)
   1867 	case OADD, OSUB:
   1868 		a := s.expr(n.Left)
   1869 		b := s.expr(n.Right)
   1870 		if n.Type.IsComplex() {
   1871 			pt := floatForComplex(n.Type)
   1872 			op := s.ssaOp(n.Op, pt)
   1873 			return s.newValue2(ssa.OpComplexMake, n.Type,
   1874 				s.newValue2(op, pt, s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b)),
   1875 				s.newValue2(op, pt, s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b)))
   1876 		}
   1877 		return s.newValue2(s.ssaOp(n.Op, n.Type), a.Type, a, b)
   1878 	case OAND, OOR, OHMUL, OXOR:
   1879 		a := s.expr(n.Left)
   1880 		b := s.expr(n.Right)
   1881 		return s.newValue2(s.ssaOp(n.Op, n.Type), a.Type, a, b)
   1882 	case OLSH, ORSH:
   1883 		a := s.expr(n.Left)
   1884 		b := s.expr(n.Right)
   1885 		return s.newValue2(s.ssaShiftOp(n.Op, n.Type, n.Right.Type), a.Type, a, b)
   1886 	case OLROT:
   1887 		a := s.expr(n.Left)
   1888 		i := n.Right.Int64()
   1889 		if i <= 0 || i >= n.Type.Size()*8 {
   1890 			s.Fatalf("Wrong rotate distance for LROT, expected 1 through %d, saw %d", n.Type.Size()*8-1, i)
   1891 		}
   1892 		return s.newValue1I(s.ssaRotateOp(n.Op, n.Type), a.Type, i, a)
   1893 	case OANDAND, OOROR:
   1894 		// To implement OANDAND (and OOROR), we introduce a
   1895 		// new temporary variable to hold the result. The
   1896 		// variable is associated with the OANDAND node in the
   1897 		// s.vars table (normally variables are only
   1898 		// associated with ONAME nodes). We convert
   1899 		//     A && B
   1900 		// to
   1901 		//     var = A
   1902 		//     if var {
   1903 		//         var = B
   1904 		//     }
   1905 		// Using var in the subsequent block introduces the
   1906 		// necessary phi variable.
   1907 		el := s.expr(n.Left)
   1908 		s.vars[n] = el
   1909 
   1910 		b := s.endBlock()
   1911 		b.Kind = ssa.BlockIf
   1912 		b.SetControl(el)
   1913 		// In theory, we should set b.Likely here based on context.
   1914 		// However, gc only gives us likeliness hints
   1915 		// in a single place, for plain OIF statements,
   1916 		// and passing around context is finnicky, so don't bother for now.
   1917 
   1918 		bRight := s.f.NewBlock(ssa.BlockPlain)
   1919 		bResult := s.f.NewBlock(ssa.BlockPlain)
   1920 		if n.Op == OANDAND {
   1921 			b.AddEdgeTo(bRight)
   1922 			b.AddEdgeTo(bResult)
   1923 		} else if n.Op == OOROR {
   1924 			b.AddEdgeTo(bResult)
   1925 			b.AddEdgeTo(bRight)
   1926 		}
   1927 
   1928 		s.startBlock(bRight)
   1929 		er := s.expr(n.Right)
   1930 		s.vars[n] = er
   1931 
   1932 		b = s.endBlock()
   1933 		b.AddEdgeTo(bResult)
   1934 
   1935 		s.startBlock(bResult)
   1936 		return s.variable(n, Types[TBOOL])
   1937 	case OCOMPLEX:
   1938 		r := s.expr(n.Left)
   1939 		i := s.expr(n.Right)
   1940 		return s.newValue2(ssa.OpComplexMake, n.Type, r, i)
   1941 
   1942 	// unary ops
   1943 	case OMINUS:
   1944 		a := s.expr(n.Left)
   1945 		if n.Type.IsComplex() {
   1946 			tp := floatForComplex(n.Type)
   1947 			negop := s.ssaOp(n.Op, tp)
   1948 			return s.newValue2(ssa.OpComplexMake, n.Type,
   1949 				s.newValue1(negop, tp, s.newValue1(ssa.OpComplexReal, tp, a)),
   1950 				s.newValue1(negop, tp, s.newValue1(ssa.OpComplexImag, tp, a)))
   1951 		}
   1952 		return s.newValue1(s.ssaOp(n.Op, n.Type), a.Type, a)
   1953 	case ONOT, OCOM, OSQRT:
   1954 		a := s.expr(n.Left)
   1955 		return s.newValue1(s.ssaOp(n.Op, n.Type), a.Type, a)
   1956 	case OIMAG, OREAL:
   1957 		a := s.expr(n.Left)
   1958 		return s.newValue1(s.ssaOp(n.Op, n.Left.Type), n.Type, a)
   1959 	case OPLUS:
   1960 		return s.expr(n.Left)
   1961 
   1962 	case OADDR:
   1963 		a, _ := s.addr(n.Left, n.Bounded)
   1964 		// Note we know the volatile result is false because you can't write &f() in Go.
   1965 		return a
   1966 
   1967 	case OINDREGSP:
   1968 		addr := s.entryNewValue1I(ssa.OpOffPtr, ptrto(n.Type), n.Xoffset, s.sp)
   1969 		return s.newValue2(ssa.OpLoad, n.Type, addr, s.mem())
   1970 
   1971 	case OIND:
   1972 		p := s.exprPtr(n.Left, false, n.Lineno)
   1973 		return s.newValue2(ssa.OpLoad, n.Type, p, s.mem())
   1974 
   1975 	case ODOT:
   1976 		t := n.Left.Type
   1977 		if canSSAType(t) {
   1978 			v := s.expr(n.Left)
   1979 			return s.newValue1I(ssa.OpStructSelect, n.Type, int64(fieldIdx(n)), v)
   1980 		}
   1981 		p, _ := s.addr(n, false)
   1982 		return s.newValue2(ssa.OpLoad, n.Type, p, s.mem())
   1983 
   1984 	case ODOTPTR:
   1985 		p := s.exprPtr(n.Left, false, n.Lineno)
   1986 		p = s.newValue1I(ssa.OpOffPtr, p.Type, n.Xoffset, p)
   1987 		return s.newValue2(ssa.OpLoad, n.Type, p, s.mem())
   1988 
   1989 	case OINDEX:
   1990 		switch {
   1991 		case n.Left.Type.IsString():
   1992 			if n.Bounded && Isconst(n.Left, CTSTR) && Isconst(n.Right, CTINT) {
   1993 				// Replace "abc"[1] with 'b'.
   1994 				// Delayed until now because "abc"[1] is not an ideal constant.
   1995 				// See test/fixedbugs/issue11370.go.
   1996 				return s.newValue0I(ssa.OpConst8, Types[TUINT8], int64(int8(n.Left.Val().U.(string)[n.Right.Int64()])))
   1997 			}
   1998 			a := s.expr(n.Left)
   1999 			i := s.expr(n.Right)
   2000 			i = s.extendIndex(i, panicindex)
   2001 			if !n.Bounded {
   2002 				len := s.newValue1(ssa.OpStringLen, Types[TINT], a)
   2003 				s.boundsCheck(i, len)
   2004 			}
   2005 			ptrtyp := ptrto(Types[TUINT8])
   2006 			ptr := s.newValue1(ssa.OpStringPtr, ptrtyp, a)
   2007 			if Isconst(n.Right, CTINT) {
   2008 				ptr = s.newValue1I(ssa.OpOffPtr, ptrtyp, n.Right.Int64(), ptr)
   2009 			} else {
   2010 				ptr = s.newValue2(ssa.OpAddPtr, ptrtyp, ptr, i)
   2011 			}
   2012 			return s.newValue2(ssa.OpLoad, Types[TUINT8], ptr, s.mem())
   2013 		case n.Left.Type.IsSlice():
   2014 			p, _ := s.addr(n, false)
   2015 			return s.newValue2(ssa.OpLoad, n.Left.Type.Elem(), p, s.mem())
   2016 		case n.Left.Type.IsArray():
   2017 			if bound := n.Left.Type.NumElem(); bound <= 1 {
   2018 				// SSA can handle arrays of length at most 1.
   2019 				a := s.expr(n.Left)
   2020 				i := s.expr(n.Right)
   2021 				if bound == 0 {
   2022 					// Bounds check will never succeed.  Might as well
   2023 					// use constants for the bounds check.
   2024 					z := s.constInt(Types[TINT], 0)
   2025 					s.boundsCheck(z, z)
   2026 					// The return value won't be live, return junk.
   2027 					return s.newValue0(ssa.OpUnknown, n.Type)
   2028 				}
   2029 				i = s.extendIndex(i, panicindex)
   2030 				s.boundsCheck(i, s.constInt(Types[TINT], bound))
   2031 				return s.newValue1I(ssa.OpArraySelect, n.Type, 0, a)
   2032 			}
   2033 			p, _ := s.addr(n, false)
   2034 			return s.newValue2(ssa.OpLoad, n.Left.Type.Elem(), p, s.mem())
   2035 		default:
   2036 			s.Fatalf("bad type for index %v", n.Left.Type)
   2037 			return nil
   2038 		}
   2039 
   2040 	case OLEN, OCAP:
   2041 		switch {
   2042 		case n.Left.Type.IsSlice():
   2043 			op := ssa.OpSliceLen
   2044 			if n.Op == OCAP {
   2045 				op = ssa.OpSliceCap
   2046 			}
   2047 			return s.newValue1(op, Types[TINT], s.expr(n.Left))
   2048 		case n.Left.Type.IsString(): // string; not reachable for OCAP
   2049 			return s.newValue1(ssa.OpStringLen, Types[TINT], s.expr(n.Left))
   2050 		case n.Left.Type.IsMap(), n.Left.Type.IsChan():
   2051 			return s.referenceTypeBuiltin(n, s.expr(n.Left))
   2052 		default: // array
   2053 			return s.constInt(Types[TINT], n.Left.Type.NumElem())
   2054 		}
   2055 
   2056 	case OSPTR:
   2057 		a := s.expr(n.Left)
   2058 		if n.Left.Type.IsSlice() {
   2059 			return s.newValue1(ssa.OpSlicePtr, n.Type, a)
   2060 		} else {
   2061 			return s.newValue1(ssa.OpStringPtr, n.Type, a)
   2062 		}
   2063 
   2064 	case OITAB:
   2065 		a := s.expr(n.Left)
   2066 		return s.newValue1(ssa.OpITab, n.Type, a)
   2067 
   2068 	case OIDATA:
   2069 		a := s.expr(n.Left)
   2070 		return s.newValue1(ssa.OpIData, n.Type, a)
   2071 
   2072 	case OEFACE:
   2073 		tab := s.expr(n.Left)
   2074 		data := s.expr(n.Right)
   2075 		return s.newValue2(ssa.OpIMake, n.Type, tab, data)
   2076 
   2077 	case OSLICE, OSLICEARR, OSLICE3, OSLICE3ARR:
   2078 		v := s.expr(n.Left)
   2079 		var i, j, k *ssa.Value
   2080 		low, high, max := n.SliceBounds()
   2081 		if low != nil {
   2082 			i = s.extendIndex(s.expr(low), panicslice)
   2083 		}
   2084 		if high != nil {
   2085 			j = s.extendIndex(s.expr(high), panicslice)
   2086 		}
   2087 		if max != nil {
   2088 			k = s.extendIndex(s.expr(max), panicslice)
   2089 		}
   2090 		p, l, c := s.slice(n.Left.Type, v, i, j, k)
   2091 		return s.newValue3(ssa.OpSliceMake, n.Type, p, l, c)
   2092 
   2093 	case OSLICESTR:
   2094 		v := s.expr(n.Left)
   2095 		var i, j *ssa.Value
   2096 		low, high, _ := n.SliceBounds()
   2097 		if low != nil {
   2098 			i = s.extendIndex(s.expr(low), panicslice)
   2099 		}
   2100 		if high != nil {
   2101 			j = s.extendIndex(s.expr(high), panicslice)
   2102 		}
   2103 		p, l, _ := s.slice(n.Left.Type, v, i, j, nil)
   2104 		return s.newValue2(ssa.OpStringMake, n.Type, p, l)
   2105 
   2106 	case OCALLFUNC:
   2107 		if isIntrinsicCall(n) {
   2108 			return s.intrinsicCall(n)
   2109 		}
   2110 		fallthrough
   2111 
   2112 	case OCALLINTER, OCALLMETH:
   2113 		a := s.call(n, callNormal)
   2114 		return s.newValue2(ssa.OpLoad, n.Type, a, s.mem())
   2115 
   2116 	case OGETG:
   2117 		return s.newValue1(ssa.OpGetG, n.Type, s.mem())
   2118 
   2119 	case OAPPEND:
   2120 		return s.append(n, false)
   2121 
   2122 	default:
   2123 		s.Fatalf("unhandled expr %v", n.Op)
   2124 		return nil
   2125 	}
   2126 }
   2127 
// append converts an OAPPEND node to SSA.
// If inplace is false, it converts the OAPPEND expression n to an ssa.Value,
// adds it to s, and returns the Value.
// If inplace is true, it writes the result of the OAPPEND expression n
// back to the slice being appended to, and returns nil.
// inplace MUST be set to false if the slice can be SSA'd.
func (s *state) append(n *Node, inplace bool) *ssa.Value {
	// If inplace is false, process as expression "append(s, e1, e2, e3)":
	//
	// ptr, len, cap := s
	// newlen := len + 3
	// if newlen > cap {
	//     ptr, len, cap = growslice(s, newlen)
	//     newlen = len + 3 // recalculate to avoid a spill
	// }
	// // with write barriers, if needed:
	// *(ptr+len) = e1
	// *(ptr+len+1) = e2
	// *(ptr+len+2) = e3
	// return makeslice(ptr, newlen, cap)
	//
	//
	// If inplace is true, process as statement "s = append(s, e1, e2, e3)":
	//
	// a := &s
	// ptr, len, cap := s
	// newlen := len + 3
	// if newlen > cap {
	//    newptr, len, newcap = growslice(ptr, len, cap, newlen)
	//    vardef(a)       // if necessary, advise liveness we are writing a new a
	//    *a.cap = newcap // write before ptr to avoid a spill
	//    *a.ptr = newptr // with write barrier
	// }
	// newlen = len + 3 // recalculate to avoid a spill
	// *a.len = newlen
	// // with write barriers, if needed:
	// *(ptr+len) = e1
	// *(ptr+len+1) = e2
	// *(ptr+len+2) = e3

	et := n.Type.Elem() // element type of the slice being appended to
	pt := ptrto(et)     // *et, the type of the slice's data pointer

	// Evaluate slice
	sn := n.List.First() // the slice node is the first in the list

	var slice, addr *ssa.Value
	if inplace {
		// In-place: we need the slice's address so we can store the
		// updated header fields back through it.
		addr, _ = s.addr(sn, false)
		slice = s.newValue2(ssa.OpLoad, n.Type, addr, s.mem())
	} else {
		slice = s.expr(sn)
	}

	// Allocate new blocks
	grow := s.f.NewBlock(ssa.BlockPlain)
	assign := s.f.NewBlock(ssa.BlockPlain)

	// Decide if we need to grow
	nargs := int64(n.List.Len() - 1) // number of elements being appended
	p := s.newValue1(ssa.OpSlicePtr, pt, slice)
	l := s.newValue1(ssa.OpSliceLen, Types[TINT], slice)
	c := s.newValue1(ssa.OpSliceCap, Types[TINT], slice)
	nl := s.newValue2(s.ssaOp(OADD, Types[TINT]), Types[TINT], l, s.constInt(Types[TINT], nargs))

	cmp := s.newValue2(s.ssaOp(OGT, Types[TINT]), Types[TBOOL], nl, c)
	s.vars[&ptrVar] = p

	// Record the values that flow into the merge point so that
	// s.variable can generate phis for them in the assign block.
	if !inplace {
		s.vars[&newlenVar] = nl
		s.vars[&capVar] = c
	} else {
		s.vars[&lenVar] = l
	}

	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.Likely = ssa.BranchUnlikely // growing is the slow path
	b.SetControl(cmp)
	b.AddEdgeTo(grow)
	b.AddEdgeTo(assign)

	// Call growslice
	s.startBlock(grow)
	// taddr is the address of the element type descriptor, growslice's first argument.
	taddr := s.newValue1A(ssa.OpAddr, Types[TUINTPTR], &ssa.ExternSymbol{Typ: Types[TUINTPTR], Sym: typenamesym(n.Type.Elem())}, s.sb)

	r := s.rtcall(growslice, true, []*Type{pt, Types[TINT], Types[TINT]}, taddr, p, l, c, nl)

	if inplace {
		if sn.Op == ONAME {
			// Tell liveness we're about to build a new slice
			s.vars[&memVar] = s.newValue1A(ssa.OpVarDef, ssa.TypeMem, sn, s.mem())
		}
		// Store the new cap before the new ptr (see the scheme in the
		// function comment above: "write before ptr to avoid a spill").
		capaddr := s.newValue1I(ssa.OpOffPtr, pt, int64(array_cap), addr)
		s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, s.config.IntSize, capaddr, r[2], s.mem())
		if ssa.IsStackAddr(addr) {
			// Stack slots never need a write barrier.
			s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, pt.Size(), addr, r[0], s.mem())
		} else {
			s.insertWBstore(pt, addr, r[0], n.Lineno, 0)
		}
		// load the value we just stored to avoid having to spill it
		s.vars[&ptrVar] = s.newValue2(ssa.OpLoad, pt, addr, s.mem())
		s.vars[&lenVar] = r[1] // avoid a spill in the fast path
	} else {
		s.vars[&ptrVar] = r[0]
		// Recompute newlen from growslice's returned len to avoid a spill.
		s.vars[&newlenVar] = s.newValue2(s.ssaOp(OADD, Types[TINT]), Types[TINT], r[1], s.constInt(Types[TINT], nargs))
		s.vars[&capVar] = r[2]
	}

	b = s.endBlock()
	b.AddEdgeTo(assign)

	// assign new elements to slots
	s.startBlock(assign)

	if inplace {
		// Store the updated length back through the slice's address.
		l = s.variable(&lenVar, Types[TINT]) // generates phi for len
		nl = s.newValue2(s.ssaOp(OADD, Types[TINT]), Types[TINT], l, s.constInt(Types[TINT], nargs))
		lenaddr := s.newValue1I(ssa.OpOffPtr, pt, int64(array_nel), addr)
		s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, s.config.IntSize, lenaddr, nl, s.mem())
	}

	// Evaluate args
	type argRec struct {
		// if store is true, we're appending the value v.  If false, we're appending the
		// value at *v.  If store==false, isVolatile reports whether the source
		// is in the outargs section of the stack frame.
		v          *ssa.Value
		store      bool
		isVolatile bool
	}
	args := make([]argRec, 0, nargs)
	for _, n := range n.List.Slice()[1:] {
		if canSSAType(n.Type) {
			args = append(args, argRec{v: s.expr(n), store: true})
		} else {
			// Non-SSA-able elements are copied from their address (mem->mem move).
			v, isVolatile := s.addr(n, false)
			args = append(args, argRec{v: v, isVolatile: isVolatile})
		}
	}

	p = s.variable(&ptrVar, pt) // generates phi for ptr
	if !inplace {
		nl = s.variable(&newlenVar, Types[TINT]) // generates phi for nl
		c = s.variable(&capVar, Types[TINT])     // generates phi for cap
	}
	// p2 = &p[l], the address of the first appended slot.
	p2 := s.newValue2(ssa.OpPtrIndex, pt, p, l)
	// TODO: just one write barrier call for all of these writes?
	// TODO: maybe just one writeBarrier.enabled check?
	for i, arg := range args {
		addr := s.newValue2(ssa.OpPtrIndex, pt, p2, s.constInt(Types[TINT], int64(i)))
		if arg.store {
			if haspointers(et) {
				s.insertWBstore(et, addr, arg.v, n.Lineno, 0)
			} else {
				s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, et.Size(), addr, arg.v, s.mem())
			}
		} else {
			if haspointers(et) {
				s.insertWBmove(et, addr, arg.v, n.Lineno, arg.isVolatile)
			} else {
				s.vars[&memVar] = s.newValue3I(ssa.OpMove, ssa.TypeMem, sizeAlignAuxInt(et), addr, arg.v, s.mem())
			}
		}
	}

	// Clean up the temporaries from s.vars; they are dead past this point.
	delete(s.vars, &ptrVar)
	if inplace {
		delete(s.vars, &lenVar)
		return nil
	}
	delete(s.vars, &newlenVar)
	delete(s.vars, &capVar)
	// make result
	return s.newValue3(ssa.OpSliceMake, n.Type, p, nl, c)
}
   2304 
   2305 // condBranch evaluates the boolean expression cond and branches to yes
   2306 // if cond is true and no if cond is false.
   2307 // This function is intended to handle && and || better than just calling
   2308 // s.expr(cond) and branching on the result.
   2309 func (s *state) condBranch(cond *Node, yes, no *ssa.Block, likely int8) {
   2310 	if cond.Op == OANDAND {
   2311 		mid := s.f.NewBlock(ssa.BlockPlain)
   2312 		s.stmtList(cond.Ninit)
   2313 		s.condBranch(cond.Left, mid, no, max8(likely, 0))
   2314 		s.startBlock(mid)
   2315 		s.condBranch(cond.Right, yes, no, likely)
   2316 		return
   2317 		// Note: if likely==1, then both recursive calls pass 1.
   2318 		// If likely==-1, then we don't have enough information to decide
   2319 		// whether the first branch is likely or not. So we pass 0 for
   2320 		// the likeliness of the first branch.
   2321 		// TODO: have the frontend give us branch prediction hints for
   2322 		// OANDAND and OOROR nodes (if it ever has such info).
   2323 	}
   2324 	if cond.Op == OOROR {
   2325 		mid := s.f.NewBlock(ssa.BlockPlain)
   2326 		s.stmtList(cond.Ninit)
   2327 		s.condBranch(cond.Left, yes, mid, min8(likely, 0))
   2328 		s.startBlock(mid)
   2329 		s.condBranch(cond.Right, yes, no, likely)
   2330 		return
   2331 		// Note: if likely==-1, then both recursive calls pass -1.
   2332 		// If likely==1, then we don't have enough info to decide
   2333 		// the likelihood of the first branch.
   2334 	}
   2335 	if cond.Op == ONOT {
   2336 		s.stmtList(cond.Ninit)
   2337 		s.condBranch(cond.Left, no, yes, -likely)
   2338 		return
   2339 	}
   2340 	c := s.expr(cond)
   2341 	b := s.endBlock()
   2342 	b.Kind = ssa.BlockIf
   2343 	b.SetControl(c)
   2344 	b.Likely = ssa.BranchPrediction(likely) // gc and ssa both use -1/0/+1 for likeliness
   2345 	b.AddEdgeTo(yes)
   2346 	b.AddEdgeTo(no)
   2347 }
   2348 
// skipMask is a bitmask of slice-header components (pointer, length,
// capacity) whose top-level assignment can be avoided; it is consumed
// by state.assign's skip parameter.
type skipMask uint8

const (
	skipPtr skipMask = 1 << iota // the pointer assignment can be skipped
	skipLen                      // the length assignment can be skipped
	skipCap                      // the capacity assignment can be skipped
)
   2356 
   2357 // assign does left = right.
   2358 // Right has already been evaluated to ssa, left has not.
   2359 // If deref is true, then we do left = *right instead (and right has already been nil-checked).
   2360 // If deref is true and right == nil, just do left = 0.
   2361 // If deref is true, rightIsVolatile reports whether right points to volatile (clobbered by a call) storage.
   2362 // Include a write barrier if wb is true.
   2363 // skip indicates assignments (at the top level) that can be avoided.
func (s *state) assign(left *Node, right *ssa.Value, wb, deref bool, line int32, skip skipMask, rightIsVolatile bool) {
	if left.Op == ONAME && isblank(left) {
		// Assignment to the blank identifier: evaluate nothing, store nothing.
		return
	}
	t := left.Type
	dowidth(t)
	if s.canSSA(left) {
		if deref {
			// A deref (mem->mem) assignment has no SSA value for the RHS,
			// so the LHS being SSA-able is a contradiction.
			s.Fatalf("can SSA LHS %v but not RHS %s", left, right)
		}
		if left.Op == ODOT {
			// We're assigning to a field of an ssa-able value.
			// We need to build a new structure with the new value for the
			// field we're assigning and the old values for the other fields.
			// For instance:
			//   type T struct {a, b, c int}
			//   var T x
			//   x.b = 5
			// For the x.b = 5 assignment we want to generate x = T{x.a, 5, x.c}

			// Grab information about the structure type.
			t := left.Left.Type
			nf := t.NumFields()
			idx := fieldIdx(left)

			// Grab old value of structure.
			old := s.expr(left.Left)

			// Make new structure.
			new := s.newValue0(ssa.StructMakeOp(t.NumFields()), t)

			// Add fields as args: the new value for the assigned field,
			// StructSelects of the old value for all others.
			for i := 0; i < nf; i++ {
				if i == idx {
					new.AddArg(right)
				} else {
					new.AddArg(s.newValue1I(ssa.OpStructSelect, t.FieldType(i), int64(i), old))
				}
			}

			// Recursively assign the new value we've made to the base of the dot op.
			s.assign(left.Left, new, false, false, line, 0, rightIsVolatile)
			// TODO: do we need to update named values here?
			return
		}
		if left.Op == OINDEX && left.Left.Type.IsArray() {
			// We're assigning to an element of an ssa-able array.
			// a[i] = v
			t := left.Left.Type
			n := t.NumElem()

			i := s.expr(left.Right) // index
			if n == 0 {
				// The bounds check must fail.  Might as well
				// ignore the actual index and just use zeros.
				z := s.constInt(Types[TINT], 0)
				s.boundsCheck(z, z)
				return
			}
			if n != 1 {
				// Arrays with more than one element are not SSA-able,
				// so canSSA should never have let us get here.
				s.Fatalf("assigning to non-1-length array")
			}
			// Rewrite to a = [1]{v}
			i = s.extendIndex(i, panicindex)
			s.boundsCheck(i, s.constInt(Types[TINT], 1))
			v := s.newValue1(ssa.OpArrayMake1, t, right)
			s.assign(left.Left, v, false, false, line, 0, rightIsVolatile)
			return
		}
		// Update variable assignment.
		s.vars[left] = right
		s.addNamedValue(left, right)
		return
	}
	// Left is not ssa-able. Compute its address.
	addr, _ := s.addr(left, false)
	if left.Op == ONAME && skip == 0 {
		// Mark the start of the variable's live range for liveness analysis.
		s.vars[&memVar] = s.newValue1A(ssa.OpVarDef, ssa.TypeMem, left, s.mem())
	}
	if deref {
		// Treat as a mem->mem move.
		if wb && !ssa.IsStackAddr(addr) {
			s.insertWBmove(t, addr, right, line, rightIsVolatile)
			return
		}
		if right == nil {
			// deref with a nil source means "zero the destination".
			s.vars[&memVar] = s.newValue2I(ssa.OpZero, ssa.TypeMem, sizeAlignAuxInt(t), addr, s.mem())
			return
		}
		s.vars[&memVar] = s.newValue3I(ssa.OpMove, ssa.TypeMem, sizeAlignAuxInt(t), addr, right, s.mem())
		return
	}
	// Treat as a store.
	if wb && !ssa.IsStackAddr(addr) {
		if skip&skipPtr != 0 {
			// Special case: if we don't write back the pointers, don't bother
			// doing the write barrier check.
			s.storeTypeScalars(t, addr, right, skip)
			return
		}
		s.insertWBstore(t, addr, right, line, skip)
		return
	}
	if skip != 0 {
		// Partial store: write back only the parts not masked off by skip.
		if skip&skipPtr == 0 {
			s.storeTypePtrs(t, addr, right)
		}
		s.storeTypeScalars(t, addr, right, skip)
		return
	}
	s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, t.Size(), addr, right, s.mem())
}
   2476 
   2477 // zeroVal returns the zero value for type t.
   2478 func (s *state) zeroVal(t *Type) *ssa.Value {
   2479 	switch {
   2480 	case t.IsInteger():
   2481 		switch t.Size() {
   2482 		case 1:
   2483 			return s.constInt8(t, 0)
   2484 		case 2:
   2485 			return s.constInt16(t, 0)
   2486 		case 4:
   2487 			return s.constInt32(t, 0)
   2488 		case 8:
   2489 			return s.constInt64(t, 0)
   2490 		default:
   2491 			s.Fatalf("bad sized integer type %v", t)
   2492 		}
   2493 	case t.IsFloat():
   2494 		switch t.Size() {
   2495 		case 4:
   2496 			return s.constFloat32(t, 0)
   2497 		case 8:
   2498 			return s.constFloat64(t, 0)
   2499 		default:
   2500 			s.Fatalf("bad sized float type %v", t)
   2501 		}
   2502 	case t.IsComplex():
   2503 		switch t.Size() {
   2504 		case 8:
   2505 			z := s.constFloat32(Types[TFLOAT32], 0)
   2506 			return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
   2507 		case 16:
   2508 			z := s.constFloat64(Types[TFLOAT64], 0)
   2509 			return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
   2510 		default:
   2511 			s.Fatalf("bad sized complex type %v", t)
   2512 		}
   2513 
   2514 	case t.IsString():
   2515 		return s.constEmptyString(t)
   2516 	case t.IsPtrShaped():
   2517 		return s.constNil(t)
   2518 	case t.IsBoolean():
   2519 		return s.constBool(false)
   2520 	case t.IsInterface():
   2521 		return s.constInterface(t)
   2522 	case t.IsSlice():
   2523 		return s.constSlice(t)
   2524 	case t.IsStruct():
   2525 		n := t.NumFields()
   2526 		v := s.entryNewValue0(ssa.StructMakeOp(t.NumFields()), t)
   2527 		for i := 0; i < n; i++ {
   2528 			v.AddArg(s.zeroVal(t.FieldType(i).(*Type)))
   2529 		}
   2530 		return v
   2531 	case t.IsArray():
   2532 		switch t.NumElem() {
   2533 		case 0:
   2534 			return s.entryNewValue0(ssa.OpArrayMake0, t)
   2535 		case 1:
   2536 			return s.entryNewValue1(ssa.OpArrayMake1, t, s.zeroVal(t.Elem()))
   2537 		}
   2538 	}
   2539 	s.Fatalf("zero for type %v not implemented", t)
   2540 	return nil
   2541 }
   2542 
// callKind distinguishes the three ways a call is emitted.
type callKind int8

const (
	callNormal callKind = iota // ordinary call
	callDefer                  // call deferred via a defer statement
	callGo                     // call launched via a go statement
)

// TODO: make this a field of a configuration object instead of a global.
var intrinsics *intrinsicInfo

// intrinsicInfo holds the tables of recognized intrinsic functions,
// keyed either by (pkg, fn) alone or by (pkg, fn, width) for functions
// whose lowering depends on the size of int or uintptr.
type intrinsicInfo struct {
	std      map[intrinsicKey]intrinsicBuilder
	intSized map[sizedIntrinsicKey]intrinsicBuilder
	ptrSized map[sizedIntrinsicKey]intrinsicBuilder
}

// An intrinsicBuilder converts a call node n into an ssa value that
// implements that call as an intrinsic. args is a list of arguments to the func.
type intrinsicBuilder func(s *state, n *Node, args []*ssa.Value) *ssa.Value

// intrinsicKey identifies an intrinsic by package path and function name.
type intrinsicKey struct {
	pkg string
	fn  string
}

// sizedIntrinsicKey additionally records a width in bytes, for intrinsics
// whose implementation varies with the size of int or uintptr.
type sizedIntrinsicKey struct {
	pkg  string
	fn   string
	size int
}
   2574 
   2575 // disableForInstrumenting returns nil when instrumenting, fn otherwise
   2576 func disableForInstrumenting(fn intrinsicBuilder) intrinsicBuilder {
   2577 	if instrumenting {
   2578 		return nil
   2579 	}
   2580 	return fn
   2581 }
   2582 
   2583 // enableOnArch returns fn on given archs, nil otherwise
   2584 func enableOnArch(fn intrinsicBuilder, archs ...sys.ArchFamily) intrinsicBuilder {
   2585 	if Thearch.LinkArch.InFamily(archs...) {
   2586 		return fn
   2587 	}
   2588 	return nil
   2589 }
   2590 
// intrinsicInit populates the global intrinsics table with builders that
// expand recognized runtime/sync/atomic/math functions directly into SSA
// ops instead of emitting calls.
func intrinsicInit() {
	i := &intrinsicInfo{}
	intrinsics = i

	// initial set of intrinsics.
	i.std = map[intrinsicKey]intrinsicBuilder{
		/******** runtime ********/
		intrinsicKey{"runtime", "slicebytetostringtmp"}: disableForInstrumenting(func(s *state, n *Node, args []*ssa.Value) *ssa.Value {
			// Compiler frontend optimizations emit OARRAYBYTESTRTMP nodes
			// for the backend instead of slicebytetostringtmp calls
			// when not instrumenting.
			slice := args[0]
			ptr := s.newValue1(ssa.OpSlicePtr, ptrto(Types[TUINT8]), slice)
			len := s.newValue1(ssa.OpSliceLen, Types[TINT], slice)
			return s.newValue2(ssa.OpStringMake, n.Type, ptr, len)
		}),
		intrinsicKey{"runtime", "KeepAlive"}: func(s *state, n *Node, args []*ssa.Value) *ssa.Value {
			// KeepAlive takes an interface; the IData word is what must stay live.
			data := s.newValue1(ssa.OpIData, ptrto(Types[TUINT8]), args[0])
			s.vars[&memVar] = s.newValue2(ssa.OpKeepAlive, ssa.TypeMem, data, s.mem())
			return nil
		},

		/******** runtime/internal/sys ********/
		intrinsicKey{"runtime/internal/sys", "Ctz32"}: enableOnArch(func(s *state, n *Node, args []*ssa.Value) *ssa.Value {
			return s.newValue1(ssa.OpCtz32, Types[TUINT32], args[0])
		}, sys.AMD64, sys.ARM64, sys.ARM, sys.S390X, sys.MIPS),
		intrinsicKey{"runtime/internal/sys", "Ctz64"}: enableOnArch(func(s *state, n *Node, args []*ssa.Value) *ssa.Value {
			return s.newValue1(ssa.OpCtz64, Types[TUINT64], args[0])
		}, sys.AMD64, sys.ARM64, sys.ARM, sys.S390X, sys.MIPS),
		intrinsicKey{"runtime/internal/sys", "Bswap32"}: enableOnArch(func(s *state, n *Node, args []*ssa.Value) *ssa.Value {
			return s.newValue1(ssa.OpBswap32, Types[TUINT32], args[0])
		}, sys.AMD64, sys.ARM64, sys.ARM, sys.S390X),
		intrinsicKey{"runtime/internal/sys", "Bswap64"}: enableOnArch(func(s *state, n *Node, args []*ssa.Value) *ssa.Value {
			return s.newValue1(ssa.OpBswap64, Types[TUINT64], args[0])
		}, sys.AMD64, sys.ARM64, sys.ARM, sys.S390X),

		/******** runtime/internal/atomic ********/
		// The atomic ops produce a (result, memory) tuple; Select1 threads
		// the memory state and Select0 extracts the loaded/old value.
		intrinsicKey{"runtime/internal/atomic", "Load"}: enableOnArch(func(s *state, n *Node, args []*ssa.Value) *ssa.Value {
			v := s.newValue2(ssa.OpAtomicLoad32, ssa.MakeTuple(Types[TUINT32], ssa.TypeMem), args[0], s.mem())
			s.vars[&memVar] = s.newValue1(ssa.OpSelect1, ssa.TypeMem, v)
			return s.newValue1(ssa.OpSelect0, Types[TUINT32], v)
		}, sys.AMD64, sys.ARM64, sys.S390X, sys.MIPS),
		intrinsicKey{"runtime/internal/atomic", "Load64"}: enableOnArch(func(s *state, n *Node, args []*ssa.Value) *ssa.Value {
			v := s.newValue2(ssa.OpAtomicLoad64, ssa.MakeTuple(Types[TUINT64], ssa.TypeMem), args[0], s.mem())
			s.vars[&memVar] = s.newValue1(ssa.OpSelect1, ssa.TypeMem, v)
			return s.newValue1(ssa.OpSelect0, Types[TUINT64], v)
		}, sys.AMD64, sys.ARM64, sys.S390X),
		intrinsicKey{"runtime/internal/atomic", "Loadp"}: enableOnArch(func(s *state, n *Node, args []*ssa.Value) *ssa.Value {
			v := s.newValue2(ssa.OpAtomicLoadPtr, ssa.MakeTuple(ptrto(Types[TUINT8]), ssa.TypeMem), args[0], s.mem())
			s.vars[&memVar] = s.newValue1(ssa.OpSelect1, ssa.TypeMem, v)
			return s.newValue1(ssa.OpSelect0, ptrto(Types[TUINT8]), v)
		}, sys.AMD64, sys.ARM64, sys.S390X, sys.MIPS),

		intrinsicKey{"runtime/internal/atomic", "Store"}: enableOnArch(func(s *state, n *Node, args []*ssa.Value) *ssa.Value {
			s.vars[&memVar] = s.newValue3(ssa.OpAtomicStore32, ssa.TypeMem, args[0], args[1], s.mem())
			return nil
		}, sys.AMD64, sys.ARM64, sys.S390X, sys.MIPS),
		intrinsicKey{"runtime/internal/atomic", "Store64"}: enableOnArch(func(s *state, n *Node, args []*ssa.Value) *ssa.Value {
			s.vars[&memVar] = s.newValue3(ssa.OpAtomicStore64, ssa.TypeMem, args[0], args[1], s.mem())
			return nil
		}, sys.AMD64, sys.ARM64, sys.S390X),
		intrinsicKey{"runtime/internal/atomic", "StorepNoWB"}: enableOnArch(func(s *state, n *Node, args []*ssa.Value) *ssa.Value {
			s.vars[&memVar] = s.newValue3(ssa.OpAtomicStorePtrNoWB, ssa.TypeMem, args[0], args[1], s.mem())
			return nil
		}, sys.AMD64, sys.ARM64, sys.S390X, sys.MIPS),

		intrinsicKey{"runtime/internal/atomic", "Xchg"}: enableOnArch(func(s *state, n *Node, args []*ssa.Value) *ssa.Value {
			v := s.newValue3(ssa.OpAtomicExchange32, ssa.MakeTuple(Types[TUINT32], ssa.TypeMem), args[0], args[1], s.mem())
			s.vars[&memVar] = s.newValue1(ssa.OpSelect1, ssa.TypeMem, v)
			return s.newValue1(ssa.OpSelect0, Types[TUINT32], v)
		}, sys.AMD64, sys.ARM64, sys.S390X, sys.MIPS),
		intrinsicKey{"runtime/internal/atomic", "Xchg64"}: enableOnArch(func(s *state, n *Node, args []*ssa.Value) *ssa.Value {
			v := s.newValue3(ssa.OpAtomicExchange64, ssa.MakeTuple(Types[TUINT64], ssa.TypeMem), args[0], args[1], s.mem())
			s.vars[&memVar] = s.newValue1(ssa.OpSelect1, ssa.TypeMem, v)
			return s.newValue1(ssa.OpSelect0, Types[TUINT64], v)
		}, sys.AMD64, sys.ARM64, sys.S390X),

		intrinsicKey{"runtime/internal/atomic", "Xadd"}: enableOnArch(func(s *state, n *Node, args []*ssa.Value) *ssa.Value {
			v := s.newValue3(ssa.OpAtomicAdd32, ssa.MakeTuple(Types[TUINT32], ssa.TypeMem), args[0], args[1], s.mem())
			s.vars[&memVar] = s.newValue1(ssa.OpSelect1, ssa.TypeMem, v)
			return s.newValue1(ssa.OpSelect0, Types[TUINT32], v)
		}, sys.AMD64, sys.ARM64, sys.S390X, sys.MIPS),
		intrinsicKey{"runtime/internal/atomic", "Xadd64"}: enableOnArch(func(s *state, n *Node, args []*ssa.Value) *ssa.Value {
			v := s.newValue3(ssa.OpAtomicAdd64, ssa.MakeTuple(Types[TUINT64], ssa.TypeMem), args[0], args[1], s.mem())
			s.vars[&memVar] = s.newValue1(ssa.OpSelect1, ssa.TypeMem, v)
			return s.newValue1(ssa.OpSelect0, Types[TUINT64], v)
		}, sys.AMD64, sys.ARM64, sys.S390X),

		intrinsicKey{"runtime/internal/atomic", "Cas"}: enableOnArch(func(s *state, n *Node, args []*ssa.Value) *ssa.Value {
			v := s.newValue4(ssa.OpAtomicCompareAndSwap32, ssa.MakeTuple(Types[TBOOL], ssa.TypeMem), args[0], args[1], args[2], s.mem())
			s.vars[&memVar] = s.newValue1(ssa.OpSelect1, ssa.TypeMem, v)
			return s.newValue1(ssa.OpSelect0, Types[TBOOL], v)
		}, sys.AMD64, sys.ARM64, sys.S390X, sys.MIPS),
		intrinsicKey{"runtime/internal/atomic", "Cas64"}: enableOnArch(func(s *state, n *Node, args []*ssa.Value) *ssa.Value {
			v := s.newValue4(ssa.OpAtomicCompareAndSwap64, ssa.MakeTuple(Types[TBOOL], ssa.TypeMem), args[0], args[1], args[2], s.mem())
			s.vars[&memVar] = s.newValue1(ssa.OpSelect1, ssa.TypeMem, v)
			return s.newValue1(ssa.OpSelect0, Types[TBOOL], v)
		}, sys.AMD64, sys.ARM64, sys.S390X),

		intrinsicKey{"runtime/internal/atomic", "And8"}: enableOnArch(func(s *state, n *Node, args []*ssa.Value) *ssa.Value {
			s.vars[&memVar] = s.newValue3(ssa.OpAtomicAnd8, ssa.TypeMem, args[0], args[1], s.mem())
			return nil
		}, sys.AMD64, sys.ARM64, sys.MIPS),
		intrinsicKey{"runtime/internal/atomic", "Or8"}: enableOnArch(func(s *state, n *Node, args []*ssa.Value) *ssa.Value {
			s.vars[&memVar] = s.newValue3(ssa.OpAtomicOr8, ssa.TypeMem, args[0], args[1], s.mem())
			return nil
		}, sys.AMD64, sys.ARM64, sys.MIPS),
	}

	// aliases internal to runtime/internal/atomic
	i.std[intrinsicKey{"runtime/internal/atomic", "Loadint64"}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Load64"}]
	i.std[intrinsicKey{"runtime/internal/atomic", "Xaddint64"}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Xadd64"}]

	// intrinsics which vary depending on the size of int/ptr.
	i.intSized = map[sizedIntrinsicKey]intrinsicBuilder{
		sizedIntrinsicKey{"runtime/internal/atomic", "Loaduint", 4}: i.std[intrinsicKey{"runtime/internal/atomic", "Load"}],
		sizedIntrinsicKey{"runtime/internal/atomic", "Loaduint", 8}: i.std[intrinsicKey{"runtime/internal/atomic", "Load64"}],
	}
	i.ptrSized = map[sizedIntrinsicKey]intrinsicBuilder{
		sizedIntrinsicKey{"runtime/internal/atomic", "Loaduintptr", 4}:  i.std[intrinsicKey{"runtime/internal/atomic", "Load"}],
		sizedIntrinsicKey{"runtime/internal/atomic", "Loaduintptr", 8}:  i.std[intrinsicKey{"runtime/internal/atomic", "Load64"}],
		sizedIntrinsicKey{"runtime/internal/atomic", "Storeuintptr", 4}: i.std[intrinsicKey{"runtime/internal/atomic", "Store"}],
		sizedIntrinsicKey{"runtime/internal/atomic", "Storeuintptr", 8}: i.std[intrinsicKey{"runtime/internal/atomic", "Store64"}],
		sizedIntrinsicKey{"runtime/internal/atomic", "Xchguintptr", 4}:  i.std[intrinsicKey{"runtime/internal/atomic", "Xchg"}],
		sizedIntrinsicKey{"runtime/internal/atomic", "Xchguintptr", 8}:  i.std[intrinsicKey{"runtime/internal/atomic", "Xchg64"}],
		sizedIntrinsicKey{"runtime/internal/atomic", "Xadduintptr", 4}:  i.std[intrinsicKey{"runtime/internal/atomic", "Xadd"}],
		sizedIntrinsicKey{"runtime/internal/atomic", "Xadduintptr", 8}:  i.std[intrinsicKey{"runtime/internal/atomic", "Xadd64"}],
		sizedIntrinsicKey{"runtime/internal/atomic", "Casuintptr", 4}:   i.std[intrinsicKey{"runtime/internal/atomic", "Cas"}],
		sizedIntrinsicKey{"runtime/internal/atomic", "Casuintptr", 8}:   i.std[intrinsicKey{"runtime/internal/atomic", "Cas64"}],
		sizedIntrinsicKey{"runtime/internal/atomic", "Casp1", 4}:        i.std[intrinsicKey{"runtime/internal/atomic", "Cas"}],
		sizedIntrinsicKey{"runtime/internal/atomic", "Casp1", 8}:        i.std[intrinsicKey{"runtime/internal/atomic", "Cas64"}],
	}

	/******** sync/atomic ********/
	if flag_race {
		// The race detector needs to be able to intercept these calls.
		// We can't intrinsify them.
		return
	}
	// these are all aliases to runtime/internal/atomic implementations.
	i.std[intrinsicKey{"sync/atomic", "LoadInt32"}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Load"}]
	i.std[intrinsicKey{"sync/atomic", "LoadInt64"}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Load64"}]
	i.std[intrinsicKey{"sync/atomic", "LoadPointer"}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Loadp"}]
	i.std[intrinsicKey{"sync/atomic", "LoadUint32"}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Load"}]
	i.std[intrinsicKey{"sync/atomic", "LoadUint64"}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Load64"}]
	i.ptrSized[sizedIntrinsicKey{"sync/atomic", "LoadUintptr", 4}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Load"}]
	i.ptrSized[sizedIntrinsicKey{"sync/atomic", "LoadUintptr", 8}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Load64"}]

	i.std[intrinsicKey{"sync/atomic", "StoreInt32"}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Store"}]
	i.std[intrinsicKey{"sync/atomic", "StoreInt64"}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Store64"}]
	// Note: not StorePointer, that needs a write barrier.  Same below for {CompareAnd}Swap.
	i.std[intrinsicKey{"sync/atomic", "StoreUint32"}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Store"}]
	i.std[intrinsicKey{"sync/atomic", "StoreUint64"}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Store64"}]
	i.ptrSized[sizedIntrinsicKey{"sync/atomic", "StoreUintptr", 4}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Store"}]
	i.ptrSized[sizedIntrinsicKey{"sync/atomic", "StoreUintptr", 8}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Store64"}]

	i.std[intrinsicKey{"sync/atomic", "SwapInt32"}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Xchg"}]
	i.std[intrinsicKey{"sync/atomic", "SwapInt64"}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Xchg64"}]
	i.std[intrinsicKey{"sync/atomic", "SwapUint32"}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Xchg"}]
	i.std[intrinsicKey{"sync/atomic", "SwapUint64"}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Xchg64"}]
	i.ptrSized[sizedIntrinsicKey{"sync/atomic", "SwapUintptr", 4}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Xchg"}]
	i.ptrSized[sizedIntrinsicKey{"sync/atomic", "SwapUintptr", 8}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Xchg64"}]

	i.std[intrinsicKey{"sync/atomic", "CompareAndSwapInt32"}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Cas"}]
	i.std[intrinsicKey{"sync/atomic", "CompareAndSwapInt64"}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Cas64"}]
	i.std[intrinsicKey{"sync/atomic", "CompareAndSwapUint32"}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Cas"}]
	i.std[intrinsicKey{"sync/atomic", "CompareAndSwapUint64"}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Cas64"}]
	i.ptrSized[sizedIntrinsicKey{"sync/atomic", "CompareAndSwapUintptr", 4}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Cas"}]
	i.ptrSized[sizedIntrinsicKey{"sync/atomic", "CompareAndSwapUintptr", 8}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Cas64"}]

	i.std[intrinsicKey{"sync/atomic", "AddInt32"}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Xadd"}]
	i.std[intrinsicKey{"sync/atomic", "AddInt64"}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Xadd64"}]
	i.std[intrinsicKey{"sync/atomic", "AddUint32"}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Xadd"}]
	i.std[intrinsicKey{"sync/atomic", "AddUint64"}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Xadd64"}]
	i.ptrSized[sizedIntrinsicKey{"sync/atomic", "AddUintptr", 4}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Xadd"}]
	i.ptrSized[sizedIntrinsicKey{"sync/atomic", "AddUintptr", 8}] =
		i.std[intrinsicKey{"runtime/internal/atomic", "Xadd64"}]

	/******** math/big ********/
	i.intSized[sizedIntrinsicKey{"math/big", "mulWW", 8}] =
		enableOnArch(func(s *state, n *Node, args []*ssa.Value) *ssa.Value {
			return s.newValue2(ssa.OpMul64uhilo, ssa.MakeTuple(Types[TUINT64], Types[TUINT64]), args[0], args[1])
		}, sys.AMD64)
	i.intSized[sizedIntrinsicKey{"math/big", "divWW", 8}] =
		enableOnArch(func(s *state, n *Node, args []*ssa.Value) *ssa.Value {
			return s.newValue3(ssa.OpDiv128u, ssa.MakeTuple(Types[TUINT64], Types[TUINT64]), args[0], args[1], args[2])
		}, sys.AMD64)
}
   2811 
   2812 // findIntrinsic returns a function which builds the SSA equivalent of the
   2813 // function identified by the symbol sym.  If sym is not an intrinsic call, returns nil.
   2814 func findIntrinsic(sym *Sym) intrinsicBuilder {
   2815 	if ssa.IntrinsicsDisable {
   2816 		return nil
   2817 	}
   2818 	if sym == nil || sym.Pkg == nil {
   2819 		return nil
   2820 	}
   2821 	if intrinsics == nil {
   2822 		intrinsicInit()
   2823 	}
   2824 	pkg := sym.Pkg.Path
   2825 	if sym.Pkg == localpkg {
   2826 		pkg = myimportpath
   2827 	}
   2828 	fn := sym.Name
   2829 	f := intrinsics.std[intrinsicKey{pkg, fn}]
   2830 	if f != nil {
   2831 		return f
   2832 	}
   2833 	f = intrinsics.intSized[sizedIntrinsicKey{pkg, fn, Widthint}]
   2834 	if f != nil {
   2835 		return f
   2836 	}
   2837 	return intrinsics.ptrSized[sizedIntrinsicKey{pkg, fn, Widthptr}]
   2838 }
   2839 
   2840 func isIntrinsicCall(n *Node) bool {
   2841 	if n == nil || n.Left == nil {
   2842 		return false
   2843 	}
   2844 	return findIntrinsic(n.Left.Sym) != nil
   2845 }
   2846 
   2847 // intrinsicCall converts a call to a recognized intrinsic function into the intrinsic SSA operation.
   2848 func (s *state) intrinsicCall(n *Node) *ssa.Value {
   2849 	v := findIntrinsic(n.Left.Sym)(s, n, s.intrinsicArgs(n))
   2850 	if ssa.IntrinsicsDebug > 0 {
   2851 		x := v
   2852 		if x == nil {
   2853 			x = s.mem()
   2854 		}
   2855 		if x.Op == ssa.OpSelect0 || x.Op == ssa.OpSelect1 {
   2856 			x = x.Args[0]
   2857 		}
   2858 		Warnl(n.Lineno, "intrinsic substitution for %v with %s", n.Left.Sym.Name, x.LongString())
   2859 	}
   2860 	return v
   2861 }
   2862 
   2863 type callArg struct {
   2864 	offset int64
   2865 	v      *ssa.Value
   2866 }
   2867 type byOffset []callArg
   2868 
   2869 func (x byOffset) Len() int      { return len(x) }
   2870 func (x byOffset) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
   2871 func (x byOffset) Less(i, j int) bool {
   2872 	return x[i].offset < x[j].offset
   2873 }
   2874 
   2875 // intrinsicArgs extracts args from n, evaluates them to SSA values, and returns them.
   2876 func (s *state) intrinsicArgs(n *Node) []*ssa.Value {
   2877 	// This code is complicated because of how walk transforms calls. For a call node,
   2878 	// each entry in n.List is either an assignment to OINDREGSP which actually
   2879 	// stores an arg, or an assignment to a temporary which computes an arg
   2880 	// which is later assigned.
   2881 	// The args can also be out of order.
   2882 	// TODO: when walk goes away someday, this code can go away also.
   2883 	var args []callArg
   2884 	temps := map[*Node]*ssa.Value{}
   2885 	for _, a := range n.List.Slice() {
   2886 		if a.Op != OAS {
   2887 			s.Fatalf("non-assignment as a function argument %s", opnames[a.Op])
   2888 		}
   2889 		l, r := a.Left, a.Right
   2890 		switch l.Op {
   2891 		case ONAME:
   2892 			// Evaluate and store to "temporary".
   2893 			// Walk ensures these temporaries are dead outside of n.
   2894 			temps[l] = s.expr(r)
   2895 		case OINDREGSP:
   2896 			// Store a value to an argument slot.
   2897 			var v *ssa.Value
   2898 			if x, ok := temps[r]; ok {
   2899 				// This is a previously computed temporary.
   2900 				v = x
   2901 			} else {
   2902 				// This is an explicit value; evaluate it.
   2903 				v = s.expr(r)
   2904 			}
   2905 			args = append(args, callArg{l.Xoffset, v})
   2906 		default:
   2907 			s.Fatalf("function argument assignment target not allowed: %s", opnames[l.Op])
   2908 		}
   2909 	}
   2910 	sort.Sort(byOffset(args))
   2911 	res := make([]*ssa.Value, len(args))
   2912 	for i, a := range args {
   2913 		res[i] = a.v
   2914 	}
   2915 	return res
   2916 }
   2917 
// call emits the function call n using the specified call kind
// (normal, defer, or go).
// Returns the address of the return value (or nil if none).
func (s *state) call(n *Node, k callKind) *ssa.Value {
	var sym *Sym           // target symbol (if static)
	var closure *ssa.Value // ptr to closure to run (if dynamic)
	var codeptr *ssa.Value // ptr to target code (if dynamic)
	var rcvr *ssa.Value    // receiver to set
	fn := n.Left
	switch n.Op {
	case OCALLFUNC:
		// Direct static call when the target is a known function and the
		// call is not deferred or launched as a goroutine.
		if k == callNormal && fn.Op == ONAME && fn.Class == PFUNC {
			sym = fn.Sym
			break
		}
		closure = s.expr(fn)
	case OCALLMETH:
		if fn.Op != ODOTMETH {
			Fatalf("OCALLMETH: n.Left not an ODOTMETH: %v", fn)
		}
		if k == callNormal {
			sym = fn.Sym
			break
		}
		// Make a name n2 for the function.
		// fn.Sym might be sync.(*Mutex).Unlock.
		// Make a PFUNC node out of that, then evaluate it.
		// We get back an SSA value representing &sync.(*Mutex).Unlockf.
		// We can then pass that to defer or go.
		n2 := newname(fn.Sym)
		n2.Class = PFUNC
		n2.Lineno = fn.Lineno
		n2.Type = Types[TUINT8] // dummy type for a static closure. Could use runtime.funcval if we had it.
		closure = s.expr(n2)
		// Note: receiver is already assigned in n.List, so we don't
		// want to set it here.
	case OCALLINTER:
		if fn.Op != ODOTINTER {
			Fatalf("OCALLINTER: n.Left not an ODOTINTER: %v", fn.Op)
		}
		i := s.expr(fn.Left)
		itab := s.newValue1(ssa.OpITab, Types[TUINTPTR], i)
		if k != callNormal {
			// defer/go of an interface call must nil-check now, since the
			// actual call happens later.
			s.nilCheck(itab)
		}
		itabidx := fn.Xoffset + 3*int64(Widthptr) + 8 // offset of fun field in runtime.itab
		itab = s.newValue1I(ssa.OpOffPtr, ptrto(Types[TUINTPTR]), itabidx, itab)
		if k == callNormal {
			codeptr = s.newValue2(ssa.OpLoad, Types[TUINTPTR], itab, s.mem())
		} else {
			closure = itab
		}
		rcvr = s.newValue1(ssa.OpIData, Types[TUINTPTR], i)
	}
	dowidth(fn.Type)
	stksize := fn.Type.ArgWidth() // includes receiver

	// Run all argument assignments. The arg slots have already
	// been offset by the appropriate amount (+2*widthptr for go/defer,
	// +widthptr for interface calls).
	// For OCALLMETH, the receiver is set in these statements.
	s.stmtList(n.List)

	// Set receiver (for interface calls)
	if rcvr != nil {
		argStart := Ctxt.FixedFrameSize()
		if k != callNormal {
			argStart += int64(2 * Widthptr)
		}
		addr := s.entryNewValue1I(ssa.OpOffPtr, ptrto(Types[TUINTPTR]), argStart, s.sp)
		s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, int64(Widthptr), addr, rcvr, s.mem())
	}

	// Defer/go args
	if k != callNormal {
		// Write argsize and closure (args to Newproc/Deferproc).
		argStart := Ctxt.FixedFrameSize()
		argsize := s.constInt32(Types[TUINT32], int32(stksize))
		addr := s.entryNewValue1I(ssa.OpOffPtr, ptrto(Types[TUINT32]), argStart, s.sp)
		s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, 4, addr, argsize, s.mem())
		addr = s.entryNewValue1I(ssa.OpOffPtr, ptrto(Types[TUINTPTR]), argStart+int64(Widthptr), s.sp)
		s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, int64(Widthptr), addr, closure, s.mem())
		stksize += 2 * int64(Widthptr)
	}

	// call target
	var call *ssa.Value
	switch {
	case k == callDefer:
		call = s.newValue1(ssa.OpDeferCall, ssa.TypeMem, s.mem())
	case k == callGo:
		call = s.newValue1(ssa.OpGoCall, ssa.TypeMem, s.mem())
	case closure != nil:
		// Dynamic call through a closure: code pointer is the first word.
		codeptr = s.newValue2(ssa.OpLoad, Types[TUINTPTR], closure, s.mem())
		call = s.newValue3(ssa.OpClosureCall, ssa.TypeMem, codeptr, closure, s.mem())
	case codeptr != nil:
		call = s.newValue2(ssa.OpInterCall, ssa.TypeMem, codeptr, s.mem())
	case sym != nil:
		call = s.newValue1A(ssa.OpStaticCall, ssa.TypeMem, sym, s.mem())
	default:
		Fatalf("bad call type %v %v", n.Op, n)
	}
	call.AuxInt = stksize // Call operations carry the argsize of the callee along with them
	s.vars[&memVar] = call

	// Finish block for defers
	if k == callDefer {
		// A BlockDefer has two successors: the normal continuation and a
		// recover edge to the function's exit code.
		b := s.endBlock()
		b.Kind = ssa.BlockDefer
		b.SetControl(call)
		bNext := s.f.NewBlock(ssa.BlockPlain)
		b.AddEdgeTo(bNext)
		// Add recover edge to exit code.
		r := s.f.NewBlock(ssa.BlockPlain)
		s.startBlock(r)
		s.exit()
		b.AddEdgeTo(r)
		b.Likely = ssa.BranchLikely
		s.startBlock(bNext)
	}

	res := n.Left.Type.Results()
	if res.NumFields() == 0 || k != callNormal {
		// call has no return value. Continue with the next statement.
		return nil
	}
	fp := res.Field(0)
	return s.entryNewValue1I(ssa.OpOffPtr, ptrto(fp.Type), fp.Offset+Ctxt.FixedFrameSize(), s.sp)
}
   3046 
   3047 // etypesign returns the signed-ness of e, for integer/pointer etypes.
   3048 // -1 means signed, +1 means unsigned, 0 means non-integer/non-pointer.
   3049 func etypesign(e EType) int8 {
   3050 	switch e {
   3051 	case TINT8, TINT16, TINT32, TINT64, TINT:
   3052 		return -1
   3053 	case TUINT8, TUINT16, TUINT32, TUINT64, TUINT, TUINTPTR, TUNSAFEPTR:
   3054 		return +1
   3055 	}
   3056 	return 0
   3057 }
   3058 
   3059 // lookupSymbol is used to retrieve the symbol (Extern, Arg or Auto) used for a particular node.
   3060 // This improves the effectiveness of cse by using the same Aux values for the
   3061 // same symbols.
   3062 func (s *state) lookupSymbol(n *Node, sym interface{}) interface{} {
   3063 	switch sym.(type) {
   3064 	default:
   3065 		s.Fatalf("sym %v is of uknown type %T", sym, sym)
   3066 	case *ssa.ExternSymbol, *ssa.ArgSymbol, *ssa.AutoSymbol:
   3067 		// these are the only valid types
   3068 	}
   3069 
   3070 	if lsym, ok := s.varsyms[n]; ok {
   3071 		return lsym
   3072 	} else {
   3073 		s.varsyms[n] = sym
   3074 		return sym
   3075 	}
   3076 }
   3077 
// addr converts the address of the expression n to SSA, adds it to s and returns the SSA result.
// Also returns a bool reporting whether the returned value is "volatile", that is it
// points to the outargs section and thus the referent will be clobbered by any call.
// The value that the returned Value represents is guaranteed to be non-nil.
// If bounded is true then this address does not require a nil check for its operand
// even if that would otherwise be implied.
func (s *state) addr(n *Node, bounded bool) (*ssa.Value, bool) {
	t := ptrto(n.Type)
	switch n.Op {
	case ONAME:
		switch n.Class {
		case PEXTERN:
			// global variable: address is relative to the SB pseudo-register.
			aux := s.lookupSymbol(n, &ssa.ExternSymbol{Typ: n.Type, Sym: n.Sym})
			v := s.entryNewValue1A(ssa.OpAddr, t, aux, s.sb)
			// TODO: Make OpAddr use AuxInt as well as Aux.
			if n.Xoffset != 0 {
				v = s.entryNewValue1I(ssa.OpOffPtr, v.Type, n.Xoffset, v)
			}
			return v, false
		case PPARAM:
			// parameter slot; normally the address was precomputed into s.decladdrs.
			v := s.decladdrs[n]
			if v != nil {
				return v, false
			}
			if n == nodfp {
				// Special arg that points to the frame pointer (Used by ORECOVER).
				aux := s.lookupSymbol(n, &ssa.ArgSymbol{Typ: n.Type, Node: n})
				return s.entryNewValue1A(ssa.OpAddr, t, aux, s.sp), false
			}
			s.Fatalf("addr of undeclared ONAME %v. declared: %v", n, s.decladdrs)
			return nil, false
		case PAUTO:
			aux := s.lookupSymbol(n, &ssa.AutoSymbol{Typ: n.Type, Node: n})
			return s.newValue1A(ssa.OpAddr, t, aux, s.sp), false
		case PPARAMOUT: // Same as PAUTO -- cannot generate LEA early.
			// ensure that we reuse symbols for out parameters so
			// that cse works on their addresses
			aux := s.lookupSymbol(n, &ssa.ArgSymbol{Typ: n.Type, Node: n})
			return s.newValue1A(ssa.OpAddr, t, aux, s.sp), false
		default:
			s.Fatalf("variable address class %v not implemented", classnames[n.Class])
			return nil, false
		}
	case OINDREGSP:
		// indirect off REGSP
		// used for storing/loading arguments/returns to/from callees
		// Volatile: the slot is in the outargs area and any call clobbers it.
		return s.entryNewValue1I(ssa.OpOffPtr, t, n.Xoffset, s.sp), true
	case OINDEX:
		if n.Left.Type.IsSlice() {
			// slice indexing: load ptr/len from the slice header, bounds-check.
			a := s.expr(n.Left)
			i := s.expr(n.Right)
			i = s.extendIndex(i, panicindex)
			len := s.newValue1(ssa.OpSliceLen, Types[TINT], a)
			if !n.Bounded {
				s.boundsCheck(i, len)
			}
			p := s.newValue1(ssa.OpSlicePtr, t, a)
			return s.newValue2(ssa.OpPtrIndex, t, p, i), false
		} else { // array
			// array indexing: take the array's own address, then index;
			// volatility is inherited from the array's address.
			a, isVolatile := s.addr(n.Left, bounded)
			i := s.expr(n.Right)
			i = s.extendIndex(i, panicindex)
			len := s.constInt(Types[TINT], n.Left.Type.NumElem())
			if !n.Bounded {
				s.boundsCheck(i, len)
			}
			return s.newValue2(ssa.OpPtrIndex, ptrto(n.Left.Type.Elem()), a, i), isVolatile
		}
	case OIND:
		return s.exprPtr(n.Left, bounded, n.Lineno), false
	case ODOT:
		// field of an addressable value: offset from the base address.
		p, isVolatile := s.addr(n.Left, bounded)
		return s.newValue1I(ssa.OpOffPtr, t, n.Xoffset, p), isVolatile
	case ODOTPTR:
		// field through a pointer: nil-check the pointer, then offset.
		p := s.exprPtr(n.Left, bounded, n.Lineno)
		return s.newValue1I(ssa.OpOffPtr, t, n.Xoffset, p), false
	case OCLOSUREVAR:
		return s.newValue1I(ssa.OpOffPtr, t, n.Xoffset,
			s.entryNewValue0(ssa.OpGetClosurePtr, ptrto(Types[TUINT8]))), false
	case OCONVNOP:
		addr, isVolatile := s.addr(n.Left, bounded)
		return s.newValue1(ssa.OpCopy, t, addr), isVolatile // ensure that addr has the right type
	case OCALLFUNC, OCALLINTER, OCALLMETH:
		// call result lives in the outargs section, hence volatile.
		return s.call(n, callNormal), true
	case ODOTTYPE:
		v, _ := s.dottype(n, false)
		if v.Op != ssa.OpLoad {
			s.Fatalf("dottype of non-load")
		}
		if v.Args[1] != s.mem() {
			s.Fatalf("memory no longer live from dottype load")
		}
		// Reuse the load's address operand directly.
		return v.Args[0], false
	default:
		s.Fatalf("unhandled addr %v", n.Op)
		return nil, false
	}
}
   3178 
   3179 // canSSA reports whether n is SSA-able.
   3180 // n must be an ONAME (or an ODOT sequence with an ONAME base).
   3181 func (s *state) canSSA(n *Node) bool {
   3182 	if Debug['N'] != 0 {
   3183 		return false
   3184 	}
   3185 	for n.Op == ODOT || (n.Op == OINDEX && n.Left.Type.IsArray()) {
   3186 		n = n.Left
   3187 	}
   3188 	if n.Op != ONAME {
   3189 		return false
   3190 	}
   3191 	if n.Addrtaken {
   3192 		return false
   3193 	}
   3194 	if n.isParamHeapCopy() {
   3195 		return false
   3196 	}
   3197 	if n.Class == PAUTOHEAP {
   3198 		Fatalf("canSSA of PAUTOHEAP %v", n)
   3199 	}
   3200 	switch n.Class {
   3201 	case PEXTERN:
   3202 		return false
   3203 	case PPARAMOUT:
   3204 		if hasdefer {
   3205 			// TODO: handle this case?  Named return values must be
   3206 			// in memory so that the deferred function can see them.
   3207 			// Maybe do: if !strings.HasPrefix(n.String(), "~") { return false }
   3208 			return false
   3209 		}
   3210 		if s.cgoUnsafeArgs {
   3211 			// Cgo effectively takes the address of all result args,
   3212 			// but the compiler can't see that.
   3213 			return false
   3214 		}
   3215 	}
   3216 	if n.Class == PPARAM && n.String() == ".this" {
   3217 		// wrappers generated by genwrapper need to update
   3218 		// the .this pointer in place.
   3219 		// TODO: treat as a PPARMOUT?
   3220 		return false
   3221 	}
   3222 	return canSSAType(n.Type)
   3223 	// TODO: try to make more variables SSAable?
   3224 }
   3225 
   3226 // canSSA reports whether variables of type t are SSA-able.
   3227 func canSSAType(t *Type) bool {
   3228 	dowidth(t)
   3229 	if t.Width > int64(4*Widthptr) {
   3230 		// 4*Widthptr is an arbitrary constant. We want it
   3231 		// to be at least 3*Widthptr so slices can be registerized.
   3232 		// Too big and we'll introduce too much register pressure.
   3233 		return false
   3234 	}
   3235 	switch t.Etype {
   3236 	case TARRAY:
   3237 		// We can't do larger arrays because dynamic indexing is
   3238 		// not supported on SSA variables.
   3239 		// TODO: allow if all indexes are constant.
   3240 		if t.NumElem() == 0 {
   3241 			return true
   3242 		}
   3243 		if t.NumElem() == 1 {
   3244 			return canSSAType(t.Elem())
   3245 		}
   3246 		return false
   3247 	case TSTRUCT:
   3248 		if t.NumFields() > ssa.MaxStruct {
   3249 			return false
   3250 		}
   3251 		for _, t1 := range t.Fields().Slice() {
   3252 			if !canSSAType(t1.Type) {
   3253 				return false
   3254 			}
   3255 		}
   3256 		return true
   3257 	default:
   3258 		return true
   3259 	}
   3260 }
   3261 
   3262 // exprPtr evaluates n to a pointer and nil-checks it.
   3263 func (s *state) exprPtr(n *Node, bounded bool, lineno int32) *ssa.Value {
   3264 	p := s.expr(n)
   3265 	if bounded || n.NonNil {
   3266 		if s.f.Config.Debug_checknil() && lineno > 1 {
   3267 			s.f.Config.Warnl(lineno, "removed nil check")
   3268 		}
   3269 		return p
   3270 	}
   3271 	s.nilCheck(p)
   3272 	return p
   3273 }
   3274 
   3275 // nilCheck generates nil pointer checking code.
   3276 // Used only for automatically inserted nil checks,
   3277 // not for user code like 'x != nil'.
   3278 func (s *state) nilCheck(ptr *ssa.Value) {
   3279 	if disable_checknil != 0 {
   3280 		return
   3281 	}
   3282 	s.newValue2(ssa.OpNilCheck, ssa.TypeVoid, ptr, s.mem())
   3283 }
   3284 
   3285 // boundsCheck generates bounds checking code. Checks if 0 <= idx < len, branches to exit if not.
   3286 // Starts a new block on return.
   3287 // idx is already converted to full int width.
   3288 func (s *state) boundsCheck(idx, len *ssa.Value) {
   3289 	if Debug['B'] != 0 {
   3290 		return
   3291 	}
   3292 
   3293 	// bounds check
   3294 	cmp := s.newValue2(ssa.OpIsInBounds, Types[TBOOL], idx, len)
   3295 	s.check(cmp, panicindex)
   3296 }
   3297 
   3298 // sliceBoundsCheck generates slice bounds checking code. Checks if 0 <= idx <= len, branches to exit if not.
   3299 // Starts a new block on return.
   3300 // idx and len are already converted to full int width.
   3301 func (s *state) sliceBoundsCheck(idx, len *ssa.Value) {
   3302 	if Debug['B'] != 0 {
   3303 		return
   3304 	}
   3305 
   3306 	// bounds check
   3307 	cmp := s.newValue2(ssa.OpIsSliceInBounds, Types[TBOOL], idx, len)
   3308 	s.check(cmp, panicslice)
   3309 }
   3310 
// check branches on cmp (a bool): if cmp is false, panic by calling the
// given runtime function fn; otherwise fall through to a fresh block,
// which is current when check returns.
func (s *state) check(cmp *ssa.Value, fn *Node) {
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely // the passing (non-panic) case is the common one
	bNext := s.f.NewBlock(ssa.BlockPlain)
	line := s.peekLine()
	// Reuse one panic block per (runtime function, source line) pair so
	// repeated checks at the same position share the same exit code.
	bPanic := s.panics[funcLine{fn, line}]
	if bPanic == nil {
		bPanic = s.f.NewBlock(ssa.BlockPlain)
		s.panics[funcLine{fn, line}] = bPanic
		s.startBlock(bPanic)
		// The panic call takes/returns memory to ensure that the right
		// memory state is observed if the panic happens.
		s.rtcall(fn, false, nil)
	}
	// NOTE(review): edge order appears significant here — bNext is added
	// first, presumably so BranchLikely refers to the non-panic successor;
	// confirm against ssa.Block semantics before reordering.
	b.AddEdgeTo(bNext)
	b.AddEdgeTo(bPanic)
	s.startBlock(bNext)
}
   3332 
   3333 func (s *state) intDivide(n *Node, a, b *ssa.Value) *ssa.Value {
   3334 	needcheck := true
   3335 	switch b.Op {
   3336 	case ssa.OpConst8, ssa.OpConst16, ssa.OpConst32, ssa.OpConst64:
   3337 		if b.AuxInt != 0 {
   3338 			needcheck = false
   3339 		}
   3340 	}
   3341 	if needcheck {
   3342 		// do a size-appropriate check for zero
   3343 		cmp := s.newValue2(s.ssaOp(ONE, n.Type), Types[TBOOL], b, s.zeroVal(n.Type))
   3344 		s.check(cmp, panicdivide)
   3345 	}
   3346 	return s.newValue2(s.ssaOp(n.Op, n.Type), a.Type, a, b)
   3347 }
   3348 
// rtcall issues a call to the given runtime function fn with the listed args.
// Returns a slice of results of the given result types.
// The call is added to the end of the current block.
// If returns is false, the block is marked as an exit block.
func (s *state) rtcall(fn *Node, returns bool, results []*Type, args ...*ssa.Value) []*ssa.Value {
	// Write args to the stack, each aligned to its own type's alignment,
	// starting just past the fixed frame area.
	off := Ctxt.FixedFrameSize()
	for _, arg := range args {
		t := arg.Type
		off = Rnd(off, t.Alignment())
		ptr := s.sp
		if off != 0 {
			ptr = s.newValue1I(ssa.OpOffPtr, t.PtrTo(), off, s.sp)
		}
		size := t.Size()
		s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, size, ptr, arg, s.mem())
		off += size
	}
	off = Rnd(off, int64(Widthptr))
	if Thearch.LinkArch.Name == "amd64p32" {
		// amd64p32 wants 8-byte alignment of the start of the return values.
		off = Rnd(off, 8)
	}

	// Issue call
	call := s.newValue1A(ssa.OpStaticCall, ssa.TypeMem, fn.Sym, s.mem())
	s.vars[&memVar] = call

	if !returns {
		// Finish block: a non-returning runtime call (a panic helper)
		// terminates the function.
		b := s.endBlock()
		b.Kind = ssa.BlockExit
		b.SetControl(call)
		// AuxInt records the callee argument space, excluding the fixed frame.
		call.AuxInt = off - Ctxt.FixedFrameSize()
		if len(results) > 0 {
			Fatalf("panic call can't have results")
		}
		return nil
	}

	// Load results back from the stack, each at its aligned offset.
	res := make([]*ssa.Value, len(results))
	for i, t := range results {
		off = Rnd(off, t.Alignment())
		ptr := s.sp
		if off != 0 {
			ptr = s.newValue1I(ssa.OpOffPtr, ptrto(t), off, s.sp)
		}
		res[i] = s.newValue2(ssa.OpLoad, t, ptr, s.mem())
		off += t.Size()
	}
	off = Rnd(off, int64(Widthptr))

	// Remember how much callee stack space we needed.
	// NOTE(review): unlike the non-returning path above, this AuxInt is not
	// reduced by FixedFrameSize — confirm that asymmetry is intentional.
	call.AuxInt = off

	return res
}
   3407 
// insertWBmove inserts the assignment *left = *right including a write barrier.
// t is the type being assigned.
// If right == nil, then we're zeroing *left.
// NOTE(review): the line parameter is unused in this body; the recorded
// WBLineno comes from left.Line instead — confirm whether that is intended.
func (s *state) insertWBmove(t *Type, left, right *ssa.Value, line int32, rightIsVolatile bool) {
	// if writeBarrier.enabled {
	//   typedmemmove(&t, left, right)
	// } else {
	//   *left = *right
	// }
	//
	// or
	//
	// if writeBarrier.enabled {
	//   typedmemclr(&t, left)
	// } else {
	//   *left = zeroValue
	// }

	if s.noWB {
		// Function is marked //go:nowritebarrier; emitting one is an error.
		s.Error("write barrier prohibited")
	}
	if s.WBLineno == 0 {
		// Remember the first position that needed a write barrier.
		s.WBLineno = left.Line
	}

	var val *ssa.Value
	if right == nil {
		// Zeroing move: no source operand.
		val = s.newValue2I(ssa.OpZeroWB, ssa.TypeMem, sizeAlignAuxInt(t), left, s.mem())
	} else {
		var op ssa.Op
		if rightIsVolatile {
			// Source is in the outargs area; use the volatile variant.
			op = ssa.OpMoveWBVolatile
		} else {
			op = ssa.OpMoveWB
		}
		val = s.newValue3I(op, ssa.TypeMem, sizeAlignAuxInt(t), left, right, s.mem())
	}
	// Attach the type descriptor symbol; the writebarrier phase needs it
	// to call typedmemmove/typedmemclr.
	val.Aux = &ssa.ExternSymbol{Typ: Types[TUINTPTR], Sym: typenamesym(t)}
	s.vars[&memVar] = val

	// WB ops will be expanded to branches at writebarrier phase.
	// To make it easy, we put WB ops at the end of a block, so
	// that it does not need to split a block into two parts when
	// expanding WB ops.
	b := s.f.NewBlock(ssa.BlockPlain)
	s.endBlock().AddEdgeTo(b)
	s.startBlock(b)
}
   3456 
// insertWBstore inserts the assignment *left = right including a write barrier.
// t is the type being assigned.
// NOTE(review): the line parameter is unused in this body; WBLineno is
// taken from left.Line — confirm whether that is intended.
func (s *state) insertWBstore(t *Type, left, right *ssa.Value, line int32, skip skipMask) {
	// store scalar fields
	// if writeBarrier.enabled {
	//   writebarrierptr for pointer fields
	// } else {
	//   store pointer fields
	// }

	if s.noWB {
		// Function is marked //go:nowritebarrier; emitting one is an error.
		s.Error("write barrier prohibited")
	}
	if s.WBLineno == 0 {
		// Remember the first position that needed a write barrier.
		s.WBLineno = left.Line
	}
	// Scalars never need a barrier; pointers get the WB store ops.
	s.storeTypeScalars(t, left, right, skip)
	s.storeTypePtrsWB(t, left, right)

	// WB ops will be expanded to branches at writebarrier phase.
	// To make it easy, we put WB ops at the end of a block, so
	// that it does not need to split a block into two parts when
	// expanding WB ops.
	b := s.f.NewBlock(ssa.BlockPlain)
	s.endBlock().AddEdgeTo(b)
	s.startBlock(b)
}
   3484 
// storeTypeScalars does *left = right for all scalar (non-pointer) parts of t.
// skip suppresses the len (skipLen) and/or cap (skipCap) stores for
// string/slice headers. The switch cases are mutually exclusive type shapes.
func (s *state) storeTypeScalars(t *Type, left, right *ssa.Value, skip skipMask) {
	switch {
	case t.IsBoolean() || t.IsInteger() || t.IsFloat() || t.IsComplex():
		s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, t.Size(), left, right, s.mem())
	case t.IsPtrShaped():
		// no scalar fields.
	case t.IsString():
		if skip&skipLen != 0 {
			return
		}
		// Store only the len word; the data pointer is handled by the ptr pass.
		len := s.newValue1(ssa.OpStringLen, Types[TINT], right)
		lenAddr := s.newValue1I(ssa.OpOffPtr, ptrto(Types[TINT]), s.config.IntSize, left)
		s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, s.config.IntSize, lenAddr, len, s.mem())
	case t.IsSlice():
		// Store the len and cap words (offsets 1 and 2 in the slice header).
		if skip&skipLen == 0 {
			len := s.newValue1(ssa.OpSliceLen, Types[TINT], right)
			lenAddr := s.newValue1I(ssa.OpOffPtr, ptrto(Types[TINT]), s.config.IntSize, left)
			s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, s.config.IntSize, lenAddr, len, s.mem())
		}
		if skip&skipCap == 0 {
			cap := s.newValue1(ssa.OpSliceCap, Types[TINT], right)
			capAddr := s.newValue1I(ssa.OpOffPtr, ptrto(Types[TINT]), 2*s.config.IntSize, left)
			s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, s.config.IntSize, capAddr, cap, s.mem())
		}
	case t.IsInterface():
		// itab field doesn't need a write barrier (even though it is a pointer).
		itab := s.newValue1(ssa.OpITab, ptrto(Types[TUINT8]), right)
		s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, s.config.IntSize, left, itab, s.mem())
	case t.IsStruct():
		// Recurse field by field. FieldType returns an ssa.Type; assert
		// back to the gc *Type for the recursive call.
		n := t.NumFields()
		for i := 0; i < n; i++ {
			ft := t.FieldType(i)
			addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
			val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
			s.storeTypeScalars(ft.(*Type), addr, val, 0)
		}
	case t.IsArray() && t.NumElem() == 0:
		// nothing
	case t.IsArray() && t.NumElem() == 1:
		// Single-element array: store through to the element.
		s.storeTypeScalars(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right), 0)
	default:
		s.Fatalf("bad write barrier type %v", t)
	}
}
   3530 
// storeTypePtrs does *left = right for all pointer parts of t,
// WITHOUT write barriers (plain OpStore). Companion to storeTypeScalars.
func (s *state) storeTypePtrs(t *Type, left, right *ssa.Value) {
	switch {
	case t.IsPtrShaped():
		s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, s.config.PtrSize, left, right, s.mem())
	case t.IsString():
		// Store the data pointer only; len was handled by the scalar pass.
		ptr := s.newValue1(ssa.OpStringPtr, ptrto(Types[TUINT8]), right)
		s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, s.config.PtrSize, left, ptr, s.mem())
	case t.IsSlice():
		// Store the data pointer only; len/cap were handled by the scalar pass.
		ptr := s.newValue1(ssa.OpSlicePtr, ptrto(Types[TUINT8]), right)
		s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, s.config.PtrSize, left, ptr, s.mem())
	case t.IsInterface():
		// itab field is treated as a scalar.
		idata := s.newValue1(ssa.OpIData, ptrto(Types[TUINT8]), right)
		idataAddr := s.newValue1I(ssa.OpOffPtr, ptrto(Types[TUINT8]), s.config.PtrSize, left)
		s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, s.config.PtrSize, idataAddr, idata, s.mem())
	case t.IsStruct():
		// Recurse into pointer-bearing fields only.
		n := t.NumFields()
		for i := 0; i < n; i++ {
			ft := t.FieldType(i)
			if !haspointers(ft.(*Type)) {
				continue
			}
			addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
			val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
			s.storeTypePtrs(ft.(*Type), addr, val)
		}
	case t.IsArray() && t.NumElem() == 0:
		// nothing
	case t.IsArray() && t.NumElem() == 1:
		s.storeTypePtrs(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right))
	default:
		s.Fatalf("bad write barrier type %v", t)
	}
}
   3566 
// storeTypePtrsWB does *left = right for all pointer parts of t, using
// write-barriered stores (OpStoreWB) that the writebarrier phase expands
// into the barrier-enabled/disabled branch.
func (s *state) storeTypePtrsWB(t *Type, left, right *ssa.Value) {
	switch {
	case t.IsPtrShaped():
		s.vars[&memVar] = s.newValue3I(ssa.OpStoreWB, ssa.TypeMem, s.config.PtrSize, left, right, s.mem())
	case t.IsString():
		// Store the data pointer only; len was handled by the scalar pass.
		ptr := s.newValue1(ssa.OpStringPtr, ptrto(Types[TUINT8]), right)
		s.vars[&memVar] = s.newValue3I(ssa.OpStoreWB, ssa.TypeMem, s.config.PtrSize, left, ptr, s.mem())
	case t.IsSlice():
		// Store the data pointer only; len/cap were handled by the scalar pass.
		ptr := s.newValue1(ssa.OpSlicePtr, ptrto(Types[TUINT8]), right)
		s.vars[&memVar] = s.newValue3I(ssa.OpStoreWB, ssa.TypeMem, s.config.PtrSize, left, ptr, s.mem())
	case t.IsInterface():
		// itab field is treated as a scalar.
		idata := s.newValue1(ssa.OpIData, ptrto(Types[TUINT8]), right)
		idataAddr := s.newValue1I(ssa.OpOffPtr, ptrto(Types[TUINT8]), s.config.PtrSize, left)
		s.vars[&memVar] = s.newValue3I(ssa.OpStoreWB, ssa.TypeMem, s.config.PtrSize, idataAddr, idata, s.mem())
	case t.IsStruct():
		// Recurse into pointer-bearing fields only.
		n := t.NumFields()
		for i := 0; i < n; i++ {
			ft := t.FieldType(i)
			if !haspointers(ft.(*Type)) {
				continue
			}
			addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
			val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
			s.storeTypePtrsWB(ft.(*Type), addr, val)
		}
	case t.IsArray() && t.NumElem() == 0:
		// nothing
	case t.IsArray() && t.NumElem() == 1:
		s.storeTypePtrsWB(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right))
	default:
		s.Fatalf("bad write barrier type %v", t)
	}
}
   3602 
// slice computes the slice v[i:j:k] and returns ptr, len, and cap of result.
// i,j,k may be nil, in which case they are set to their default value.
// t is a slice, ptr to array, or string type.
func (s *state) slice(t *Type, v, i, j, k *ssa.Value) (p, l, c *ssa.Value) {
	var elemtype *Type
	var ptrtype *Type
	var ptr *ssa.Value
	var len *ssa.Value
	var cap *ssa.Value
	zero := s.constInt(Types[TINT], 0)
	// Extract ptr/len/cap of the operand according to its kind.
	switch {
	case t.IsSlice():
		elemtype = t.Elem()
		ptrtype = ptrto(elemtype)
		ptr = s.newValue1(ssa.OpSlicePtr, ptrtype, v)
		len = s.newValue1(ssa.OpSliceLen, Types[TINT], v)
		cap = s.newValue1(ssa.OpSliceCap, Types[TINT], v)
	case t.IsString():
		elemtype = Types[TUINT8]
		ptrtype = ptrto(elemtype)
		ptr = s.newValue1(ssa.OpStringPtr, ptrtype, v)
		len = s.newValue1(ssa.OpStringLen, Types[TINT], v)
		cap = len // strings have no separate capacity
	case t.IsPtr():
		if !t.Elem().IsArray() {
			s.Fatalf("bad ptr to array in slice %v\n", t)
		}
		elemtype = t.Elem().Elem()
		ptrtype = ptrto(elemtype)
		// Slicing through a pointer dereferences it; check for nil first.
		s.nilCheck(v)
		ptr = v
		len = s.constInt(Types[TINT], t.Elem().NumElem())
		cap = len
	default:
		s.Fatalf("bad type in slice %v\n", t)
	}

	// Set default values
	if i == nil {
		i = zero
	}
	if j == nil {
		j = len
	}
	if k == nil {
		k = cap
	}

	// Panic if slice indices are not in bounds.
	s.sliceBoundsCheck(i, j)
	if j != k {
		s.sliceBoundsCheck(j, k)
	}
	if k != cap {
		s.sliceBoundsCheck(k, cap)
	}

	// Generate the following code assuming that indexes are in bounds.
	// The masking is to make sure that we don't generate a slice
	// that points to the next object in memory.
	// rlen = j - i
	// rcap = k - i
	// delta = i * elemsize
	// rptr = p + delta&mask(rcap)
	// result = (SliceMake rptr rlen rcap)
	// where mask(x) is 0 if x==0 and -1 if x>0.
	subOp := s.ssaOp(OSUB, Types[TINT])
	mulOp := s.ssaOp(OMUL, Types[TINT])
	andOp := s.ssaOp(OAND, Types[TINT])
	rlen := s.newValue2(subOp, Types[TINT], j, i)
	var rcap *ssa.Value
	switch {
	case t.IsString():
		// Capacity of the result is unimportant. However, we use
		// rcap to test if we've generated a zero-length slice.
		// Use length of strings for that.
		rcap = rlen
	case j == k:
		rcap = rlen
	default:
		rcap = s.newValue2(subOp, Types[TINT], k, i)
	}

	var rptr *ssa.Value
	if (i.Op == ssa.OpConst64 || i.Op == ssa.OpConst32) && i.AuxInt == 0 {
		// No pointer arithmetic necessary.
		rptr = ptr
	} else {
		// delta = # of bytes to offset pointer by.
		delta := s.newValue2(mulOp, Types[TINT], i, s.constInt(Types[TINT], elemtype.Width))
		// If we're slicing to the point where the capacity is zero,
		// zero out the delta.
		mask := s.newValue1(ssa.OpSlicemask, Types[TINT], rcap)
		delta = s.newValue2(andOp, Types[TINT], delta, mask)
		// Compute rptr = ptr + delta
		rptr = s.newValue2(ssa.OpAddPtr, ptrtype, ptr, delta)
	}

	return rptr, rlen, rcap
}
   3703 
// u642fcvtTab parameterizes uint64Tofloat over the destination float
// width: the comparison, conversion, and arithmetic opcodes to use, plus
// a constructor for the constant 1 in the source integer type.
type u642fcvtTab struct {
	geq, cvt2F, and, rsh, or, add ssa.Op
	one                           func(*state, ssa.Type, int64) *ssa.Value
}

// u64_f64 holds the opcodes for uint64 -> float64 conversion.
var u64_f64 u642fcvtTab = u642fcvtTab{
	geq:   ssa.OpGeq64,
	cvt2F: ssa.OpCvt64to64F,
	and:   ssa.OpAnd64,
	rsh:   ssa.OpRsh64Ux64,
	or:    ssa.OpOr64,
	add:   ssa.OpAdd64F,
	one:   (*state).constInt64,
}

// u64_f32 holds the opcodes for uint64 -> float32 conversion.
var u64_f32 u642fcvtTab = u642fcvtTab{
	geq:   ssa.OpGeq64,
	cvt2F: ssa.OpCvt64to32F,
	and:   ssa.OpAnd64,
	rsh:   ssa.OpRsh64Ux64,
	or:    ssa.OpOr64,
	add:   ssa.OpAdd32F,
	one:   (*state).constInt64,
}
   3728 
// uint64Tofloat64 converts x (a value of type ft, a uint64) to a float64
// of type tt, using the uint64->float64 opcode table.
func (s *state) uint64Tofloat64(n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value {
	return s.uint64Tofloat(&u64_f64, n, x, ft, tt)
}
   3732 
// uint64Tofloat32 converts x (a value of type ft, a uint64) to a float32
// of type tt, using the uint64->float32 opcode table.
func (s *state) uint64Tofloat32(n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value {
	return s.uint64Tofloat(&u64_f32, n, x, ft, tt)
}
   3736 
// uint64Tofloat emits the control flow and conversions for a
// uint64 -> float conversion, with the float width chosen by cvttab.
// The result is merged into SSA variable n at the join block.
func (s *state) uint64Tofloat(cvttab *u642fcvtTab, n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value {
	// if x >= 0 {
	//    result = (floatY) x
	// } else {
	// 	  y = uintX(x) ; y = x & 1
	// 	  z = uintX(x) ; z = z >> 1
	// 	  z = z >> 1
	// 	  z = z | y
	// 	  result = floatY(z)
	// 	  result = result + result
	// }
	//
	// Code borrowed from old code generator.
	// What's going on: large 64-bit "unsigned" looks like
	// negative number to hardware's integer-to-float
	// conversion. However, because the mantissa is only
	// 63 bits, we don't need the LSB, so instead we do an
	// unsigned right shift (divide by two), convert, and
	// double. However, before we do that, we need to be
	// sure that we do not lose a "1" if that made the
	// difference in the resulting rounding. Therefore, we
	// preserve it, and OR (not ADD) it back in. The case
	// that matters is when the eleven discarded bits are
	// equal to 10000000001; that rounds up, and the 1 cannot
	// be lost else it would round down if the LSB of the
	// candidate mantissa is 0.
	cmp := s.newValue2(cvttab.geq, Types[TBOOL], x, s.zeroVal(ft))
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// then: x is non-negative, a direct conversion is exact enough.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvt2F, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// else: preserve LSB, halve, convert, double (see comment above).
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	one := cvttab.one(s, ft, 1)
	y := s.newValue2(cvttab.and, ft, x, one)
	z := s.newValue2(cvttab.rsh, ft, x, one)
	z = s.newValue2(cvttab.or, ft, z, y)
	a := s.newValue1(cvttab.cvt2F, tt, z)
	a1 := s.newValue2(cvttab.add, tt, a, a)
	s.vars[n] = a1
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type)
}
   3795 
// u322fcvtTab parameterizes uint32Tofloat over the destination float
// width: the int->float opcode for the fast path, and the float64->dest
// narrowing opcode for the slow path.
type u322fcvtTab struct {
	cvtI2F, cvtF2F ssa.Op
}

// u32_f64: destination is float64, so the slow path needs no narrowing.
var u32_f64 u322fcvtTab = u322fcvtTab{
	cvtI2F: ssa.OpCvt32to64F,
	cvtF2F: ssa.OpCopy,
}

// u32_f32: destination is float32; narrow the float64 intermediate.
var u32_f32 u322fcvtTab = u322fcvtTab{
	cvtI2F: ssa.OpCvt32to32F,
	cvtF2F: ssa.OpCvt64Fto32F,
}
   3809 
// uint32Tofloat64 converts x (a value of type ft, a uint32) to a float64
// of type tt, using the uint32->float64 opcode table.
func (s *state) uint32Tofloat64(n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value {
	return s.uint32Tofloat(&u32_f64, n, x, ft, tt)
}
   3813 
// uint32Tofloat32 converts x (a value of type ft, a uint32) to a float32
// of type tt, using the uint32->float32 opcode table.
func (s *state) uint32Tofloat32(n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value {
	return s.uint32Tofloat(&u32_f32, n, x, ft, tt)
}
   3817 
// uint32Tofloat emits the control flow for a uint32 -> float conversion.
// Non-negative inputs convert directly; negative-looking inputs (high bit
// set) are converted via float64 with a 2^32 bias, then narrowed with
// cvttab.cvtF2F. The result is merged into SSA variable n at the join block.
func (s *state) uint32Tofloat(cvttab *u322fcvtTab, n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value {
	// if x >= 0 {
	// 	result = floatY(x)
	// } else {
	// 	result = floatY(float64(x) + (1<<32))
	// }
	cmp := s.newValue2(ssa.OpGeq32, Types[TBOOL], x, s.zeroVal(ft))
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// then: direct signed conversion is correct for values < 2^31.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvtI2F, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// else: convert as signed (yielding x-2^32), then add 2^32 back in
	// float64, which is exact, and narrow to the destination width.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	a1 := s.newValue1(ssa.OpCvt32to64F, Types[TFLOAT64], x)
	twoToThe32 := s.constFloat64(Types[TFLOAT64], float64(1<<32))
	a2 := s.newValue2(ssa.OpAdd64F, Types[TFLOAT64], a1, twoToThe32)
	a3 := s.newValue1(cvttab.cvtF2F, tt, a2)

	s.vars[n] = a3
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type)
}
   3855 
// referenceTypeBuiltin generates code for the len/cap builtins for maps and channels.
// x is the evaluated map/chan operand. A nil operand yields zero; otherwise
// the count is loaded from the referenced runtime header (first word for
// len, second word for cap).
func (s *state) referenceTypeBuiltin(n *Node, x *ssa.Value) *ssa.Value {
	if !n.Left.Type.IsMap() && !n.Left.Type.IsChan() {
		s.Fatalf("node must be a map or a channel")
	}
	// if n == nil {
	//   return 0
	// } else {
	//   // len
	//   return *((*int)n)
	//   // cap
	//   return *(((*int)n)+1)
	// }
	lenType := n.Type
	nilValue := s.constNil(Types[TUINTPTR])
	cmp := s.newValue2(ssa.OpEqPtr, Types[TBOOL], x, nilValue)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchUnlikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// length/capacity of a nil map/chan is zero
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	s.vars[n] = s.zeroVal(lenType)
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	if n.Op == OLEN {
		// length is stored in the first word for map/chan
		s.vars[n] = s.newValue2(ssa.OpLoad, lenType, x, s.mem())
	} else if n.Op == OCAP {
		// capacity is stored in the second word for chan
		sw := s.newValue1I(ssa.OpOffPtr, lenType.PtrTo(), lenType.Width, x)
		s.vars[n] = s.newValue2(ssa.OpLoad, lenType, sw, s.mem())
	} else {
		s.Fatalf("op must be OLEN or OCAP")
	}
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, lenType)
}
   3906 
// f2uCvtTab parameterizes floatToUint over the source float width and the
// destination unsigned width: comparison/conversion/subtraction opcodes,
// constant constructors, and the cutoff (2^(destbits-1)) above which the
// value no longer fits in a signed conversion.
type f2uCvtTab struct {
	ltf, cvt2U, subf, or ssa.Op
	floatValue           func(*state, ssa.Type, float64) *ssa.Value
	intValue             func(*state, ssa.Type, int64) *ssa.Value
	cutoff               uint64
}

// f32_u64: float32 -> uint64; cutoff is 2^63.
var f32_u64 f2uCvtTab = f2uCvtTab{
	ltf:        ssa.OpLess32F,
	cvt2U:      ssa.OpCvt32Fto64,
	subf:       ssa.OpSub32F,
	or:         ssa.OpOr64,
	floatValue: (*state).constFloat32,
	intValue:   (*state).constInt64,
	cutoff:     9223372036854775808,
}

// f64_u64: float64 -> uint64; cutoff is 2^63.
var f64_u64 f2uCvtTab = f2uCvtTab{
	ltf:        ssa.OpLess64F,
	cvt2U:      ssa.OpCvt64Fto64,
	subf:       ssa.OpSub64F,
	or:         ssa.OpOr64,
	floatValue: (*state).constFloat64,
	intValue:   (*state).constInt64,
	cutoff:     9223372036854775808,
}

// f32_u32: float32 -> uint32; cutoff is 2^31.
var f32_u32 f2uCvtTab = f2uCvtTab{
	ltf:        ssa.OpLess32F,
	cvt2U:      ssa.OpCvt32Fto32,
	subf:       ssa.OpSub32F,
	or:         ssa.OpOr32,
	floatValue: (*state).constFloat32,
	intValue:   func(s *state, t ssa.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
	cutoff:     2147483648,
}

// f64_u32: float64 -> uint32; cutoff is 2^31.
var f64_u32 f2uCvtTab = f2uCvtTab{
	ltf:        ssa.OpLess64F,
	cvt2U:      ssa.OpCvt64Fto32,
	subf:       ssa.OpSub64F,
	or:         ssa.OpOr32,
	floatValue: (*state).constFloat64,
	intValue:   func(s *state, t ssa.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
	cutoff:     2147483648,
}
   3953 
// float32ToUint64 converts float32 value x (for node n) to uint64.
func (s *state) float32ToUint64(n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value {
	return s.floatToUint(&f32_u64, n, x, ft, tt)
}
// float64ToUint64 converts float64 value x (for node n) to uint64.
func (s *state) float64ToUint64(n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value {
	return s.floatToUint(&f64_u64, n, x, ft, tt)
}
   3960 
// float32ToUint32 converts float32 value x (for node n) to uint32.
func (s *state) float32ToUint32(n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value {
	return s.floatToUint(&f32_u32, n, x, ft, tt)
}
   3964 
// float64ToUint32 converts float64 value x (for node n) to uint32.
func (s *state) float64ToUint32(n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value {
	return s.floatToUint(&f64_u32, n, x, ft, tt)
}
   3968 
// floatToUint converts float value x of type ft (for node n) to an
// unsigned integer of type tt, using the ops in cvttab. The underlying
// conversion op is signed, so values at or above the cutoff 2^(width-1)
// are handled by subtracting the cutoff before converting and then
// or-ing the high bit back in.
func (s *state) floatToUint(cvttab *f2uCvtTab, n *Node, x *ssa.Value, ft, tt *Type) *ssa.Value {
	// cutoff:=1<<(intY_Size-1)
	// if x < floatX(cutoff) {
	// 	result = uintY(x)
	// } else {
	// 	y = x - floatX(cutoff)
	// 	z = uintY(y)
	// 	result = z | -(cutoff)
	// }
	cutoff := cvttab.floatValue(s, ft, float64(cvttab.cutoff))
	cmp := s.newValue2(cvttab.ltf, Types[TBOOL], x, cutoff)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// In range: a plain conversion is correct.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvt2U, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// Out of signed range: convert x-cutoff, then set the high bit.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	y := s.newValue2(cvttab.subf, ft, x, cutoff)
	y = s.newValue1(cvttab.cvt2U, tt, y)
	z := cvttab.intValue(s, tt, int64(-cvttab.cutoff))
	a1 := s.newValue2(cvttab.or, tt, y, z)
	s.vars[n] = a1
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type)
}
   4009 
// ifaceType returns the value for the word containing the type.
// t is the type of the interface expression.
// v is the corresponding value.
// For a nonempty interface with a nil itab, the result is nil.
func (s *state) ifaceType(t *Type, v *ssa.Value) *ssa.Value {
	byteptr := ptrto(Types[TUINT8]) // type used in runtime prototypes for runtime type (*byte)

	if t.IsEmptyInterface() {
		// Have eface. The type is the first word in the struct.
		return s.newValue1(ssa.OpITab, byteptr, v)
	}

	// Have iface.
	// The first word in the struct is the itab.
	// If the itab is nil, return 0.
	// Otherwise, the second word in the itab is the type.

	tab := s.newValue1(ssa.OpITab, byteptr, v)
	s.vars[&typVar] = tab
	isnonnil := s.newValue2(ssa.OpNeqPtr, Types[TBOOL], tab, s.constNil(byteptr))
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(isnonnil)
	b.Likely = ssa.BranchLikely

	bLoad := s.f.NewBlock(ssa.BlockPlain)
	bEnd := s.f.NewBlock(ssa.BlockPlain)

	b.AddEdgeTo(bLoad)
	b.AddEdgeTo(bEnd)
	bLoad.AddEdgeTo(bEnd)

	// Non-nil itab: load the type word at itab+Widthptr.
	s.startBlock(bLoad)
	off := s.newValue1I(ssa.OpOffPtr, byteptr, int64(Widthptr), tab)
	s.vars[&typVar] = s.newValue2(ssa.OpLoad, byteptr, off, s.mem())
	s.endBlock()

	// Merge point; on the nil path typVar still holds the nil tab.
	s.startBlock(bEnd)
	typ := s.variable(&typVar, byteptr)
	delete(s.vars, &typVar)
	return typ
}
   4051 
   4052 // dottype generates SSA for a type assertion node.
   4053 // commaok indicates whether to panic or return a bool.
   4054 // If commaok is false, resok will be nil.
   4055 func (s *state) dottype(n *Node, commaok bool) (res, resok *ssa.Value) {
   4056 	iface := s.expr(n.Left)            // input interface
   4057 	target := s.expr(typename(n.Type)) // target type
   4058 	byteptr := ptrto(Types[TUINT8])
   4059 
   4060 	if n.Type.IsInterface() {
   4061 		if n.Type.IsEmptyInterface() {
   4062 			// Converting to an empty interface.
   4063 			// Input could be an empty or nonempty interface.
   4064 			if Debug_typeassert > 0 {
   4065 				Warnl(n.Lineno, "type assertion inlined")
   4066 			}
   4067 
   4068 			// Get itab/type field from input.
   4069 			itab := s.newValue1(ssa.OpITab, byteptr, iface)
   4070 			// Conversion succeeds iff that field is not nil.
   4071 			cond := s.newValue2(ssa.OpNeqPtr, Types[TBOOL], itab, s.constNil(byteptr))
   4072 
   4073 			if n.Left.Type.IsEmptyInterface() && commaok {
   4074 				// Converting empty interface to empty interface with ,ok is just a nil check.
   4075 				return iface, cond
   4076 			}
   4077 
   4078 			// Branch on nilness.
   4079 			b := s.endBlock()
   4080 			b.Kind = ssa.BlockIf
   4081 			b.SetControl(cond)
   4082 			b.Likely = ssa.BranchLikely
   4083 			bOk := s.f.NewBlock(ssa.BlockPlain)
   4084 			bFail := s.f.NewBlock(ssa.BlockPlain)
   4085 			b.AddEdgeTo(bOk)
   4086 			b.AddEdgeTo(bFail)
   4087 
   4088 			if !commaok {
   4089 				// On failure, panic by calling panicnildottype.
   4090 				s.startBlock(bFail)
   4091 				s.rtcall(panicnildottype, false, nil, target)
   4092 
   4093 				// On success, return (perhaps modified) input interface.
   4094 				s.startBlock(bOk)
   4095 				if n.Left.Type.IsEmptyInterface() {
   4096 					res = iface // Use input interface unchanged.
   4097 					return
   4098 				}
   4099 				// Load type out of itab, build interface with existing idata.
   4100 				off := s.newValue1I(ssa.OpOffPtr, byteptr, int64(Widthptr), itab)
   4101 				typ := s.newValue2(ssa.OpLoad, byteptr, off, s.mem())
   4102 				idata := s.newValue1(ssa.OpIData, n.Type, iface)
   4103 				res = s.newValue2(ssa.OpIMake, n.Type, typ, idata)
   4104 				return
   4105 			}
   4106 
   4107 			s.startBlock(bOk)
   4108 			// nonempty -> empty
   4109 			// Need to load type from itab
   4110 			off := s.newValue1I(ssa.OpOffPtr, byteptr, int64(Widthptr), itab)
   4111 			s.vars[&typVar] = s.newValue2(ssa.OpLoad, byteptr, off, s.mem())
   4112 			s.endBlock()
   4113 
   4114 			// itab is nil, might as well use that as the nil result.
   4115 			s.startBlock(bFail)
   4116 			s.vars[&typVar] = itab
   4117 			s.endBlock()
   4118 
   4119 			// Merge point.
   4120 			bEnd := s.f.NewBlock(ssa.BlockPlain)
   4121 			bOk.AddEdgeTo(bEnd)
   4122 			bFail.AddEdgeTo(bEnd)
   4123 			s.startBlock(bEnd)
   4124 			idata := s.newValue1(ssa.OpIData, n.Type, iface)
   4125 			res = s.newValue2(ssa.OpIMake, n.Type, s.variable(&typVar, byteptr), idata)
   4126 			resok = cond
   4127 			delete(s.vars, &typVar)
   4128 			return
   4129 		}
   4130 		// converting to a nonempty interface needs a runtime call.
   4131 		if Debug_typeassert > 0 {
   4132 			Warnl(n.Lineno, "type assertion not inlined")
   4133 		}
   4134 		if n.Left.Type.IsEmptyInterface() {
   4135 			if commaok {
   4136 				call := s.rtcall(assertE2I2, true, []*Type{n.Type, Types[TBOOL]}, target, iface)
   4137 				return call[0], call[1]
   4138 			}
   4139 			return s.rtcall(assertE2I, true, []*Type{n.Type}, target, iface)[0], nil
   4140 		}
   4141 		if commaok {
   4142 			call := s.rtcall(assertI2I2, true, []*Type{n.Type, Types[TBOOL]}, target, iface)
   4143 			return call[0], call[1]
   4144 		}
   4145 		return s.rtcall(assertI2I, true, []*Type{n.Type}, target, iface)[0], nil
   4146 	}
   4147 
   4148 	if Debug_typeassert > 0 {
   4149 		Warnl(n.Lineno, "type assertion inlined")
   4150 	}
   4151 
   4152 	// Converting to a concrete type.
   4153 	direct := isdirectiface(n.Type)
   4154 	typ := s.ifaceType(n.Left.Type, iface) // actual concrete type of input interface
   4155 
   4156 	if Debug_typeassert > 0 {
   4157 		Warnl(n.Lineno, "type assertion inlined")
   4158 	}
   4159 
   4160 	var tmp *Node       // temporary for use with large types
   4161 	var addr *ssa.Value // address of tmp
   4162 	if commaok && !canSSAType(n.Type) {
   4163 		// unSSAable type, use temporary.
   4164 		// TODO: get rid of some of these temporaries.
   4165 		tmp = temp(n.Type)
   4166 		addr, _ = s.addr(tmp, false)
   4167 		s.vars[&memVar] = s.newValue1A(ssa.OpVarDef, ssa.TypeMem, tmp, s.mem())
   4168 	}
   4169 
   4170 	// TODO:  If we have a nonempty interface and its itab field is nil,
   4171 	// then this test is redundant and ifaceType should just branch directly to bFail.
   4172 	cond := s.newValue2(ssa.OpEqPtr, Types[TBOOL], typ, target)
   4173 	b := s.endBlock()
   4174 	b.Kind = ssa.BlockIf
   4175 	b.SetControl(cond)
   4176 	b.Likely = ssa.BranchLikely
   4177 
   4178 	bOk := s.f.NewBlock(ssa.BlockPlain)
   4179 	bFail := s.f.NewBlock(ssa.BlockPlain)
   4180 	b.AddEdgeTo(bOk)
   4181 	b.AddEdgeTo(bFail)
   4182 
   4183 	if !commaok {
   4184 		// on failure, panic by calling panicdottype
   4185 		s.startBlock(bFail)
   4186 		taddr := s.newValue1A(ssa.OpAddr, byteptr, &ssa.ExternSymbol{Typ: byteptr, Sym: typenamesym(n.Left.Type)}, s.sb)
   4187 		s.rtcall(panicdottype, false, nil, typ, target, taddr)
   4188 
   4189 		// on success, return data from interface
   4190 		s.startBlock(bOk)
   4191 		if direct {
   4192 			return s.newValue1(ssa.OpIData, n.Type, iface), nil
   4193 		}
   4194 		p := s.newValue1(ssa.OpIData, ptrto(n.Type), iface)
   4195 		return s.newValue2(ssa.OpLoad, n.Type, p, s.mem()), nil
   4196 	}
   4197 
   4198 	// commaok is the more complicated case because we have
   4199 	// a control flow merge point.
   4200 	bEnd := s.f.NewBlock(ssa.BlockPlain)
   4201 	// Note that we need a new valVar each time (unlike okVar where we can
   4202 	// reuse the variable) because it might have a different type every time.
   4203 	valVar := &Node{Op: ONAME, Class: Pxxx, Sym: &Sym{Name: "val"}}
   4204 
   4205 	// type assertion succeeded
   4206 	s.startBlock(bOk)
   4207 	if tmp == nil {
   4208 		if direct {
   4209 			s.vars[valVar] = s.newValue1(ssa.OpIData, n.Type, iface)
   4210 		} else {
   4211 			p := s.newValue1(ssa.OpIData, ptrto(n.Type), iface)
   4212 			s.vars[valVar] = s.newValue2(ssa.OpLoad, n.Type, p, s.mem())
   4213 		}
   4214 	} else {
   4215 		p := s.newValue1(ssa.OpIData, ptrto(n.Type), iface)
   4216 		s.vars[&memVar] = s.newValue3I(ssa.OpMove, ssa.TypeMem, sizeAlignAuxInt(n.Type), addr, p, s.mem())
   4217 	}
   4218 	s.vars[&okVar] = s.constBool(true)
   4219 	s.endBlock()
   4220 	bOk.AddEdgeTo(bEnd)
   4221 
   4222 	// type assertion failed
   4223 	s.startBlock(bFail)
   4224 	if tmp == nil {
   4225 		s.vars[valVar] = s.zeroVal(n.Type)
   4226 	} else {
   4227 		s.vars[&memVar] = s.newValue2I(ssa.OpZero, ssa.TypeMem, sizeAlignAuxInt(n.Type), addr, s.mem())
   4228 	}
   4229 	s.vars[&okVar] = s.constBool(false)
   4230 	s.endBlock()
   4231 	bFail.AddEdgeTo(bEnd)
   4232 
   4233 	// merge point
   4234 	s.startBlock(bEnd)
   4235 	if tmp == nil {
   4236 		res = s.variable(valVar, n.Type)
   4237 		delete(s.vars, valVar)
   4238 	} else {
   4239 		res = s.newValue2(ssa.OpLoad, n.Type, addr, s.mem())
   4240 		s.vars[&memVar] = s.newValue1A(ssa.OpVarKill, ssa.TypeMem, tmp, s.mem())
   4241 	}
   4242 	resok = s.variable(&okVar, Types[TBOOL])
   4243 	delete(s.vars, &okVar)
   4244 	return res, resok
   4245 }
   4246 
// checkgoto checks that a goto from from to to does not
// jump into a block or jump over variable declarations.
// It is a copy of checkgoto in the pre-SSA backend,
// modified only for line number handling.
// TODO: document how this works and why it is designed the way it is.
func (s *state) checkgoto(from *Node, to *Node) {
	if from.Sym == to.Sym {
		return
	}

	// Measure the depth of each label's Sym chain.
	nf := 0
	for fs := from.Sym; fs != nil; fs = fs.Link {
		nf++
	}
	nt := 0
	for fs := to.Sym; fs != nil; fs = fs.Link {
		nt++
	}
	// Advance the deeper (source) chain until both are the same depth.
	fs := from.Sym
	for ; nf > nt; nf-- {
		fs = fs.Link
	}
	if fs != to.Sym {
		// decide what to complain about.
		// prefer to complain about 'into block' over declarations,
		// so scan backward to find most recent block or else dcl.
		var block *Sym

		var dcl *Sym
		ts := to.Sym
		for ; nt > nf; nt-- {
			// A Sym with nil Pkg marks a block scope; otherwise a declaration.
			if ts.Pkg == nil {
				block = ts
			} else {
				dcl = ts
			}
			ts = ts.Link
		}

		// Walk both chains in lockstep until they meet.
		for ts != fs {
			if ts.Pkg == nil {
				block = ts
			} else {
				dcl = ts
			}
			ts = ts.Link
			fs = fs.Link
		}

		lno := from.Left.Lineno
		if block != nil {
			yyerrorl(lno, "goto %v jumps into block starting at %v", from.Left.Sym, linestr(block.Lastlineno))
		} else {
			yyerrorl(lno, "goto %v jumps over declaration of %v at %v", from.Left.Sym, dcl, linestr(dcl.Lastlineno))
		}
	}
}
   4304 
   4305 // variable returns the value of a variable at the current location.
   4306 func (s *state) variable(name *Node, t ssa.Type) *ssa.Value {
   4307 	v := s.vars[name]
   4308 	if v != nil {
   4309 		return v
   4310 	}
   4311 	v = s.fwdVars[name]
   4312 	if v != nil {
   4313 		return v
   4314 	}
   4315 
   4316 	if s.curBlock == s.f.Entry {
   4317 		// No variable should be live at entry.
   4318 		s.Fatalf("Value live at entry. It shouldn't be. func %s, node %v, value %v", s.f.Name, name, v)
   4319 	}
   4320 	// Make a FwdRef, which records a value that's live on block input.
   4321 	// We'll find the matching definition as part of insertPhis.
   4322 	v = s.newValue0A(ssa.OpFwdRef, t, name)
   4323 	s.fwdVars[name] = v
   4324 	s.addNamedValue(name, v)
   4325 	return v
   4326 }
   4327 
// mem returns the current memory state, tracked like an ordinary variable.
func (s *state) mem() *ssa.Value {
	return s.variable(&memVar, ssa.TypeMem)
}
   4331 
   4332 func (s *state) addNamedValue(n *Node, v *ssa.Value) {
   4333 	if n.Class == Pxxx {
   4334 		// Don't track our dummy nodes (&memVar etc.).
   4335 		return
   4336 	}
   4337 	if n.IsAutoTmp() {
   4338 		// Don't track temporary variables.
   4339 		return
   4340 	}
   4341 	if n.Class == PPARAMOUT {
   4342 		// Don't track named output values.  This prevents return values
   4343 		// from being assigned too early. See #14591 and #14762. TODO: allow this.
   4344 		return
   4345 	}
   4346 	if n.Class == PAUTO && n.Xoffset != 0 {
   4347 		s.Fatalf("AUTO var with offset %v %d", n, n.Xoffset)
   4348 	}
   4349 	loc := ssa.LocalSlot{N: n, Type: n.Type, Off: 0}
   4350 	values, ok := s.f.NamedValues[loc]
   4351 	if !ok {
   4352 		s.f.Names = append(s.f.Names, loc)
   4353 	}
   4354 	s.f.NamedValues[loc] = append(values, v)
   4355 }
   4356 
// Branch is an unresolved branch: a branch instruction whose target block
// has not been emitted yet, so its destination Prog is not yet known.
// genssa resolves all Branches after every block is emitted.
type Branch struct {
	P *obj.Prog  // branch instruction
	B *ssa.Block // target
}
   4362 
// SSAGenState contains state needed during Prog generation.
type SSAGenState struct {
	// Branches remembers all the branch instructions we've seen
	// and where they would like to go.
	Branches []Branch

	// bstart remembers where each block starts (indexed by block ID)
	bstart []*obj.Prog

	// 387 port: maps from SSE registers (REG_X?) to 387 registers (REG_F?)
	SSEto387 map[int16]int16
	// Some architectures require a 64-bit temporary for FP-related register shuffling. Examples include x86-387, PPC, and Sparc V8.
	ScratchFpMem *Node
}
   4377 
// Pc returns the current Prog (the package-level instruction cursor).
func (s *SSAGenState) Pc() *obj.Prog {
	return pc
}
   4382 
// SetLineno sets the current source line number (the package-level lineno).
func (s *SSAGenState) SetLineno(l int32) {
	lineno = l
}
   4387 
   4388 // genssa appends entries to ptxt for each instruction in f.
   4389 // gcargs and gclocals are filled in with pointer maps for the frame.
   4390 func genssa(f *ssa.Func, ptxt *obj.Prog, gcargs, gclocals *Sym) {
   4391 	var s SSAGenState
   4392 
   4393 	e := f.Config.Frontend().(*ssaExport)
   4394 
   4395 	// Remember where each block starts.
   4396 	s.bstart = make([]*obj.Prog, f.NumBlocks())
   4397 
   4398 	var valueProgs map[*obj.Prog]*ssa.Value
   4399 	var blockProgs map[*obj.Prog]*ssa.Block
   4400 	var logProgs = e.log
   4401 	if logProgs {
   4402 		valueProgs = make(map[*obj.Prog]*ssa.Value, f.NumValues())
   4403 		blockProgs = make(map[*obj.Prog]*ssa.Block, f.NumBlocks())
   4404 		f.Logf("genssa %s\n", f.Name)
   4405 		blockProgs[pc] = f.Blocks[0]
   4406 	}
   4407 
   4408 	if Thearch.Use387 {
   4409 		s.SSEto387 = map[int16]int16{}
   4410 	}
   4411 
   4412 	s.ScratchFpMem = scratchFpMem
   4413 	scratchFpMem = nil
   4414 
   4415 	// Emit basic blocks
   4416 	for i, b := range f.Blocks {
   4417 		s.bstart[b.ID] = pc
   4418 		// Emit values in block
   4419 		Thearch.SSAMarkMoves(&s, b)
   4420 		for _, v := range b.Values {
   4421 			x := pc
   4422 			Thearch.SSAGenValue(&s, v)
   4423 			if logProgs {
   4424 				for ; x != pc; x = x.Link {
   4425 					valueProgs[x] = v
   4426 				}
   4427 			}
   4428 		}
   4429 		// Emit control flow instructions for block
   4430 		var next *ssa.Block
   4431 		if i < len(f.Blocks)-1 && Debug['N'] == 0 {
   4432 			// If -N, leave next==nil so every block with successors
   4433 			// ends in a JMP (except call blocks - plive doesn't like
   4434 			// select{send,recv} followed by a JMP call).  Helps keep
   4435 			// line numbers for otherwise empty blocks.
   4436 			next = f.Blocks[i+1]
   4437 		}
   4438 		x := pc
   4439 		Thearch.SSAGenBlock(&s, b, next)
   4440 		if logProgs {
   4441 			for ; x != pc; x = x.Link {
   4442 				blockProgs[x] = b
   4443 			}
   4444 		}
   4445 	}
   4446 
   4447 	// Resolve branches
   4448 	for _, br := range s.Branches {
   4449 		br.P.To.Val = s.bstart[br.B.ID]
   4450 	}
   4451 
   4452 	if logProgs {
   4453 		for p := ptxt; p != nil; p = p.Link {
   4454 			var s string
   4455 			if v, ok := valueProgs[p]; ok {
   4456 				s = v.String()
   4457 			} else if b, ok := blockProgs[p]; ok {
   4458 				s = b.String()
   4459 			} else {
   4460 				s = "   " // most value and branch strings are 2-3 characters long
   4461 			}
   4462 			f.Logf("%s\t%s\n", s, p)
   4463 		}
   4464 		if f.Config.HTML != nil {
   4465 			saved := ptxt.Ctxt.LineHist.PrintFilenameOnly
   4466 			ptxt.Ctxt.LineHist.PrintFilenameOnly = true
   4467 			var buf bytes.Buffer
   4468 			buf.WriteString("<code>")
   4469 			buf.WriteString("<dl class=\"ssa-gen\">")
   4470 			for p := ptxt; p != nil; p = p.Link {
   4471 				buf.WriteString("<dt class=\"ssa-prog-src\">")
   4472 				if v, ok := valueProgs[p]; ok {
   4473 					buf.WriteString(v.HTML())
   4474 				} else if b, ok := blockProgs[p]; ok {
   4475 					buf.WriteString(b.HTML())
   4476 				}
   4477 				buf.WriteString("</dt>")
   4478 				buf.WriteString("<dd class=\"ssa-prog\">")
   4479 				buf.WriteString(html.EscapeString(p.String()))
   4480 				buf.WriteString("</dd>")
   4481 				buf.WriteString("</li>")
   4482 			}
   4483 			buf.WriteString("</dl>")
   4484 			buf.WriteString("</code>")
   4485 			f.Config.HTML.WriteColumn("genssa", buf.String())
   4486 			ptxt.Ctxt.LineHist.PrintFilenameOnly = saved
   4487 		}
   4488 	}
   4489 
   4490 	// Emit static data
   4491 	if f.StaticData != nil {
   4492 		for _, n := range f.StaticData.([]*Node) {
   4493 			if !gen_as_init(n, false) {
   4494 				Fatalf("non-static data marked as static: %v\n\n", n)
   4495 			}
   4496 		}
   4497 	}
   4498 
   4499 	// Generate gc bitmaps.
   4500 	liveness(Curfn, ptxt, gcargs, gclocals)
   4501 
   4502 	// Add frame prologue. Zero ambiguously live variables.
   4503 	Thearch.Defframe(ptxt)
   4504 	if Debug['f'] != 0 {
   4505 		frame(0)
   4506 	}
   4507 
   4508 	// Remove leftover instrumentation from the instruction stream.
   4509 	removevardef(ptxt)
   4510 
   4511 	f.Config.HTML.Close()
   4512 	f.Config.HTML = nil
   4513 }
   4514 
// FloatingEQNEJump describes one branch instruction used to lower a
// floating-point EQ/NE block: the jump opcode to emit and the index of
// the block successor it targets.
type FloatingEQNEJump struct {
	Jump  obj.As
	Index int
}
   4519 
   4520 func oneFPJump(b *ssa.Block, jumps *FloatingEQNEJump, likely ssa.BranchPrediction, branches []Branch) []Branch {
   4521 	p := Prog(jumps.Jump)
   4522 	p.To.Type = obj.TYPE_BRANCH
   4523 	to := jumps.Index
   4524 	branches = append(branches, Branch{p, b.Succs[to].Block()})
   4525 	if to == 1 {
   4526 		likely = -likely
   4527 	}
   4528 	// liblink reorders the instruction stream as it sees fit.
   4529 	// Pass along what we know so liblink can make use of it.
   4530 	// TODO: Once we've fully switched to SSA,
   4531 	// make liblink leave our output alone.
   4532 	switch likely {
   4533 	case ssa.BranchUnlikely:
   4534 		p.From.Type = obj.TYPE_CONST
   4535 		p.From.Offset = 0
   4536 	case ssa.BranchLikely:
   4537 		p.From.Type = obj.TYPE_CONST
   4538 		p.From.Offset = 1
   4539 	}
   4540 	return branches
   4541 }
   4542 
// SSAGenFPJump emits the two-branch instruction sequence for a
// floating-point EQ/NE block b. jumps[0] is used when the fallthrough
// block next is b's first successor, jumps[1] when it is the second;
// when neither successor falls through, an extra unconditional JMP to
// the second successor is appended.
func SSAGenFPJump(s *SSAGenState, b, next *ssa.Block, jumps *[2][2]FloatingEQNEJump) {
	likely := b.Likely
	switch next {
	case b.Succs[0].Block():
		s.Branches = oneFPJump(b, &jumps[0][0], likely, s.Branches)
		s.Branches = oneFPJump(b, &jumps[0][1], likely, s.Branches)
	case b.Succs[1].Block():
		s.Branches = oneFPJump(b, &jumps[1][0], likely, s.Branches)
		s.Branches = oneFPJump(b, &jumps[1][1], likely, s.Branches)
	default:
		s.Branches = oneFPJump(b, &jumps[1][0], likely, s.Branches)
		s.Branches = oneFPJump(b, &jumps[1][1], likely, s.Branches)
		q := Prog(obj.AJMP)
		q.To.Type = obj.TYPE_BRANCH
		s.Branches = append(s.Branches, Branch{q, b.Succs[1].Block()})
	}
}
   4560 
   4561 func AuxOffset(v *ssa.Value) (offset int64) {
   4562 	if v.Aux == nil {
   4563 		return 0
   4564 	}
   4565 	switch sym := v.Aux.(type) {
   4566 
   4567 	case *ssa.AutoSymbol:
   4568 		n := sym.Node.(*Node)
   4569 		return n.Xoffset
   4570 	}
   4571 	return 0
   4572 }
   4573 
// AddAux adds the offset in the aux fields (AuxInt and Aux) of v to a.
// It is shorthand for AddAux2 with offset v.AuxInt.
func AddAux(a *obj.Addr, v *ssa.Value) {
	AddAux2(a, v, v.AuxInt)
}
// AddAux2 adds offset plus the symbol information in v.Aux (if any) to
// the address operand a, which must be a memory or address operand.
func AddAux2(a *obj.Addr, v *ssa.Value, offset int64) {
	if a.Type != obj.TYPE_MEM && a.Type != obj.TYPE_ADDR {
		v.Fatalf("bad AddAux addr %v", a)
	}
	// add integer offset
	a.Offset += offset

	// If no additional symbol offset, we're done.
	if v.Aux == nil {
		return
	}
	// Add symbol's offset from its base register.
	switch sym := v.Aux.(type) {
	case *ssa.ExternSymbol:
		// Global symbol; sym.Sym may be either a gc *Sym or an obj.LSym.
		a.Name = obj.NAME_EXTERN
		switch s := sym.Sym.(type) {
		case *Sym:
			a.Sym = Linksym(s)
		case *obj.LSym:
			a.Sym = s
		default:
			v.Fatalf("ExternSymbol.Sym is %T", s)
		}
	case *ssa.ArgSymbol:
		// Function argument/parameter slot.
		n := sym.Node.(*Node)
		a.Name = obj.NAME_PARAM
		a.Node = n
		a.Sym = Linksym(n.Orig.Sym)
		a.Offset += n.Xoffset
	case *ssa.AutoSymbol:
		// Function-local (auto) slot.
		n := sym.Node.(*Node)
		a.Name = obj.NAME_AUTO
		a.Node = n
		a.Sym = Linksym(n.Sym)
		a.Offset += n.Xoffset
	default:
		v.Fatalf("aux in %s not implemented %#v", v, v.Aux)
	}
}
   4617 
// sizeAlignAuxInt returns an AuxInt encoding the size and alignment of
// type t, packed via ssa.MakeSizeAndAlign.
func sizeAlignAuxInt(t *Type) int64 {
	return ssa.MakeSizeAndAlign(t.Size(), t.Alignment()).Int64()
}
   4622 
   4623 // extendIndex extends v to a full int width.
   4624 // panic using the given function if v does not fit in an int (only on 32-bit archs).
   4625 func (s *state) extendIndex(v *ssa.Value, panicfn *Node) *ssa.Value {
   4626 	size := v.Type.Size()
   4627 	if size == s.config.IntSize {
   4628 		return v
   4629 	}
   4630 	if size > s.config.IntSize {
   4631 		// truncate 64-bit indexes on 32-bit pointer archs. Test the
   4632 		// high word and branch to out-of-bounds failure if it is not 0.
   4633 		if Debug['B'] == 0 {
   4634 			hi := s.newValue1(ssa.OpInt64Hi, Types[TUINT32], v)
   4635 			cmp := s.newValue2(ssa.OpEq32, Types[TBOOL], hi, s.constInt32(Types[TUINT32], 0))
   4636 			s.check(cmp, panicfn)
   4637 		}
   4638 		return s.newValue1(ssa.OpTrunc64to32, Types[TINT], v)
   4639 	}
   4640 
   4641 	// Extend value to the required size
   4642 	var op ssa.Op
   4643 	if v.Type.IsSigned() {
   4644 		switch 10*size + s.config.IntSize {
   4645 		case 14:
   4646 			op = ssa.OpSignExt8to32
   4647 		case 18:
   4648 			op = ssa.OpSignExt8to64
   4649 		case 24:
   4650 			op = ssa.OpSignExt16to32
   4651 		case 28:
   4652 			op = ssa.OpSignExt16to64
   4653 		case 48:
   4654 			op = ssa.OpSignExt32to64
   4655 		default:
   4656 			s.Fatalf("bad signed index extension %s", v.Type)
   4657 		}
   4658 	} else {
   4659 		switch 10*size + s.config.IntSize {
   4660 		case 14:
   4661 			op = ssa.OpZeroExt8to32
   4662 		case 18:
   4663 			op = ssa.OpZeroExt8to64
   4664 		case 24:
   4665 			op = ssa.OpZeroExt16to32
   4666 		case 28:
   4667 			op = ssa.OpZeroExt16to64
   4668 		case 48:
   4669 			op = ssa.OpZeroExt32to64
   4670 		default:
   4671 			s.Fatalf("bad unsigned index extension %s", v.Type)
   4672 		}
   4673 	}
   4674 	return s.newValue1(op, Types[TINT], v)
   4675 }
   4676 
   4677 // CheckLoweredPhi checks that regalloc and stackalloc correctly handled phi values.
   4678 // Called during ssaGenValue.
   4679 func CheckLoweredPhi(v *ssa.Value) {
   4680 	if v.Op != ssa.OpPhi {
   4681 		v.Fatalf("CheckLoweredPhi called with non-phi value: %v", v.LongString())
   4682 	}
   4683 	if v.Type.IsMemory() {
   4684 		return
   4685 	}
   4686 	f := v.Block.Func
   4687 	loc := f.RegAlloc[v.ID]
   4688 	for _, a := range v.Args {
   4689 		if aloc := f.RegAlloc[a.ID]; aloc != loc { // TODO: .Equal() instead?
   4690 			v.Fatalf("phi arg at different location than phi: %v @ %v, but arg %v @ %v\n%s\n", v, loc, a, aloc, v.Block.Func)
   4691 		}
   4692 	}
   4693 }
   4694 
   4695 // CheckLoweredGetClosurePtr checks that v is the first instruction in the function's entry block.
   4696 // The output of LoweredGetClosurePtr is generally hardwired to the correct register.
   4697 // That register contains the closure pointer on closure entry.
   4698 func CheckLoweredGetClosurePtr(v *ssa.Value) {
   4699 	entry := v.Block.Func.Entry
   4700 	if entry != v.Block || entry.Values[0] != v {
   4701 		Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
   4702 	}
   4703 }
   4704 
   4705 // KeepAlive marks the variable referenced by OpKeepAlive as live.
   4706 // Called during ssaGenValue.
   4707 func KeepAlive(v *ssa.Value) {
   4708 	if v.Op != ssa.OpKeepAlive {
   4709 		v.Fatalf("KeepAlive called with non-KeepAlive value: %v", v.LongString())
   4710 	}
   4711 	if !v.Args[0].Type.IsPtrShaped() {
   4712 		v.Fatalf("keeping non-pointer alive %v", v.Args[0])
   4713 	}
   4714 	n, _ := AutoVar(v.Args[0])
   4715 	if n == nil {
   4716 		v.Fatalf("KeepAlive with non-spilled value %s %s", v, v.Args[0])
   4717 	}
   4718 	// Note: KeepAlive arg may be a small part of a larger variable n.  We keep the
   4719 	// whole variable n alive at this point. (Typically, this happens when
   4720 	// we are requested to keep the idata portion of an interface{} alive, and
   4721 	// we end up keeping the whole interface{} alive.  That's ok.)
   4722 	Gvarlive(n)
   4723 }
   4724 
// AutoVar returns a *Node and int64 representing the auto variable and offset within it
// where v should be spilled. It requires that regalloc assigned v an
// ssa.LocalSlot large enough to hold v's type.
func AutoVar(v *ssa.Value) (*Node, int64) {
	loc := v.Block.Func.RegAlloc[v.ID].(ssa.LocalSlot)
	if v.Type.Size() > loc.Type.Size() {
		v.Fatalf("spill/restore type %s doesn't fit in slot type %s", v.Type, loc.Type)
	}
	return loc.N.(*Node), loc.Off
}
   4734 
   4735 func AddrAuto(a *obj.Addr, v *ssa.Value) {
   4736 	n, off := AutoVar(v)
   4737 	a.Type = obj.TYPE_MEM
   4738 	a.Node = n
   4739 	a.Sym = Linksym(n.Sym)
   4740 	a.Offset = n.Xoffset + off
   4741 	if n.Class == PPARAM || n.Class == PPARAMOUT {
   4742 		a.Name = obj.NAME_PARAM
   4743 	} else {
   4744 		a.Name = obj.NAME_AUTO
   4745 	}
   4746 }
   4747 
// AddrScratch sets a to refer to the function's scratch FP memory slot
// (ScratchFpMem), panicking if the current Op did not declare usesScratch.
func (s *SSAGenState) AddrScratch(a *obj.Addr) {
	if s.ScratchFpMem == nil {
		panic("no scratch memory available; forgot to declare usesScratch for Op?")
	}
	a.Type = obj.TYPE_MEM
	a.Name = obj.NAME_AUTO
	a.Node = s.ScratchFpMem
	a.Sym = Linksym(s.ScratchFpMem.Sym)
	a.Reg = int16(Thearch.REGSP)
	a.Offset = s.ScratchFpMem.Xoffset
}
   4759 
   4760 // fieldIdx finds the index of the field referred to by the ODOT node n.
   4761 func fieldIdx(n *Node) int {
   4762 	t := n.Left.Type
   4763 	f := n.Sym
   4764 	if !t.IsStruct() {
   4765 		panic("ODOT's LHS is not a struct")
   4766 	}
   4767 
   4768 	var i int
   4769 	for _, t1 := range t.Fields().Slice() {
   4770 		if t1.Sym != f {
   4771 			i++
   4772 			continue
   4773 		}
   4774 		if t1.Offset != n.Xoffset {
   4775 			panic("field offset doesn't match")
   4776 		}
   4777 		return i
   4778 	}
   4779 	panic(fmt.Sprintf("can't find field in expr %v\n", n))
   4780 
   4781 	// TODO: keep the result of this function somewhere in the ODOT Node
   4782 	// so we don't have to recompute it each time we need it.
   4783 }
   4784 
// ssaExport exports a bunch of compiler services for the ssa backend.
type ssaExport struct {
	log bool // emit progs/log output while generating (set from GOSSAFUNC match in buildssa)
}
   4789 
// The Type* methods expose gc's predeclared types to the ssa package,
// which cannot import package gc.
func (s *ssaExport) TypeBool() ssa.Type    { return Types[TBOOL] }
func (s *ssaExport) TypeInt8() ssa.Type    { return Types[TINT8] }
func (s *ssaExport) TypeInt16() ssa.Type   { return Types[TINT16] }
func (s *ssaExport) TypeInt32() ssa.Type   { return Types[TINT32] }
func (s *ssaExport) TypeInt64() ssa.Type   { return Types[TINT64] }
func (s *ssaExport) TypeUInt8() ssa.Type   { return Types[TUINT8] }
func (s *ssaExport) TypeUInt16() ssa.Type  { return Types[TUINT16] }
func (s *ssaExport) TypeUInt32() ssa.Type  { return Types[TUINT32] }
func (s *ssaExport) TypeUInt64() ssa.Type  { return Types[TUINT64] }
func (s *ssaExport) TypeFloat32() ssa.Type { return Types[TFLOAT32] }
func (s *ssaExport) TypeFloat64() ssa.Type { return Types[TFLOAT64] }
func (s *ssaExport) TypeInt() ssa.Type     { return Types[TINT] }
func (s *ssaExport) TypeUintptr() ssa.Type { return Types[TUINTPTR] }
func (s *ssaExport) TypeString() ssa.Type  { return Types[TSTRING] }
func (s *ssaExport) TypeBytePtr() ssa.Type { return ptrto(Types[TUINT8]) }
   4805 
   4806 // StringData returns a symbol (a *Sym wrapped in an interface) which
   4807 // is the data component of a global string constant containing s.
   4808 func (*ssaExport) StringData(s string) interface{} {
   4809 	// TODO: is idealstring correct?  It might not matter...
   4810 	data := stringsym(s)
   4811 	return &ssa.ExternSymbol{Typ: idealstring, Sym: data}
   4812 }
   4813 
   4814 func (e *ssaExport) Auto(t ssa.Type) ssa.GCNode {
   4815 	n := temp(t.(*Type)) // Note: adds new auto to Curfn.Func.Dcl list
   4816 	return n
   4817 }
   4818 
   4819 func (e *ssaExport) SplitString(name ssa.LocalSlot) (ssa.LocalSlot, ssa.LocalSlot) {
   4820 	n := name.N.(*Node)
   4821 	ptrType := ptrto(Types[TUINT8])
   4822 	lenType := Types[TINT]
   4823 	if n.Class == PAUTO && !n.Addrtaken {
   4824 		// Split this string up into two separate variables.
   4825 		p := e.namedAuto(n.Sym.Name+".ptr", ptrType)
   4826 		l := e.namedAuto(n.Sym.Name+".len", lenType)
   4827 		return ssa.LocalSlot{N: p, Type: ptrType, Off: 0}, ssa.LocalSlot{N: l, Type: lenType, Off: 0}
   4828 	}
   4829 	// Return the two parts of the larger variable.
   4830 	return ssa.LocalSlot{N: n, Type: ptrType, Off: name.Off}, ssa.LocalSlot{N: n, Type: lenType, Off: name.Off + int64(Widthptr)}
   4831 }
   4832 
   4833 func (e *ssaExport) SplitInterface(name ssa.LocalSlot) (ssa.LocalSlot, ssa.LocalSlot) {
   4834 	n := name.N.(*Node)
   4835 	t := ptrto(Types[TUINT8])
   4836 	if n.Class == PAUTO && !n.Addrtaken {
   4837 		// Split this interface up into two separate variables.
   4838 		f := ".itab"
   4839 		if n.Type.IsEmptyInterface() {
   4840 			f = ".type"
   4841 		}
   4842 		c := e.namedAuto(n.Sym.Name+f, t)
   4843 		d := e.namedAuto(n.Sym.Name+".data", t)
   4844 		return ssa.LocalSlot{N: c, Type: t, Off: 0}, ssa.LocalSlot{N: d, Type: t, Off: 0}
   4845 	}
   4846 	// Return the two parts of the larger variable.
   4847 	return ssa.LocalSlot{N: n, Type: t, Off: name.Off}, ssa.LocalSlot{N: n, Type: t, Off: name.Off + int64(Widthptr)}
   4848 }
   4849 
   4850 func (e *ssaExport) SplitSlice(name ssa.LocalSlot) (ssa.LocalSlot, ssa.LocalSlot, ssa.LocalSlot) {
   4851 	n := name.N.(*Node)
   4852 	ptrType := ptrto(name.Type.ElemType().(*Type))
   4853 	lenType := Types[TINT]
   4854 	if n.Class == PAUTO && !n.Addrtaken {
   4855 		// Split this slice up into three separate variables.
   4856 		p := e.namedAuto(n.Sym.Name+".ptr", ptrType)
   4857 		l := e.namedAuto(n.Sym.Name+".len", lenType)
   4858 		c := e.namedAuto(n.Sym.Name+".cap", lenType)
   4859 		return ssa.LocalSlot{N: p, Type: ptrType, Off: 0}, ssa.LocalSlot{N: l, Type: lenType, Off: 0}, ssa.LocalSlot{N: c, Type: lenType, Off: 0}
   4860 	}
   4861 	// Return the three parts of the larger variable.
   4862 	return ssa.LocalSlot{N: n, Type: ptrType, Off: name.Off},
   4863 		ssa.LocalSlot{N: n, Type: lenType, Off: name.Off + int64(Widthptr)},
   4864 		ssa.LocalSlot{N: n, Type: lenType, Off: name.Off + int64(2*Widthptr)}
   4865 }
   4866 
   4867 func (e *ssaExport) SplitComplex(name ssa.LocalSlot) (ssa.LocalSlot, ssa.LocalSlot) {
   4868 	n := name.N.(*Node)
   4869 	s := name.Type.Size() / 2
   4870 	var t *Type
   4871 	if s == 8 {
   4872 		t = Types[TFLOAT64]
   4873 	} else {
   4874 		t = Types[TFLOAT32]
   4875 	}
   4876 	if n.Class == PAUTO && !n.Addrtaken {
   4877 		// Split this complex up into two separate variables.
   4878 		c := e.namedAuto(n.Sym.Name+".real", t)
   4879 		d := e.namedAuto(n.Sym.Name+".imag", t)
   4880 		return ssa.LocalSlot{N: c, Type: t, Off: 0}, ssa.LocalSlot{N: d, Type: t, Off: 0}
   4881 	}
   4882 	// Return the two parts of the larger variable.
   4883 	return ssa.LocalSlot{N: n, Type: t, Off: name.Off}, ssa.LocalSlot{N: n, Type: t, Off: name.Off + s}
   4884 }
   4885 
   4886 func (e *ssaExport) SplitInt64(name ssa.LocalSlot) (ssa.LocalSlot, ssa.LocalSlot) {
   4887 	n := name.N.(*Node)
   4888 	var t *Type
   4889 	if name.Type.IsSigned() {
   4890 		t = Types[TINT32]
   4891 	} else {
   4892 		t = Types[TUINT32]
   4893 	}
   4894 	if n.Class == PAUTO && !n.Addrtaken {
   4895 		// Split this int64 up into two separate variables.
   4896 		h := e.namedAuto(n.Sym.Name+".hi", t)
   4897 		l := e.namedAuto(n.Sym.Name+".lo", Types[TUINT32])
   4898 		return ssa.LocalSlot{N: h, Type: t, Off: 0}, ssa.LocalSlot{N: l, Type: Types[TUINT32], Off: 0}
   4899 	}
   4900 	// Return the two parts of the larger variable.
   4901 	if Thearch.LinkArch.ByteOrder == binary.BigEndian {
   4902 		return ssa.LocalSlot{N: n, Type: t, Off: name.Off}, ssa.LocalSlot{N: n, Type: Types[TUINT32], Off: name.Off + 4}
   4903 	}
   4904 	return ssa.LocalSlot{N: n, Type: t, Off: name.Off + 4}, ssa.LocalSlot{N: n, Type: Types[TUINT32], Off: name.Off}
   4905 }
   4906 
   4907 func (e *ssaExport) SplitStruct(name ssa.LocalSlot, i int) ssa.LocalSlot {
   4908 	n := name.N.(*Node)
   4909 	st := name.Type
   4910 	ft := st.FieldType(i)
   4911 	if n.Class == PAUTO && !n.Addrtaken {
   4912 		// Note: the _ field may appear several times.  But
   4913 		// have no fear, identically-named but distinct Autos are
   4914 		// ok, albeit maybe confusing for a debugger.
   4915 		x := e.namedAuto(n.Sym.Name+"."+st.FieldName(i), ft)
   4916 		return ssa.LocalSlot{N: x, Type: ft, Off: 0}
   4917 	}
   4918 	return ssa.LocalSlot{N: n, Type: ft, Off: name.Off + st.FieldOff(i)}
   4919 }
   4920 
   4921 func (e *ssaExport) SplitArray(name ssa.LocalSlot) ssa.LocalSlot {
   4922 	n := name.N.(*Node)
   4923 	at := name.Type
   4924 	if at.NumElem() != 1 {
   4925 		Fatalf("bad array size")
   4926 	}
   4927 	et := at.ElemType()
   4928 	if n.Class == PAUTO && !n.Addrtaken {
   4929 		x := e.namedAuto(n.Sym.Name+"[0]", et)
   4930 		return ssa.LocalSlot{N: x, Type: et, Off: 0}
   4931 	}
   4932 	return ssa.LocalSlot{N: n, Type: et, Off: name.Off}
   4933 }
   4934 
   4935 // namedAuto returns a new AUTO variable with the given name and type.
   4936 // These are exposed to the debugger.
   4937 func (e *ssaExport) namedAuto(name string, typ ssa.Type) ssa.GCNode {
   4938 	t := typ.(*Type)
   4939 	s := &Sym{Name: name, Pkg: localpkg}
   4940 	n := nod(ONAME, nil, nil)
   4941 	s.Def = n
   4942 	s.Def.Used = true
   4943 	n.Sym = s
   4944 	n.Type = t
   4945 	n.Class = PAUTO
   4946 	n.Addable = true
   4947 	n.Ullman = 1
   4948 	n.Esc = EscNever
   4949 	n.Xoffset = 0
   4950 	n.Name.Curfn = Curfn
   4951 	Curfn.Func.Dcl = append(Curfn.Func.Dcl, n)
   4952 
   4953 	dowidth(t)
   4954 	return n
   4955 }
   4956 
// CanSSA reports whether values of type t can be represented as SSA values.
func (e *ssaExport) CanSSA(t ssa.Type) bool {
	return canSSAType(t.(*Type))
}
   4960 
// Line returns a string describing the given source line number.
func (e *ssaExport) Line(line int32) string {
	return linestr(line)
}
   4964 
   4965 // Log logs a message from the compiler.
   4966 func (e *ssaExport) Logf(msg string, args ...interface{}) {
   4967 	if e.log {
   4968 		fmt.Printf(msg, args...)
   4969 	}
   4970 }
   4971 
// Log reports whether compiler logging is enabled for the function
// being compiled.
func (e *ssaExport) Log() bool {
	return e.log
}
   4975 
// Fatalf records the offending line number and reports a fatal
// compiler error, which exits the compiler.
func (e *ssaExport) Fatalf(line int32, msg string, args ...interface{}) {
	lineno = line // so the error report points at the right source line
	Fatalf(msg, args...)
}
   4981 
// Warnl reports a "warning", which is usually flag-triggered
// logging output for the benefit of tests.
// It simply delegates to the package-level Warnl.
func (e *ssaExport) Warnl(line int32, fmt_ string, args ...interface{}) {
	Warnl(line, fmt_, args...)
}
   4987 
// Debug_checknil reports whether the compiler's nil-check debug flag is set.
func (e *ssaExport) Debug_checknil() bool {
	return Debug_checknil != 0
}
   4991 
// Debug_wb reports whether the compiler's write-barrier debug flag is set.
func (e *ssaExport) Debug_wb() bool {
	return Debug_wb != 0
}
   4995 
   4996 func (e *ssaExport) Syslook(name string) interface{} {
   4997 	return syslook(name).Sym
   4998 }
   4999 
// Typ returns n's type as an ssa.Type, letting the ssa backend query
// node types without depending on *Type directly.
func (n *Node) Typ() ssa.Type {
	return n.Type
}
   5003