// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package mips64

import (
	"math"

	"cmd/compile/internal/gc"
	"cmd/compile/internal/ssa"
	"cmd/compile/internal/types"
	"cmd/internal/obj"
	"cmd/internal/obj/mips"
)

// isFPreg reports whether r is an FP register.
func isFPreg(r int16) bool {
	return mips.REG_F0 <= r && r <= mips.REG_F31
}

// isHILO reports whether r is the HI or LO register.
func isHILO(r int16) bool {
	return r == mips.REG_HI || r == mips.REG_LO
}

// loadByType returns the load instruction of the given type.
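// For GP registers, the size and signedness of t select between the
// sign-extending (AMOVB/AMOVH/AMOVW) and zero-extending (AMOVBU/AMOVHU/AMOVWU)
// variants; 8-byte loads and FP loads need no extension.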
func loadByType(t *types.Type, r int16) obj.As {
	if isFPreg(r) {
		if t.Size() == 4 { // float32 or int32
			return mips.AMOVF
		} else { // float64 or int64
			return mips.AMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			if t.IsSigned() {
				return mips.AMOVB
			} else {
				return mips.AMOVBU
			}
		case 2:
			if t.IsSigned() {
				return mips.AMOVH
			} else {
				return mips.AMOVHU
			}
		case 4:
			if t.IsSigned() {
				return mips.AMOVW
			} else {
				return mips.AMOVWU
			}
		case 8:
			return mips.AMOVV
		}
	}
	panic("bad load type")
}

// storeByType returns the store instruction of the given type.
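// Unlike loads, stores have no unsigned variants: a store writes only the
// low t.Size() bytes, so signedness is irrelevant.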
func storeByType(t *types.Type, r int16) obj.As {
	if isFPreg(r) {
		if t.Size() == 4 { // float32 or int32
			return mips.AMOVF
		} else { // float64 or int64
			return mips.AMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			return mips.AMOVB
		case 2:
			return mips.AMOVH
		case 4:
			return mips.AMOVW
		case 8:
			return mips.AMOVV
		}
	}
	panic("bad store type")
}

func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
	switch v.Op {
	case ssa.OpCopy, ssa.OpMIPS64MOVVconvert, ssa.OpMIPS64MOVVreg:
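		// Register-to-register move. FP-to-FP copies use AMOVD, the
		// double-precision FP move, rather than the integer AMOVV.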
		if v.Type.IsMemory() {
			return
		}
		x := v.Args[0].Reg()
		y := v.Reg()
		if x == y {
			return
		}
		as := mips.AMOVV
		if isFPreg(x) && isFPreg(y) {
			as = mips.AMOVD
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = x
		p.To.Type = obj.TYPE_REG
		p.To.Reg = y
		if isHILO(x) && isHILO(y) || isHILO(x) && isFPreg(y) || isFPreg(x) && isHILO(y) {
			// cannot move between special registers, use TMP as intermediate
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = y
		}
	case ssa.OpMIPS64MOVVnop:
		if v.Reg() != v.Args[0].Reg() {
			v.Fatalf("input[0] and output not in same register %s", v.LongString())
		}
		// nothing to do
	case ssa.OpLoadReg:
		if v.Type.IsFlags() {
			v.Fatalf("load flags not implemented: %v", v.LongString())
			return
		}
		r := v.Reg()
		p := s.Prog(loadByType(v.Type, r))
		gc.AddrAuto(&p.From, v.Args[0])
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isHILO(r) {
			// cannot directly load, load to TMP and move
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpStoreReg:
		if v.Type.IsFlags() {
			v.Fatalf("store flags not implemented: %v", v.LongString())
			return
		}
		r := v.Args[0].Reg()
		if isHILO(r) {
			// cannot directly store, move to TMP and store
			p := s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = r
			p.To.Type = obj.TYPE_REG
			p.To.Reg = mips.REGTMP
			r = mips.REGTMP
		}
		p := s.Prog(storeByType(v.Type, r))
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r
		gc.AddrAuto(&p.To, v)
	case ssa.OpMIPS64ADDV,
		ssa.OpMIPS64SUBV,
		ssa.OpMIPS64AND,
		ssa.OpMIPS64OR,
		ssa.OpMIPS64XOR,
		ssa.OpMIPS64NOR,
		ssa.OpMIPS64SLLV,
		ssa.OpMIPS64SRLV,
		ssa.OpMIPS64SRAV,
		ssa.OpMIPS64ADDF,
		ssa.OpMIPS64ADDD,
		ssa.OpMIPS64SUBF,
		ssa.OpMIPS64SUBD,
		ssa.OpMIPS64MULF,
		ssa.OpMIPS64MULD,
		ssa.OpMIPS64DIVF,
		ssa.OpMIPS64DIVD:
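		// Three-register op: result = arg0 op arg1. In the obj.Prog
		// encoding, arg1 goes in From and arg0 in Reg (e.g. SUBV
		// computes Reg - From).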
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64SGT,
		ssa.OpMIPS64SGTU:
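		// SGT/SGTU set the result to 1 if arg0 > arg1, else 0. Note the
		// operand order is reversed relative to the case above: arg0 goes
		// in From and arg1 in Reg.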
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64ADDVconst,
		ssa.OpMIPS64SUBVconst,
		ssa.OpMIPS64ANDconst,
		ssa.OpMIPS64ORconst,
		ssa.OpMIPS64XORconst,
		ssa.OpMIPS64NORconst,
		ssa.OpMIPS64SLLVconst,
		ssa.OpMIPS64SRLVconst,
		ssa.OpMIPS64SRAVconst,
		ssa.OpMIPS64SGTconst,
		ssa.OpMIPS64SGTUconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64MULV,
		ssa.OpMIPS64MULVU,
		ssa.OpMIPS64DIVV,
		ssa.OpMIPS64DIVVU:
		// result in hi,lo
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
	case ssa.OpMIPS64MOVVconst:
		r := v.Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isFPreg(r) || isHILO(r) {
			// cannot move into FP or special registers, use TMP as intermediate
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpMIPS64MOVFconst,
		ssa.OpMIPS64MOVDconst:
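		// The constant is carried in AuxInt as an IEEE 754 bit pattern;
		// decode it back to a float64 for the FCONST operand.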
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_FCONST
		p.From.Val = math.Float64frombits(uint64(v.AuxInt))
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64CMPEQF,
		ssa.OpMIPS64CMPEQD,
		ssa.OpMIPS64CMPGEF,
		ssa.OpMIPS64CMPGED,
		ssa.OpMIPS64CMPGTF,
		ssa.OpMIPS64CMPGTD:
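		// FP comparisons produce no register result; they set the FP
		// condition flag, which is read by BFPT/BFPF (see FPFlagTrue
		// and FPFlagFalse below).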
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
	case ssa.OpMIPS64MOVVaddr:
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_ADDR
		p.From.Reg = v.Args[0].Reg()
		var wantreg string
		// MOVV $sym+off(base), R
		// the assembler expands it as follows:
		// - base is SP: add constant offset to SP (R29)
		//               when constant is large, tmp register (R23) may be used
		// - base is SB: load external address with relocation
		switch v.Aux.(type) {
		default:
			v.Fatalf("aux is of unknown type %T", v.Aux)
		case *obj.LSym:
			wantreg = "SB"
			gc.AddAux(&p.From, v)
		case *gc.Node:
			wantreg = "SP"
			gc.AddAux(&p.From, v)
		case nil:
			// No sym, just MOVV $off(SP), R
			wantreg = "SP"
			p.From.Offset = v.AuxInt
		}
		if reg := v.Args[0].RegName(); reg != wantreg {
			v.Fatalf("bad reg %s for symbol type %T, want %s", reg, v.Aux, wantreg)
		}
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64MOVBload,
		ssa.OpMIPS64MOVBUload,
		ssa.OpMIPS64MOVHload,
		ssa.OpMIPS64MOVHUload,
		ssa.OpMIPS64MOVWload,
		ssa.OpMIPS64MOVWUload,
		ssa.OpMIPS64MOVVload,
		ssa.OpMIPS64MOVFload,
		ssa.OpMIPS64MOVDload:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		gc.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64MOVBstore,
		ssa.OpMIPS64MOVHstore,
		ssa.OpMIPS64MOVWstore,
		ssa.OpMIPS64MOVVstore,
		ssa.OpMIPS64MOVFstore,
		ssa.OpMIPS64MOVDstore:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		gc.AddAux(&p.To, v)
	case ssa.OpMIPS64MOVBstorezero,
		ssa.OpMIPS64MOVHstorezero,
		ssa.OpMIPS64MOVWstorezero,
		ssa.OpMIPS64MOVVstorezero:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		gc.AddAux(&p.To, v)
	case ssa.OpMIPS64MOVBreg,
		ssa.OpMIPS64MOVBUreg,
		ssa.OpMIPS64MOVHreg,
		ssa.OpMIPS64MOVHUreg,
		ssa.OpMIPS64MOVWreg,
		ssa.OpMIPS64MOVWUreg:
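		// Sign/zero extension. If the argument is a load of the matching
		// width and signedness, it is already extended and the extension
		// can be elided (or reduced to a plain register move).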
		a := v.Args[0]
		for a.Op == ssa.OpCopy || a.Op == ssa.OpMIPS64MOVVreg {
			a = a.Args[0]
		}
		if a.Op == ssa.OpLoadReg {
			t := a.Type
			switch {
			case v.Op == ssa.OpMIPS64MOVBreg && t.Size() == 1 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVBUreg && t.Size() == 1 && !t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVHreg && t.Size() == 2 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVHUreg && t.Size() == 2 && !t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVWreg && t.Size() == 4 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVWUreg && t.Size() == 4 && !t.IsSigned():
				// arg is a properly-typed load, already zero/sign-extended, don't extend again
				if v.Reg() == v.Args[0].Reg() {
					return
				}
				p := s.Prog(mips.AMOVV)
				p.From.Type = obj.TYPE_REG
				p.From.Reg = v.Args[0].Reg()
				p.To.Type = obj.TYPE_REG
				p.To.Reg = v.Reg()
				return
			default:
			}
		}
		fallthrough
	case ssa.OpMIPS64MOVWF,
		ssa.OpMIPS64MOVWD,
		ssa.OpMIPS64TRUNCFW,
		ssa.OpMIPS64TRUNCDW,
		ssa.OpMIPS64MOVVF,
		ssa.OpMIPS64MOVVD,
		ssa.OpMIPS64TRUNCFV,
		ssa.OpMIPS64TRUNCDV,
		ssa.OpMIPS64MOVFD,
		ssa.OpMIPS64MOVDF,
		ssa.OpMIPS64NEGF,
		ssa.OpMIPS64NEGD:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64NEGV:
		// SUB from REGZERO
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64DUFFZERO:
		// runtime.duffzero expects start address - 8 in R1
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = 8
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p = s.Prog(obj.ADUFFZERO)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = gc.Duffzero
		p.To.Offset = v.AuxInt
	case ssa.OpMIPS64LoweredZero:
		// SUBV	$8, R1
		// MOVV	R0, 8(R1)
		// ADDV	$8, R1
		// BNE	Rarg1, R1, -2(PC)
		// arg1 is the address of the last element to zero
		var sz int64
		var mov obj.As
		switch {
		case v.AuxInt%8 == 0:
			sz = 8
			mov = mips.AMOVV
		case v.AuxInt%4 == 0:
			sz = 4
			mov = mips.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = mips.AMOVH
		default:
			sz = 1
			mov = mips.AMOVB
		}
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p2 := s.Prog(mov)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGZERO
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = mips.REG_R1
		p2.To.Offset = sz
		p3 := s.Prog(mips.AADDVU)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = sz
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = mips.REG_R1
		p4 := s.Prog(mips.ABNE)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Args[1].Reg()
		p4.Reg = mips.REG_R1
		p4.To.Type = obj.TYPE_BRANCH
		gc.Patch(p4, p2)
	case ssa.OpMIPS64LoweredMove:
		// SUBV	$8, R1
		// MOVV	8(R1), Rtmp
		// MOVV	Rtmp, (R2)
		// ADDV	$8, R1
		// ADDV	$8, R2
		// BNE	Rarg2, R1, -4(PC)
		// arg2 is the address of the last element of src
		var sz int64
		var mov obj.As
		switch {
		case v.AuxInt%8 == 0:
			sz = 8
			mov = mips.AMOVV
		case v.AuxInt%4 == 0:
			sz = 4
			mov = mips.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = mips.AMOVH
		default:
			sz = 1
			mov = mips.AMOVB
		}
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p2 := s.Prog(mov)
		p2.From.Type = obj.TYPE_MEM
		p2.From.Reg = mips.REG_R1
		p2.From.Offset = sz
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = mips.REGTMP
		p3 := s.Prog(mov)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_MEM
		p3.To.Reg = mips.REG_R2
		p4 := s.Prog(mips.AADDVU)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = sz
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = mips.REG_R1
		p5 := s.Prog(mips.AADDVU)
		p5.From.Type = obj.TYPE_CONST
		p5.From.Offset = sz
		p5.To.Type = obj.TYPE_REG
		p5.To.Reg = mips.REG_R2
		p6 := s.Prog(mips.ABNE)
		p6.From.Type = obj.TYPE_REG
		p6.From.Reg = v.Args[2].Reg()
		p6.Reg = mips.REG_R1
		p6.To.Type = obj.TYPE_BRANCH
		gc.Patch(p6, p2)
	case ssa.OpMIPS64CALLstatic, ssa.OpMIPS64CALLclosure, ssa.OpMIPS64CALLinter:
		s.Call(v)
	case ssa.OpMIPS64LoweredAtomicLoad32, ssa.OpMIPS64LoweredAtomicLoad64:
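		// SYNC
		// MOVV/MOVW	(Rarg0), Rout
		// SYNC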
		as := mips.AMOVV
		if v.Op == ssa.OpMIPS64LoweredAtomicLoad32 {
			as = mips.AMOVW
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(as)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		s.Prog(mips.ASYNC)
	case ssa.OpMIPS64LoweredAtomicStore32, ssa.OpMIPS64LoweredAtomicStore64:
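		// SYNC
		// MOVV/MOVW	Rarg1, (Rarg0)
		// SYNC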
		as := mips.AMOVV
		if v.Op == ssa.OpMIPS64LoweredAtomicStore32 {
			as = mips.AMOVW
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		s.Prog(mips.ASYNC)
	case ssa.OpMIPS64LoweredAtomicStorezero32, ssa.OpMIPS64LoweredAtomicStorezero64:
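		// SYNC
		// MOVV/MOVW	R0, (Rarg0)
		// SYNC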
		as := mips.AMOVV
		if v.Op == ssa.OpMIPS64LoweredAtomicStorezero32 {
			as = mips.AMOVW
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		s.Prog(mips.ASYNC)
	case ssa.OpMIPS64LoweredAtomicExchange32, ssa.OpMIPS64LoweredAtomicExchange64:
		// SYNC
		// MOVV	Rarg1, Rtmp
		// LL	(Rarg0), Rout
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// SYNC
		ll := mips.ALLV
		sc := mips.ASCV
		if v.Op == ssa.OpMIPS64LoweredAtomicExchange32 {
			ll = mips.ALL
			sc = mips.ASC
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP
		p1 := s.Prog(ll)
		p1.From.Type = obj.TYPE_MEM
		p1.From.Reg = v.Args[0].Reg()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = v.Reg0()
		p2 := s.Prog(sc)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()
		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		gc.Patch(p3, p)
		s.Prog(mips.ASYNC)
	case ssa.OpMIPS64LoweredAtomicAdd32, ssa.OpMIPS64LoweredAtomicAdd64:
		// SYNC
		// LL	(Rarg0), Rout
		// ADDV Rarg1, Rout, Rtmp
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// SYNC
		// ADDV Rarg1, Rout
		ll := mips.ALLV
		sc := mips.ASCV
		if v.Op == ssa.OpMIPS64LoweredAtomicAdd32 {
			ll = mips.ALL
			sc = mips.ASC
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(ll)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		p1 := s.Prog(mips.AADDVU)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = v.Args[1].Reg()
		p1.Reg = v.Reg0()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP
		p2 := s.Prog(sc)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()
		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		gc.Patch(p3, p)
		s.Prog(mips.ASYNC)
		p4 := s.Prog(mips.AADDVU)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Args[1].Reg()
		p4.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = v.Reg0()
	case ssa.OpMIPS64LoweredAtomicAddconst32, ssa.OpMIPS64LoweredAtomicAddconst64:
		// SYNC
		// LL	(Rarg0), Rout
		// ADDV $auxint, Rout, Rtmp
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// SYNC
		// ADDV $auxint, Rout
		ll := mips.ALLV
		sc := mips.ASCV
		if v.Op == ssa.OpMIPS64LoweredAtomicAddconst32 {
			ll = mips.ALL
			sc = mips.ASC
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(ll)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		p1 := s.Prog(mips.AADDVU)
		p1.From.Type = obj.TYPE_CONST
		p1.From.Offset = v.AuxInt
		p1.Reg = v.Reg0()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP
		p2 := s.Prog(sc)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()
		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		gc.Patch(p3, p)
		s.Prog(mips.ASYNC)
		p4 := s.Prog(mips.AADDVU)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = v.AuxInt
		p4.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = v.Reg0()
	case ssa.OpMIPS64LoweredAtomicCas32, ssa.OpMIPS64LoweredAtomicCas64:
		// MOVV $0, Rout
		// SYNC
		// LL	(Rarg0), Rtmp
		// BNE	Rtmp, Rarg1, 4(PC)
		// MOVV Rarg2, Rout
		// SC	Rout, (Rarg0)
		// BEQ	Rout, -4(PC)
		// SYNC
		ll := mips.ALLV
		sc := mips.ASCV
		if v.Op == ssa.OpMIPS64LoweredAtomicCas32 {
			ll = mips.ALL
			sc = mips.ASC
		}
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		s.Prog(mips.ASYNC)
		p1 := s.Prog(ll)
		p1.From.Type = obj.TYPE_MEM
		p1.From.Reg = v.Args[0].Reg()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP
		p2 := s.Prog(mips.ABNE)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = v.Args[1].Reg()
		p2.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_BRANCH
		p3 := s.Prog(mips.AMOVV)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = v.Args[2].Reg()
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = v.Reg0()
		p4 := s.Prog(sc)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_MEM
		p4.To.Reg = v.Args[0].Reg()
		p5 := s.Prog(mips.ABEQ)
		p5.From.Type = obj.TYPE_REG
		p5.From.Reg = v.Reg0()
		p5.To.Type = obj.TYPE_BRANCH
		gc.Patch(p5, p1)
		p6 := s.Prog(mips.ASYNC)
		gc.Patch(p2, p6)
	case ssa.OpMIPS64LoweredNilCheck:
		// Issue a load which will fault if arg is nil.
		p := s.Prog(mips.AMOVB)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		gc.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP
		if gc.Debug_checknil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
			gc.Warnl(v.Pos, "generated nil check")
		}
	case ssa.OpMIPS64FPFlagTrue,
		ssa.OpMIPS64FPFlagFalse:
		// MOVV	$0, r
		// BFPF	2(PC)
		// MOVV	$1, r
		branch := mips.ABFPF
		if v.Op == ssa.OpMIPS64FPFlagFalse {
			branch = mips.ABFPT
		}
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
		p2 := s.Prog(branch)
		p2.To.Type = obj.TYPE_BRANCH
		p3 := s.Prog(mips.AMOVV)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = 1
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = v.Reg()
		p4 := s.Prog(obj.ANOP) // not a machine instruction, for branch to land
		gc.Patch(p2, p4)
	case ssa.OpMIPS64LoweredGetClosurePtr:
		// Closure pointer is R22 (mips.REGCTXT).
		gc.CheckLoweredGetClosurePtr(v)
	case ssa.OpMIPS64LoweredGetCallerSP:
		// caller's SP is FixedFrameSize below the address of the first arg
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_ADDR
		p.From.Offset = -gc.Ctxt.FixedFrameSize()
		p.From.Name = obj.NAME_PARAM
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpClobber:
		// TODO: implement for clobberdead experiment. Nop is ok for now.
	default:
		v.Fatalf("genValue not implemented: %s", v.LongString())
	}
}

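// blockJump maps each conditional block kind to its branch instruction (asm)
// and the inverted instruction (invasm) used when the branch's taken
// successor is the fallthrough block.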
var blockJump = map[ssa.BlockKind]struct {
	asm, invasm obj.As
}{
	ssa.BlockMIPS64EQ:  {mips.ABEQ, mips.ABNE},
	ssa.BlockMIPS64NE:  {mips.ABNE, mips.ABEQ},
	ssa.BlockMIPS64LTZ: {mips.ABLTZ, mips.ABGEZ},
	ssa.BlockMIPS64GEZ: {mips.ABGEZ, mips.ABLTZ},
	ssa.BlockMIPS64LEZ: {mips.ABLEZ, mips.ABGTZ},
	ssa.BlockMIPS64GTZ: {mips.ABGTZ, mips.ABLEZ},
	ssa.BlockMIPS64FPT: {mips.ABFPT, mips.ABFPF},
	ssa.BlockMIPS64FPF: {mips.ABFPF, mips.ABFPT},
}

func ssaGenBlock(s *gc.SSAGenState, b, next *ssa.Block) {
	switch b.Kind {
	case ssa.BlockPlain:
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockDefer:
		// defer returns in R1:
		// 0 if we should continue executing
		// 1 if we should jump to deferreturn call
		p := s.Prog(mips.ABNE)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.Reg = mips.REG_R1
		p.To.Type = obj.TYPE_BRANCH
		s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[1].Block()})
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockExit:
		s.Prog(obj.AUNDEF) // tell plive.go that we never reach here
	case ssa.BlockRet:
		s.Prog(obj.ARET)
	case ssa.BlockRetJmp:
		p := s.Prog(obj.ARET)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = b.Aux.(*obj.LSym)
	case ssa.BlockMIPS64EQ, ssa.BlockMIPS64NE,
		ssa.BlockMIPS64LTZ, ssa.BlockMIPS64GEZ,
		ssa.BlockMIPS64LEZ, ssa.BlockMIPS64GTZ,
		ssa.BlockMIPS64FPT, ssa.BlockMIPS64FPF:
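		// Pick the branch form based on which successor, if either, is the
		// fallthrough block: invert the branch if succ0 falls through, use
		// it directly if succ1 falls through, and emit branch+jump otherwise.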
		jmp := blockJump[b.Kind]
		var p *obj.Prog
		switch next {
		case b.Succs[0].Block():
			p = s.Prog(jmp.invasm)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[1].Block()})
		case b.Succs[1].Block():
			p = s.Prog(jmp.asm)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
		default:
			p = s.Prog(jmp.asm)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
			q := s.Prog(obj.AJMP)
			q.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: q, B: b.Succs[1].Block()})
		}
		if !b.Control.Type.IsFlags() {
			p.From.Type = obj.TYPE_REG
			p.From.Reg = b.Control.Reg()
		}
	default:
		b.Fatalf("branch not implemented: %s. Control: %s", b.LongString(), b.Control.LongString())
	}
}
    819