// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build mips mipsle

package atomic

import (
	"runtime/internal/sys"
	"unsafe"
)

// lock is a single global spinlock protecting the 64-bit operations below;
// the padding fills out a full cache line to avoid false sharing.
//
// TODO: implement lock striping
var lock struct {
	state uint32
	pad   [sys.CacheLineSize - 4]byte
}

// spinLock acquires the spinlock at state; implemented in assembly.
//go:noescape
func spinLock(state *uint32)

// spinUnlock releases the spinlock at state; implemented in assembly.
//go:noescape
func spinUnlock(state *uint32)

// lockAndCheck checks that addr is 8-byte aligned and acquires the global
// spinlock.
//go:nosplit
func lockAndCheck(addr *uint64) {
	// ensure 8-byte alignment
	if uintptr(unsafe.Pointer(addr))&7 != 0 {
		addr = nil
	}
	// force a dereference before taking the lock: a misaligned addr is now
	// nil and faults here rather than while holding the lock
	_ = *addr

	spinLock(&lock.state)
}

// unlock releases the global spinlock.
//go:nosplit
func unlock() {
	spinUnlock(&lock.state)
}

// unlockNoFence releases the global spinlock without a memory barrier; it is
// used on paths that did not write the protected memory.
//go:nosplit
func unlockNoFence() {
	lock.state = 0
}
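
// The 64-bit operations below emulate atomicity by holding the global
// spinlock around a plain read-modify-write; 32-bit MIPS has no native
// 64-bit atomic instructions.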

// Xadd64 atomically adds delta to *addr and returns the new value.
//go:nosplit
func Xadd64(addr *uint64, delta int64) (new uint64) {
	lockAndCheck(addr)

	new = *addr + uint64(delta)
	*addr = new

	unlock()
	return
}

// Xchg64 atomically stores new into *addr and returns the old value.
//go:nosplit
func Xchg64(addr *uint64, new uint64) (old uint64) {
	lockAndCheck(addr)

	old = *addr
	*addr = new

	unlock()
	return
}

// Cas64 atomically compares *addr with old and, if they match, stores new;
// it reports whether the swap took place.
//go:nosplit
func Cas64(addr *uint64, old, new uint64) (swapped bool) {
	lockAndCheck(addr)

	if (*addr) == old {
		*addr = new
		unlock()
		return true
	}

	unlockNoFence()
	return false
}

// Load64 atomically loads and returns *addr.
//go:nosplit
func Load64(addr *uint64) (val uint64) {
	lockAndCheck(addr)

	val = *addr

	unlock()
	return
}

// Store64 atomically stores val into *addr.
//go:nosplit
func Store64(addr *uint64, val uint64) {
	lockAndCheck(addr)

	*addr = val

	unlock()
}
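
// The remaining operations are declared here and implemented directly in
// assembly.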

//go:noescape
func Xadd(ptr *uint32, delta int32) uint32

//go:noescape
func Xadduintptr(ptr *uintptr, delta uintptr) uintptr

//go:noescape
func Xchg(ptr *uint32, new uint32) uint32

//go:noescape
func Xchguintptr(ptr *uintptr, new uintptr) uintptr

//go:noescape
func Load(ptr *uint32) uint32

//go:noescape
func Loadp(ptr unsafe.Pointer) unsafe.Pointer

//go:noescape
func And8(ptr *uint8, val uint8)

//go:noescape
func Or8(ptr *uint8, val uint8)

//go:noescape
func Store(ptr *uint32, val uint32)

// NO go:noescape annotation; see atomic_pointer.go.
func StorepNoWB(ptr unsafe.Pointer, val unsafe.Pointer)