// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Note: some of these functions are semantically inlined
// by the compiler (in src/cmd/compile/internal/gc/ssa.go).

#include "textflag.h"

// bool Cas(int32 *val, int32 old, int32 new)
// Atomically:
//	if(*val == old){
//		*val = new;
//		return 1;
//	} else
//		return 0;
TEXT runtime∕internal∕atomic·Cas(SB),NOSPLIT,$0-17
	MOVQ	ptr+0(FP), BX
	MOVL	old+8(FP), AX
	MOVL	new+12(FP), CX
	LOCK
	CMPXCHGL	CX, 0(BX)
	SETEQ	ret+16(FP)
	RET

// bool	runtime∕internal∕atomic·Cas64(uint64 *val, uint64 old, uint64 new)
// Atomically:
//	if(*val == old){
//		*val = new;
//		return 1;
//	} else {
//		return 0;
//	}
TEXT runtime∕internal∕atomic·Cas64(SB), NOSPLIT, $0-25
	MOVQ	ptr+0(FP), BX
	MOVQ	old+8(FP), AX
	MOVQ	new+16(FP), CX
	LOCK
	CMPXCHGQ	CX, 0(BX)
	SETEQ	ret+24(FP)
	RET

TEXT runtime∕internal∕atomic·Casuintptr(SB), NOSPLIT, $0-25
	JMP	runtime∕internal∕atomic·Cas64(SB)

TEXT runtime∕internal∕atomic·Loaduintptr(SB), NOSPLIT, $0-16
	JMP	runtime∕internal∕atomic·Load64(SB)

TEXT runtime∕internal∕atomic·Loaduint(SB), NOSPLIT, $0-16
	JMP	runtime∕internal∕atomic·Load64(SB)

TEXT runtime∕internal∕atomic·Storeuintptr(SB), NOSPLIT, $0-16
	JMP	runtime∕internal∕atomic·Store64(SB)

TEXT runtime∕internal∕atomic·Loadint64(SB), NOSPLIT, $0-16
	JMP	runtime∕internal∕atomic·Load64(SB)

TEXT runtime∕internal∕atomic·Xaddint64(SB), NOSPLIT, $0-24
	JMP	runtime∕internal∕atomic·Xadd64(SB)

// bool Casp1(void **val, void *old, void *new)
// Atomically:
//	if(*val == old){
//		*val = new;
//		return 1;
//	} else
//		return 0;
TEXT runtime∕internal∕atomic·Casp1(SB), NOSPLIT, $0-25
	MOVQ	ptr+0(FP), BX
	MOVQ	old+8(FP), AX
	MOVQ	new+16(FP), CX
	LOCK
	CMPXCHGQ	CX, 0(BX)
	SETEQ	ret+24(FP)
	RET

// uint32 Xadd(uint32 volatile *val, int32 delta)
// Atomically:
//	*val += delta;
//	return *val;
TEXT runtime∕internal∕atomic·Xadd(SB), NOSPLIT, $0-20
	MOVQ	ptr+0(FP), BX
	MOVL	delta+8(FP), AX
	MOVL	AX, CX
	LOCK
	XADDL	AX, 0(BX)
	ADDL	CX, AX
	MOVL	AX, ret+16(FP)
	RET

// uint64 Xadd64(uint64 volatile *val, int64 delta)
// Atomically:
//	*val += delta;
//	return *val;
TEXT runtime∕internal∕atomic·Xadd64(SB), NOSPLIT, $0-24
	MOVQ	ptr+0(FP), BX
	MOVQ	delta+8(FP), AX
	MOVQ	AX, CX
	LOCK
	XADDQ	AX, 0(BX)
	ADDQ	CX, AX
	MOVQ	AX, ret+16(FP)
	RET

TEXT runtime∕internal∕atomic·Xadduintptr(SB), NOSPLIT, $0-24
	JMP	runtime∕internal∕atomic·Xadd64(SB)

// uint32 Xchg(uint32 volatile *ptr, uint32 new)
// Atomically:
//	old := *ptr;
//	*ptr = new;
//	return old;
TEXT runtime∕internal∕atomic·Xchg(SB), NOSPLIT, $0-20
	MOVQ	ptr+0(FP), BX
	MOVL	new+8(FP), AX
	XCHGL	AX, 0(BX)
	MOVL	AX, ret+16(FP)
	RET

// uint64 Xchg64(uint64 volatile *ptr, uint64 new)
// Atomically:
//	old := *ptr;
//	*ptr = new;
//	return old;
TEXT runtime∕internal∕atomic·Xchg64(SB), NOSPLIT, $0-24
	MOVQ	ptr+0(FP), BX
	MOVQ	new+8(FP), AX
	XCHGQ	AX, 0(BX)
	MOVQ	AX, ret+16(FP)
	RET

TEXT runtime∕internal∕atomic·Xchguintptr(SB), NOSPLIT, $0-24
	JMP	runtime∕internal∕atomic·Xchg64(SB)

// void StorepNoWB(void **ptr, void *val)
// Atomically store val at *ptr, without a write barrier.
TEXT runtime∕internal∕atomic·StorepNoWB(SB), NOSPLIT, $0-16
	MOVQ	ptr+0(FP), BX
	MOVQ	val+8(FP), AX
	XCHGQ	AX, 0(BX)
	RET

// void Store(uint32 volatile *ptr, uint32 val)
TEXT runtime∕internal∕atomic·Store(SB), NOSPLIT, $0-12
	MOVQ	ptr+0(FP), BX
	MOVL	val+8(FP), AX
	XCHGL	AX, 0(BX)
	RET

// void Store64(uint64 volatile *ptr, uint64 val)
TEXT runtime∕internal∕atomic·Store64(SB), NOSPLIT, $0-16
	MOVQ	ptr+0(FP), BX
	MOVQ	val+8(FP), AX
	XCHGQ	AX, 0(BX)
	RET

// void	runtime∕internal∕atomic·Or8(byte volatile*, byte);
TEXT runtime∕internal∕atomic·Or8(SB), NOSPLIT, $0-9
	MOVQ	ptr+0(FP), AX
	MOVB	val+8(FP), BX
	LOCK
	ORB	BX, (AX)
	RET

// void	runtime∕internal∕atomic·And8(byte volatile*, byte);
TEXT runtime∕internal∕atomic·And8(SB), NOSPLIT, $0-9
	MOVQ	ptr+0(FP), AX
	MOVB	val+8(FP), BX
	LOCK
	ANDB	BX, (AX)
	RET