// Source file src/sync/atomic/doc.go
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package atomic provides low-level atomic memory primitives
// useful for implementing synchronization algorithms.
//
// These functions require great care to be used correctly.
// Except for special, low-level applications, synchronization is better
// done with channels or the facilities of the [sync] package.
// Share memory by communicating;
// don't communicate by sharing memory.
//
// The swap operation, implemented by the SwapT functions, is the atomic
// equivalent of:
//
//	old = *addr
//	*addr = new
//	return old
//
// The compare-and-swap operation, implemented by the CompareAndSwapT
// functions, is the atomic equivalent of:
//
//	if *addr == old {
//		*addr = new
//		return true
//	}
//	return false
//
// The add operation, implemented by the AddT functions, is the atomic
// equivalent of:
//
//	*addr += delta
//	return *addr
//
// The load and store operations, implemented by the LoadT and StoreT
// functions, are the atomic equivalents of "return *addr" and
// "*addr = val".
//
// In the terminology of [the Go memory model], if the effect of
// an atomic operation A is observed by atomic operation B,
// then A “synchronizes before” B.
// Additionally, all the atomic operations executed in a program
// behave as though executed in some sequentially consistent order.
// This definition provides the same semantics as
// C++'s sequentially consistent atomics and Java's volatile variables.
//
// [the Go memory model]: https://go.dev/ref/mem
package atomic

import (
	"unsafe"
)

// BUG(rsc): On 386, the 64-bit functions use instructions unavailable before the Pentium MMX.
//
// On non-Linux ARM, the 64-bit functions use instructions unavailable before the ARMv6k core.
//
// On ARM, 386, and 32-bit MIPS, it is the caller's responsibility to arrange
// for 64-bit alignment of 64-bit words accessed atomically via the primitive
// atomic functions (types [Int64] and [Uint64] are automatically aligned).
// The first word in an allocated struct, array, or slice; in a global
// variable; or in a local variable (because on 32-bit architectures, the
// subject of 64-bit atomic operations will escape to the heap) can be
// relied upon to be 64-bit aligned.

// SwapInt32 atomically stores new into *addr and returns the previous *addr value.
// Consider using the more ergonomic and less error-prone [Int32.Swap] instead.
//
//go:noescape
func SwapInt32(addr *int32, new int32) (old int32)

// SwapUint32 atomically stores new into *addr and returns the previous *addr value.
// Consider using the more ergonomic and less error-prone [Uint32.Swap] instead.
//
//go:noescape
func SwapUint32(addr *uint32, new uint32) (old uint32)

// SwapUintptr atomically stores new into *addr and returns the previous *addr value.
// Consider using the more ergonomic and less error-prone [Uintptr.Swap] instead.
//
//go:noescape
//go:linkname SwapUintptr
func SwapUintptr(addr *uintptr, new uintptr) (old uintptr)

// SwapPointer atomically stores new into *addr and returns the previous *addr value.
// Consider using the more ergonomic and less error-prone [Pointer.Swap] instead.
func SwapPointer(addr *unsafe.Pointer, new unsafe.Pointer) (old unsafe.Pointer)

// CompareAndSwapInt32 executes the compare-and-swap operation for an int32 value.
// Consider using the more ergonomic and less error-prone [Int32.CompareAndSwap] instead.
//
//go:noescape
func CompareAndSwapInt32(addr *int32, old, new int32) (swapped bool)

// CompareAndSwapUint32 executes the compare-and-swap operation for a uint32 value.
// Consider using the more ergonomic and less error-prone [Uint32.CompareAndSwap] instead.
//
//go:noescape
func CompareAndSwapUint32(addr *uint32, old, new uint32) (swapped bool)

// CompareAndSwapUintptr executes the compare-and-swap operation for a uintptr value.
// Consider using the more ergonomic and less error-prone [Uintptr.CompareAndSwap] instead.
//
//go:noescape
//go:linkname CompareAndSwapUintptr
func CompareAndSwapUintptr(addr *uintptr, old, new uintptr) (swapped bool)

// CompareAndSwapPointer executes the compare-and-swap operation for a unsafe.Pointer value.
// Consider using the more ergonomic and less error-prone [Pointer.CompareAndSwap] instead.
func CompareAndSwapPointer(addr *unsafe.Pointer, old, new unsafe.Pointer) (swapped bool)

// AddInt32 atomically adds delta to *addr and returns the new value.
// Consider using the more ergonomic and less error-prone [Int32.Add] instead.
//
//go:noescape
func AddInt32(addr *int32, delta int32) (new int32)

// AddUint32 atomically adds delta to *addr and returns the new value.
// To subtract a signed positive constant value c from x, do AddUint32(&x, ^uint32(c-1)).
// In particular, to decrement x, do AddUint32(&x, ^uint32(0)).
// Consider using the more ergonomic and less error-prone [Uint32.Add] instead.
//
//go:noescape
func AddUint32(addr *uint32, delta uint32) (new uint32)

// AddUintptr atomically adds delta to *addr and returns the new value.
// Consider using the more ergonomic and less error-prone [Uintptr.Add] instead.
//
//go:noescape
func AddUintptr(addr *uintptr, delta uintptr) (new uintptr)

// AndInt32 atomically performs a bitwise AND operation on *addr using the bitmask provided as mask
// and returns the old value.
// Consider using the more ergonomic and less error-prone [Int32.And] instead.
//
//go:noescape
func AndInt32(addr *int32, mask int32) (old int32)

// AndUint32 atomically performs a bitwise AND operation on *addr using the bitmask provided as mask
// and returns the old value.
// Consider using the more ergonomic and less error-prone [Uint32.And] instead.
//
//go:noescape
func AndUint32(addr *uint32, mask uint32) (old uint32)

// AndUintptr atomically performs a bitwise AND operation on *addr using the bitmask provided as mask
// and returns the old value.
// Consider using the more ergonomic and less error-prone [Uintptr.And] instead.
//
//go:noescape
func AndUintptr(addr *uintptr, mask uintptr) (old uintptr)

// OrInt32 atomically performs a bitwise OR operation on *addr using the bitmask provided as mask
// and returns the old value.
// Consider using the more ergonomic and less error-prone [Int32.Or] instead.
//
//go:noescape
func OrInt32(addr *int32, mask int32) (old int32)

// OrUint32 atomically performs a bitwise OR operation on *addr using the bitmask provided as mask
// and returns the old value.
// Consider using the more ergonomic and less error-prone [Uint32.Or] instead.
//
//go:noescape
func OrUint32(addr *uint32, mask uint32) (old uint32)

// OrUintptr atomically performs a bitwise OR operation on *addr using the bitmask provided as mask
// and returns the old value.
// Consider using the more ergonomic and less error-prone [Uintptr.Or] instead.
//
//go:noescape
func OrUintptr(addr *uintptr, mask uintptr) (old uintptr)

// LoadInt32 atomically loads *addr.
// Consider using the more ergonomic and less error-prone [Int32.Load] instead.
//
//go:noescape
func LoadInt32(addr *int32) (val int32)

// LoadUint32 atomically loads *addr.
// Consider using the more ergonomic and less error-prone [Uint32.Load] instead.
//
//go:noescape
func LoadUint32(addr *uint32) (val uint32)

// LoadUintptr atomically loads *addr.
// Consider using the more ergonomic and less error-prone [Uintptr.Load] instead.
//
//go:noescape
func LoadUintptr(addr *uintptr) (val uintptr)

// LoadPointer atomically loads *addr.
// Consider using the more ergonomic and less error-prone [Pointer.Load] instead.
func LoadPointer(addr *unsafe.Pointer) (val unsafe.Pointer)

// StoreInt32 atomically stores val into *addr.
// Consider using the more ergonomic and less error-prone [Int32.Store] instead.
//
//go:noescape
func StoreInt32(addr *int32, val int32)

// StoreUint32 atomically stores val into *addr.
// Consider using the more ergonomic and less error-prone [Uint32.Store] instead.
//
//go:noescape
func StoreUint32(addr *uint32, val uint32)

// StoreUintptr atomically stores val into *addr.
// Consider using the more ergonomic and less error-prone [Uintptr.Store] instead.
//
//go:noescape
//go:linkname StoreUintptr
func StoreUintptr(addr *uintptr, val uintptr)

// StorePointer atomically stores val into *addr.
// Consider using the more ergonomic and less error-prone [Pointer.Store] instead.
func StorePointer(addr *unsafe.Pointer, val unsafe.Pointer)