--- /dev/null
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package runtime
+
+import (
+ "runtime/internal/atomic"
+ "unsafe"
+)
+
+// These functions cannot have go:noescape annotations,
+// because while ptr does not escape, new does.
+// If new is marked as not escaping, the compiler will make incorrect
+// escape analysis decisions about the pointer value being stored.
+// Instead, these are wrappers around the actual atomics (casp1 and so on)
+// that use noescape to convey which arguments do not escape.
+
+// atomicstorep performs *ptr = new atomically and invokes a write barrier.
+//
+//go:nosplit
+func atomicstorep(ptr unsafe.Pointer, new unsafe.Pointer) {
+ writebarrierptr_prewrite((*uintptr)(ptr), uintptr(new))
+ atomic.StorepNoWB(noescape(ptr), new)
+}
+
+//go:nosplit
+func casp(ptr *unsafe.Pointer, old, new unsafe.Pointer) bool {
+ // The write barrier is only necessary if the CAS succeeds,
+ // but since it needs to happen before the write becomes
+ // public, we have to do it conservatively all the time.
+ writebarrierptr_prewrite((*uintptr)(unsafe.Pointer(ptr)), uintptr(new))
+ return atomic.Casp1((*unsafe.Pointer)(noescape(unsafe.Pointer(ptr))), noescape(old), new)
+}
+
+// Like above, but implement in terms of sync/atomic's uintptr operations.
+// We cannot just call the runtime routines, because the race detector expects
+// to be able to intercept the sync/atomic forms but not the runtime forms.
+
+//go:linkname sync_atomic_StoreUintptr sync_atomic.StoreUintptr
+func sync_atomic_StoreUintptr(ptr *uintptr, new uintptr)
+
+//go:linkname sync_atomic_StorePointer sync_atomic.StorePointer
+//go:nosplit
+func sync_atomic_StorePointer(ptr *unsafe.Pointer, new unsafe.Pointer) {
+ writebarrierptr_prewrite((*uintptr)(unsafe.Pointer(ptr)), uintptr(new))
+ sync_atomic_StoreUintptr((*uintptr)(unsafe.Pointer(ptr)), uintptr(new))
+}
+
+//go:linkname sync_atomic_SwapUintptr sync_atomic.SwapUintptr
+func sync_atomic_SwapUintptr(ptr *uintptr, new uintptr) uintptr
+
+//go:linkname sync_atomic_SwapPointer sync_atomic.SwapPointer
+//go:nosplit
+func sync_atomic_SwapPointer(ptr *unsafe.Pointer, new unsafe.Pointer) unsafe.Pointer {
+ writebarrierptr_prewrite((*uintptr)(unsafe.Pointer(ptr)), uintptr(new))
+ old := unsafe.Pointer(sync_atomic_SwapUintptr((*uintptr)(noescape(unsafe.Pointer(ptr))), uintptr(new)))
+ return old
+}
+
+//go:linkname sync_atomic_CompareAndSwapUintptr sync_atomic.CompareAndSwapUintptr
+func sync_atomic_CompareAndSwapUintptr(ptr *uintptr, old, new uintptr) bool
+
+//go:linkname sync_atomic_CompareAndSwapPointer sync_atomic.CompareAndSwapPointer
+//go:nosplit
+func sync_atomic_CompareAndSwapPointer(ptr *unsafe.Pointer, old, new unsafe.Pointer) bool {
+ writebarrierptr_prewrite((*uintptr)(unsafe.Pointer(ptr)), uintptr(new))
+ return sync_atomic_CompareAndSwapUintptr((*uintptr)(noescape(unsafe.Pointer(ptr))), uintptr(old), uintptr(new))
+}
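
The wrappers above hide their pointer arguments from escape analysis via noescape, which is not part of this patch; it is already defined in the runtime's stubs.go. For reference, a sketch of that definition, with comments added here to explain the trick:

//go:nosplit
func noescape(p unsafe.Pointer) unsafe.Pointer {
	// Round-tripping through uintptr and XORing with 0 is a no-op at
	// run time, but it severs the data-flow edge escape analysis would
	// use to tie the result back to p, so p is not forced to escape.
	x := uintptr(p)
	return unsafe.Pointer(x ^ 0)
}
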
package runtime
import (
- "runtime/internal/atomic"
"runtime/internal/sys"
"unsafe"
)
support_aes = v
}
-// Here for gccgo until we port atomic_pointer.go and mgc.go.
-//go:nosplit
-func casp(ptr *unsafe.Pointer, old, new unsafe.Pointer) bool {
- if !atomic.Casp1((*unsafe.Pointer)(noescape(unsafe.Pointer(ptr))), noescape(old), new) {
- return false
- }
- return true
-}
-
// Here for gccgo until we port lock_*.go.
func lock(l *mutex)
func unlock(l *mutex)
// Temporary for gccgo until we port mheap.go
func setprofilebucket(p unsafe.Pointer, b *bucket)
-// Temporary for gccgo until we port atomic_pointer.go.
-//go:nosplit
-func atomicstorep(ptr unsafe.Pointer, new unsafe.Pointer) {
- atomic.StorepNoWB(noescape(ptr), new)
-}
-
// Get signal trampoline, written in C.
func getSigtramp() uintptr
return __atomic_exchange_n (addr, new, __ATOMIC_SEQ_CST);
}
-void *SwapPointer (void **, void *)
- __asm__ (GOSYM_PREFIX "sync_atomic.SwapPointer")
- __attribute__ ((no_split_stack));
-
-void *
-SwapPointer (void **addr, void *new)
-{
- return __atomic_exchange_n (addr, new, __ATOMIC_SEQ_CST);
-}
-
_Bool CompareAndSwapInt32 (int32_t *, int32_t, int32_t)
__asm__ (GOSYM_PREFIX "sync_atomic.CompareAndSwapInt32")
__attribute__ ((no_split_stack));
return __sync_bool_compare_and_swap (val, old, new);
}
-_Bool CompareAndSwapPointer (void **, void *, void *)
- __asm__ (GOSYM_PREFIX "sync_atomic.CompareAndSwapPointer")
- __attribute__ ((no_split_stack));
-
-_Bool
-CompareAndSwapPointer (void **val, void *old, void *new)
-{
- return __sync_bool_compare_and_swap (val, old, new);
-}
-
int32_t AddInt32 (int32_t *, int32_t)
__asm__ (GOSYM_PREFIX "sync_atomic.AddInt32")
__attribute__ ((no_split_stack));
while (! __sync_bool_compare_and_swap (addr, v, val))
v = *addr;
}
-
-void StorePointer (void **addr, void *val)
- __asm__ (GOSYM_PREFIX "sync_atomic.StorePointer")
- __attribute__ ((no_split_stack));
-
-void
-StorePointer (void **addr, void *val)
-{
- void *v;
-
- v = *addr;
- while (! __sync_bool_compare_and_swap (addr, v, val))
- v = *addr;
-}
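
With the Go wrappers above bound via go:linkname, the C bodies of StorePointer, SwapPointer, and CompareAndSwapPointer are no longer needed: calls to the public sync/atomic pointer functions now land in the runtime wrappers, which issue the write barrier before delegating to the uintptr forms. Nothing changes for user code; a minimal usage sketch (the node type and values are illustrative only):

package main

import (
	"fmt"
	"sync/atomic"
	"unsafe"
)

type node struct{ v int }

func main() {
	var p unsafe.Pointer // holds a *node

	// Store: routed to sync_atomic_StorePointer, which runs the write
	// barrier and then the plain uintptr store.
	atomic.StorePointer(&p, unsafe.Pointer(&node{v: 1}))

	// CAS: per the runtime comment, the write barrier is issued
	// conservatively even though it only matters when the CAS succeeds.
	old := atomic.LoadPointer(&p)
	ok := atomic.CompareAndSwapPointer(&p, old, unsafe.Pointer(&node{v: 2}))

	// Swap: returns the previous pointer value.
	prev := atomic.SwapPointer(&p, unsafe.Pointer(&node{v: 3}))

	fmt.Println(ok, (*node)(prev).v, (*node)(atomic.LoadPointer(&p)).v) // true 2 3
}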