Commit 86ff1853 by Ian Lance Taylor

re PR go/84215 (Random results in go/libgo tests)

    PR go/84215
    runtime, sync/atomic: use write barrier for atomic pointer functions
    
    This copies atomic_pointer.go from 1.10rc2.  It was omitted during the
    transition of the runtime from C to Go, and I forgot about it.
    
    This may help with https://gcc.gnu.org/PR84215.
    
    Reviewed-on: https://go-review.googlesource.com/93197

From-SVN: r257599
parent 966a140d
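
Background for the PR: without the wrappers added below, pointers published through the sync/atomic pointer functions bypass the garbage collector's write barrier, so the collector can miss the newly stored reference and reclaim or reuse the object it points to, which surfaces as random libgo test failures. A minimal sketch of the kind of user code affected (hypothetical example, not part of this commit):

package main

import (
	"fmt"
	"sync/atomic"
	"unsafe"
)

type node struct{ value int }

// head is read and written by multiple goroutines via atomic pointer ops.
var head unsafe.Pointer // *node

func publish(n *node) {
	// The GC must learn about this store through a write barrier,
	// otherwise it may not see that n is still reachable.
	atomic.StorePointer(&head, unsafe.Pointer(n))
}

func main() {
	publish(&node{value: 42})
	fmt.Println((*node)(atomic.LoadPointer(&head)).value)
}

With this change, a call like atomic.StorePointer above is routed (via go:linkname) to sync_atomic_StorePointer in the new runtime file, which runs the write barrier before performing the store.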
-89105404f94005ffa8e2b08df78015dc9ac91362
+cebdbf3f293f5b0f3120c009c47da0ceadc113cb
 
 The first line of this file holds the git revision number of the last
 merge done from the gofrontend repository.
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package runtime

import (
	"runtime/internal/atomic"
	"unsafe"
)

// These functions cannot have go:noescape annotations,
// because while ptr does not escape, new does.
// If new is marked as not escaping, the compiler will make incorrect
// escape analysis decisions about the pointer value being stored.
// Instead, these are wrappers around the actual atomics (casp1 and so on)
// that use noescape to convey which arguments do not escape.

// atomicstorep performs *ptr = new atomically and invokes a write barrier.
//
//go:nosplit
func atomicstorep(ptr unsafe.Pointer, new unsafe.Pointer) {
	writebarrierptr_prewrite((*uintptr)(ptr), uintptr(new))
	atomic.StorepNoWB(noescape(ptr), new)
}

//go:nosplit
func casp(ptr *unsafe.Pointer, old, new unsafe.Pointer) bool {
	// The write barrier is only necessary if the CAS succeeds,
	// but since it needs to happen before the write becomes
	// public, we have to do it conservatively all the time.
	writebarrierptr_prewrite((*uintptr)(unsafe.Pointer(ptr)), uintptr(new))
	return atomic.Casp1((*unsafe.Pointer)(noescape(unsafe.Pointer(ptr))), noescape(old), new)
}

// Like above, but implement in terms of sync/atomic's uintptr operations.
// We cannot just call the runtime routines, because the race detector expects
// to be able to intercept the sync/atomic forms but not the runtime forms.

//go:linkname sync_atomic_StoreUintptr sync_atomic.StoreUintptr
func sync_atomic_StoreUintptr(ptr *uintptr, new uintptr)

//go:linkname sync_atomic_StorePointer sync_atomic.StorePointer
//go:nosplit
func sync_atomic_StorePointer(ptr *unsafe.Pointer, new unsafe.Pointer) {
	writebarrierptr_prewrite((*uintptr)(unsafe.Pointer(ptr)), uintptr(new))
	sync_atomic_StoreUintptr((*uintptr)(unsafe.Pointer(ptr)), uintptr(new))
}

//go:linkname sync_atomic_SwapUintptr sync_atomic.SwapUintptr
func sync_atomic_SwapUintptr(ptr *uintptr, new uintptr) uintptr

//go:linkname sync_atomic_SwapPointer sync_atomic.SwapPointer
//go:nosplit
func sync_atomic_SwapPointer(ptr *unsafe.Pointer, new unsafe.Pointer) unsafe.Pointer {
	writebarrierptr_prewrite((*uintptr)(unsafe.Pointer(ptr)), uintptr(new))
	old := unsafe.Pointer(sync_atomic_SwapUintptr((*uintptr)(noescape(unsafe.Pointer(ptr))), uintptr(new)))
	return old
}

//go:linkname sync_atomic_CompareAndSwapUintptr sync_atomic.CompareAndSwapUintptr
func sync_atomic_CompareAndSwapUintptr(ptr *uintptr, old, new uintptr) bool

//go:linkname sync_atomic_CompareAndSwapPointer sync_atomic.CompareAndSwapPointer
//go:nosplit
func sync_atomic_CompareAndSwapPointer(ptr *unsafe.Pointer, old, new unsafe.Pointer) bool {
	writebarrierptr_prewrite((*uintptr)(unsafe.Pointer(ptr)), uintptr(new))
	return sync_atomic_CompareAndSwapUintptr((*uintptr)(noescape(unsafe.Pointer(ptr))), uintptr(old), uintptr(new))
}
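
Every wrapper above follows the same two-step pattern: run the write barrier first, while the store is not yet visible, then perform the actual atomic operation on a noescape'd pointer; as the comment in casp notes, the barrier is issued even when a compare-and-swap might fail, because it has to happen before the new pointer can become public. A rough standalone sketch of that ordering, with a hypothetical notifyGC callback standing in for writebarrierptr_prewrite:

package main

import (
	"sync/atomic"
	"unsafe"
)

// storeWithBarrier models the ordering used by the runtime wrappers:
// the GC is notified before the pointer store becomes visible.
// notifyGC is a hypothetical stand-in for writebarrierptr_prewrite.
func storeWithBarrier(slot *unsafe.Pointer, p unsafe.Pointer, notifyGC func(*unsafe.Pointer, unsafe.Pointer)) {
	notifyGC(slot, p)            // 1. pre-write barrier: record the upcoming store
	atomic.StorePointer(slot, p) // 2. atomically publish the pointer
}

func main() {
	var slot unsafe.Pointer
	v := 7
	storeWithBarrier(&slot, unsafe.Pointer(&v), func(*unsafe.Pointer, unsafe.Pointer) {})
}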
@@ -5,7 +5,6 @@
 package runtime
 
 import (
-	"runtime/internal/atomic"
 	"runtime/internal/sys"
 	"unsafe"
 )
@@ -307,15 +306,6 @@ func setSupportAES(v bool) {
 	support_aes = v
 }
 
-// Here for gccgo until we port atomic_pointer.go and mgc.go.
-//go:nosplit
-func casp(ptr *unsafe.Pointer, old, new unsafe.Pointer) bool {
-	if !atomic.Casp1((*unsafe.Pointer)(noescape(unsafe.Pointer(ptr))), noescape(old), new) {
-		return false
-	}
-	return true
-}
-
 // Here for gccgo until we port lock_*.go.
 func lock(l *mutex)
 func unlock(l *mutex)
@@ -347,12 +337,6 @@ func persistentalloc(size, align uintptr, sysStat *uint64) unsafe.Pointer
 // Temporary for gccgo until we port mheap.go
 func setprofilebucket(p unsafe.Pointer, b *bucket)
 
-// Temporary for gccgo until we port atomic_pointer.go.
-//go:nosplit
-func atomicstorep(ptr unsafe.Pointer, new unsafe.Pointer) {
-	atomic.StorepNoWB(noescape(ptr), new)
-}
-
 // Get signal trampoline, written in C.
 func getSigtramp() uintptr
...@@ -62,16 +62,6 @@ SwapUintptr (uintptr_t *addr, uintptr_t new) ...@@ -62,16 +62,6 @@ SwapUintptr (uintptr_t *addr, uintptr_t new)
return __atomic_exchange_n (addr, new, __ATOMIC_SEQ_CST); return __atomic_exchange_n (addr, new, __ATOMIC_SEQ_CST);
} }
void *SwapPointer (void **, void *)
__asm__ (GOSYM_PREFIX "sync_atomic.SwapPointer")
__attribute__ ((no_split_stack));
void *
SwapPointer (void **addr, void *new)
{
return __atomic_exchange_n (addr, new, __ATOMIC_SEQ_CST);
}
_Bool CompareAndSwapInt32 (int32_t *, int32_t, int32_t) _Bool CompareAndSwapInt32 (int32_t *, int32_t, int32_t)
__asm__ (GOSYM_PREFIX "sync_atomic.CompareAndSwapInt32") __asm__ (GOSYM_PREFIX "sync_atomic.CompareAndSwapInt32")
__attribute__ ((no_split_stack)); __attribute__ ((no_split_stack));
...@@ -126,16 +116,6 @@ CompareAndSwapUintptr (uintptr_t *val, uintptr_t old, uintptr_t new) ...@@ -126,16 +116,6 @@ CompareAndSwapUintptr (uintptr_t *val, uintptr_t old, uintptr_t new)
return __sync_bool_compare_and_swap (val, old, new); return __sync_bool_compare_and_swap (val, old, new);
} }
_Bool CompareAndSwapPointer (void **, void *, void *)
__asm__ (GOSYM_PREFIX "sync_atomic.CompareAndSwapPointer")
__attribute__ ((no_split_stack));
_Bool
CompareAndSwapPointer (void **val, void *old, void *new)
{
return __sync_bool_compare_and_swap (val, old, new);
}
int32_t AddInt32 (int32_t *, int32_t) int32_t AddInt32 (int32_t *, int32_t)
__asm__ (GOSYM_PREFIX "sync_atomic.AddInt32") __asm__ (GOSYM_PREFIX "sync_atomic.AddInt32")
__attribute__ ((no_split_stack)); __attribute__ ((no_split_stack));
...@@ -357,17 +337,3 @@ StoreUintptr (uintptr_t *addr, uintptr_t val) ...@@ -357,17 +337,3 @@ StoreUintptr (uintptr_t *addr, uintptr_t val)
while (! __sync_bool_compare_and_swap (addr, v, val)) while (! __sync_bool_compare_and_swap (addr, v, val))
v = *addr; v = *addr;
} }
void StorePointer (void **addr, void *val)
__asm__ (GOSYM_PREFIX "sync_atomic.StorePointer")
__attribute__ ((no_split_stack));
void
StorePointer (void **addr, void *val)
{
void *v;
v = *addr;
while (! __sync_bool_compare_and_swap (addr, v, val))
v = *addr;
}