From: Cherry Zhang
Date: Tue, 20 Nov 2018 15:20:06 +0000 (-0500)
Subject: runtime: add arg maps for sync/atomic functions in ARM64 race mode
X-Git-Tag: go1.12beta1~294
X-Git-Url: http://www.git.cypherpunks.su/?a=commitdiff_plain;h=c6e698d5dd0d3a309c2d93368dcc451820deb66d;p=gostls13.git

runtime: add arg maps for sync/atomic functions in ARM64 race mode

In race mode, these functions are declared in package sync/atomic but
defined in package runtime (in assembly), so they don't have implicit
arg maps. When one of them is deferred and the stack then needs to
move, the runtime fails with a missing stack map. This CL adds arg
maps (FUNCDATA) to them.

Updates #28848

Change-Id: I0271563b7e78e7797ce2990c303dced957efaa86
Reviewed-on: https://go-review.googlesource.com/c/150457
Run-TryBot: Cherry Zhang
TryBot-Result: Gobot Gobot
Reviewed-by: Brad Fitzpatrick
---
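For context (not part of this CL), here is a minimal sketch of the kind of
program that hits the failure described above when built with -race on
linux/arm64. The names counter and grow are illustrative only, not taken
from the issue or the patch: a sync/atomic call is deferred, then the
goroutine stack grows and must be copied while the defer is still pending,
at which point the runtime needs the deferred function's arg map.

	// deferatomic.go: hypothetical reproducer sketch, assuming
	// `go run -race deferatomic.go` on a linux/arm64 machine.
	package main

	import "sync/atomic"

	var counter uint64

	// grow recurses deeply enough to force the goroutine stack to be
	// copied while the deferred atomic call is still pending.
	func grow(n int) {
		if n > 0 {
			grow(n - 1)
		}
	}

	func main() {
		// The argument frame for the deferred call is recorded now and
		// must be described by an arg map when the stack is copied;
		// before this change that map was missing in race mode.
		defer atomic.AddUint64(&counter, 1)
		grow(100000)
	}
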
diff --git a/src/runtime/race_arm64.s b/src/runtime/race_arm64.s
index 7223be3d68..48b119f8c4 100644
--- a/src/runtime/race_arm64.s
+++ b/src/runtime/race_arm64.s
@@ -192,69 +192,86 @@ TEXT runtime·racefuncexit(SB), NOSPLIT, $0-0
 // Load
 TEXT sync∕atomic·LoadInt32(SB), NOSPLIT, $0
+	GO_ARGS
 	MOVD	$__tsan_go_atomic32_load(SB), R9
 	BL	racecallatomic<>(SB)
 	RET
 
 TEXT sync∕atomic·LoadInt64(SB), NOSPLIT, $0
+	GO_ARGS
 	MOVD	$__tsan_go_atomic64_load(SB), R9
 	BL	racecallatomic<>(SB)
 	RET
 
 TEXT sync∕atomic·LoadUint32(SB), NOSPLIT, $0
+	GO_ARGS
 	JMP	sync∕atomic·LoadInt32(SB)
 
 TEXT sync∕atomic·LoadUint64(SB), NOSPLIT, $0
+	GO_ARGS
 	JMP	sync∕atomic·LoadInt64(SB)
 
 TEXT sync∕atomic·LoadUintptr(SB), NOSPLIT, $0
+	GO_ARGS
 	JMP	sync∕atomic·LoadInt64(SB)
 
 TEXT sync∕atomic·LoadPointer(SB), NOSPLIT, $0
+	GO_ARGS
 	JMP	sync∕atomic·LoadInt64(SB)
 
 // Store
 TEXT sync∕atomic·StoreInt32(SB), NOSPLIT, $0
+	GO_ARGS
 	MOVD	$__tsan_go_atomic32_store(SB), R9
 	BL	racecallatomic<>(SB)
 	RET
 
 TEXT sync∕atomic·StoreInt64(SB), NOSPLIT, $0
+	GO_ARGS
 	MOVD	$__tsan_go_atomic64_store(SB), R9
 	BL	racecallatomic<>(SB)
 	RET
 
 TEXT sync∕atomic·StoreUint32(SB), NOSPLIT, $0
+	GO_ARGS
 	JMP	sync∕atomic·StoreInt32(SB)
 
 TEXT sync∕atomic·StoreUint64(SB), NOSPLIT, $0
+	GO_ARGS
 	JMP	sync∕atomic·StoreInt64(SB)
 
 TEXT sync∕atomic·StoreUintptr(SB), NOSPLIT, $0
+	GO_ARGS
 	JMP	sync∕atomic·StoreInt64(SB)
 
 // Swap
 TEXT sync∕atomic·SwapInt32(SB), NOSPLIT, $0
+	GO_ARGS
 	MOVD	$__tsan_go_atomic32_exchange(SB), R9
 	BL	racecallatomic<>(SB)
 	RET
 
 TEXT sync∕atomic·SwapInt64(SB), NOSPLIT, $0
+	GO_ARGS
 	MOVD	$__tsan_go_atomic64_exchange(SB), R9
 	BL	racecallatomic<>(SB)
 	RET
 
 TEXT sync∕atomic·SwapUint32(SB), NOSPLIT, $0
+	GO_ARGS
 	JMP	sync∕atomic·SwapInt32(SB)
 
 TEXT sync∕atomic·SwapUint64(SB), NOSPLIT, $0
+	GO_ARGS
 	JMP	sync∕atomic·SwapInt64(SB)
 
 TEXT sync∕atomic·SwapUintptr(SB), NOSPLIT, $0
+	GO_ARGS
 	JMP	sync∕atomic·SwapInt64(SB)
 
 // Add
 TEXT sync∕atomic·AddInt32(SB), NOSPLIT, $0
+	GO_ARGS
 	MOVD	$__tsan_go_atomic32_fetch_add(SB), R9
 	BL	racecallatomic<>(SB)
 	MOVW	add+8(FP), R0	// convert fetch_add to add_fetch
@@ -264,6 +281,7 @@ TEXT sync∕atomic·AddInt32(SB), NOSPLIT, $0
 	RET
 
 TEXT sync∕atomic·AddInt64(SB), NOSPLIT, $0
+	GO_ARGS
 	MOVD	$__tsan_go_atomic64_fetch_add(SB), R9
 	BL	racecallatomic<>(SB)
 	MOVD	add+8(FP), R0	// convert fetch_add to add_fetch
@@ -273,32 +291,40 @@ TEXT sync∕atomic·AddInt64(SB), NOSPLIT, $0
 	RET
 
 TEXT sync∕atomic·AddUint32(SB), NOSPLIT, $0
+	GO_ARGS
 	JMP	sync∕atomic·AddInt32(SB)
 
 TEXT sync∕atomic·AddUint64(SB), NOSPLIT, $0
+	GO_ARGS
 	JMP	sync∕atomic·AddInt64(SB)
 
 TEXT sync∕atomic·AddUintptr(SB), NOSPLIT, $0
+	GO_ARGS
 	JMP	sync∕atomic·AddInt64(SB)
 
 // CompareAndSwap
 TEXT sync∕atomic·CompareAndSwapInt32(SB), NOSPLIT, $0
+	GO_ARGS
 	MOVD	$__tsan_go_atomic32_compare_exchange(SB), R9
 	BL	racecallatomic<>(SB)
 	RET
 
 TEXT sync∕atomic·CompareAndSwapInt64(SB), NOSPLIT, $0
+	GO_ARGS
 	MOVD	$__tsan_go_atomic64_compare_exchange(SB), R9
 	BL	racecallatomic<>(SB)
 	RET
 
 TEXT sync∕atomic·CompareAndSwapUint32(SB), NOSPLIT, $0
+	GO_ARGS
 	JMP	sync∕atomic·CompareAndSwapInt32(SB)
 
 TEXT sync∕atomic·CompareAndSwapUint64(SB), NOSPLIT, $0
+	GO_ARGS
 	JMP	sync∕atomic·CompareAndSwapInt64(SB)
 
 TEXT sync∕atomic·CompareAndSwapUintptr(SB), NOSPLIT, $0
+	GO_ARGS
 	JMP	sync∕atomic·CompareAndSwapInt64(SB)
 
 // Generic atomic operation implementation.