Diffstat (limited to 'src/internal/runtime')
-rw-r--r--  src/internal/runtime/atomic/atomic_386.go            |  1
-rw-r--r--  src/internal/runtime/atomic/atomic_andor_generic.go  | 13
-rw-r--r--  src/internal/runtime/atomic/atomic_andor_test.go     |  2
-rw-r--r--  src/internal/runtime/atomic/atomic_arm.go            |  1
-rw-r--r--  src/internal/runtime/atomic/atomic_mips64x.go        | 18
-rw-r--r--  src/internal/runtime/atomic/atomic_mips64x.s         | 64
-rw-r--r--  src/internal/runtime/atomic/atomic_mipsx.go          | 34
-rw-r--r--  src/internal/runtime/atomic/atomic_mipsx.s           | 36
-rw-r--r--  src/internal/runtime/atomic/atomic_wasm.go           |  2
-rw-r--r--  src/internal/runtime/exithook/hooks.go               | 85
-rw-r--r--  src/internal/runtime/syscall/asm_linux_loong64.s     | 26
11 files changed, 255 insertions, 27 deletions
diff --git a/src/internal/runtime/atomic/atomic_386.go b/src/internal/runtime/atomic/atomic_386.go
index e74dcaa92d..a023baddb7 100644
--- a/src/internal/runtime/atomic/atomic_386.go
+++ b/src/internal/runtime/atomic/atomic_386.go
@@ -12,6 +12,7 @@ import "unsafe"
//
//go:linkname Load
//go:linkname Loadp
+//go:linkname LoadAcquintptr
//go:nosplit
//go:noinline
diff --git a/src/internal/runtime/atomic/atomic_andor_generic.go b/src/internal/runtime/atomic/atomic_andor_generic.go
index f8b148dda5..433ee0bd6c 100644
--- a/src/internal/runtime/atomic/atomic_andor_generic.go
+++ b/src/internal/runtime/atomic/atomic_andor_generic.go
@@ -2,10 +2,21 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-//go:build arm || mips || mipsle || mips64 || mips64le || wasm
+//go:build arm || wasm
+
+// Export some functions via linkname to assembly in sync/atomic.
+//
+//go:linkname And32
+//go:linkname Or32
+//go:linkname And64
+//go:linkname Or64
+//go:linkname Anduintptr
+//go:linkname Oruintptr
package atomic
+import _ "unsafe" // For linkname
+
//go:nosplit
func And32(ptr *uint32, val uint32) uint32 {
for {
diff --git a/src/internal/runtime/atomic/atomic_andor_test.go b/src/internal/runtime/atomic/atomic_andor_test.go
index 631a6e637d..5b594d8edf 100644
--- a/src/internal/runtime/atomic/atomic_andor_test.go
+++ b/src/internal/runtime/atomic/atomic_andor_test.go
@@ -54,6 +54,7 @@ func TestAnd32(t *testing.T) {
func TestAnd64(t *testing.T) {
// Basic sanity check.
x := uint64(0xffffffffffffffff)
+ sink = &x
for i := uint64(0); i < 64; i++ {
old := x
v := atomic.And64(&x, ^(1 << i))
@@ -131,6 +132,7 @@ func TestOr32(t *testing.T) {
func TestOr64(t *testing.T) {
// Basic sanity check.
x := uint64(0)
+ sink = &x
for i := uint64(0); i < 64; i++ {
old := x
v := atomic.Or64(&x, 1<<i)
diff --git a/src/internal/runtime/atomic/atomic_arm.go b/src/internal/runtime/atomic/atomic_arm.go
index 567e951244..b58f643ca3 100644
--- a/src/internal/runtime/atomic/atomic_arm.go
+++ b/src/internal/runtime/atomic/atomic_arm.go
@@ -19,6 +19,7 @@ const (
//
//go:linkname Xchg
//go:linkname Xchguintptr
+//go:linkname Xadd
type spinlock struct {
v uint32
diff --git a/src/internal/runtime/atomic/atomic_mips64x.go b/src/internal/runtime/atomic/atomic_mips64x.go
index 1e12b83801..f434c939e3 100644
--- a/src/internal/runtime/atomic/atomic_mips64x.go
+++ b/src/internal/runtime/atomic/atomic_mips64x.go
@@ -62,6 +62,24 @@ func And(ptr *uint32, val uint32)
func Or(ptr *uint32, val uint32)
//go:noescape
+func And32(ptr *uint32, val uint32) uint32
+
+//go:noescape
+func Or32(ptr *uint32, val uint32) uint32
+
+//go:noescape
+func And64(ptr *uint64, val uint64) uint64
+
+//go:noescape
+func Or64(ptr *uint64, val uint64) uint64
+
+//go:noescape
+func Anduintptr(ptr *uintptr, val uintptr) uintptr
+
+//go:noescape
+func Oruintptr(ptr *uintptr, val uintptr) uintptr
+
+//go:noescape
func Cas64(ptr *uint64, old, new uint64) bool
//go:noescape
diff --git a/src/internal/runtime/atomic/atomic_mips64x.s b/src/internal/runtime/atomic/atomic_mips64x.s
index b4411d87da..7b0e080238 100644
--- a/src/internal/runtime/atomic/atomic_mips64x.s
+++ b/src/internal/runtime/atomic/atomic_mips64x.s
@@ -310,6 +310,70 @@ TEXT ·And(SB), NOSPLIT, $0-12
SYNC
RET
+// func Or32(addr *uint32, v uint32) old uint32
+TEXT ·Or32(SB), NOSPLIT, $0-20
+ MOVV ptr+0(FP), R1
+ MOVW val+8(FP), R2
+
+ SYNC
+ LL (R1), R3
+ OR R2, R3, R4
+ SC R4, (R1)
+ BEQ R4, -3(PC)
+ SYNC
+ MOVW R3, ret+16(FP)
+ RET
+
+// func And32(addr *uint32, v uint32) old uint32
+TEXT ·And32(SB), NOSPLIT, $0-20
+ MOVV ptr+0(FP), R1
+ MOVW val+8(FP), R2
+
+ SYNC
+ LL (R1), R3
+ AND R2, R3, R4
+ SC R4, (R1)
+ BEQ R4, -3(PC)
+ SYNC
+ MOVW R3, ret+16(FP)
+ RET
+
+// func Or64(addr *uint64, v uint64) old uint64
+TEXT ·Or64(SB), NOSPLIT, $0-24
+ MOVV ptr+0(FP), R1
+ MOVV val+8(FP), R2
+
+ SYNC
+ LLV (R1), R3
+ OR R2, R3, R4
+ SCV R4, (R1)
+ BEQ R4, -3(PC)
+ SYNC
+ MOVV R3, ret+16(FP)
+ RET
+
+// func And64(addr *uint64, v uint64) old uint64
+TEXT ·And64(SB), NOSPLIT, $0-24
+ MOVV ptr+0(FP), R1
+ MOVV val+8(FP), R2
+
+ SYNC
+ LLV (R1), R3
+ AND R2, R3, R4
+ SCV R4, (R1)
+ BEQ R4, -3(PC)
+ SYNC
+ MOVV R3, ret+16(FP)
+ RET
+
+// func Anduintptr(addr *uintptr, v uintptr) old uintptr
+TEXT ·Anduintptr(SB), NOSPLIT, $0-24
+ JMP ·And64(SB)
+
+// func Oruintptr(addr *uintptr, v uintptr) old uintptr
+TEXT ·Oruintptr(SB), NOSPLIT, $0-24
+ JMP ·Or64(SB)
+
// uint32 ·Load(uint32 volatile* ptr)
TEXT ·Load(SB),NOSPLIT|NOFRAME,$0-12
MOVV ptr+0(FP), R1
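
The Or32/And32/Or64/And64 routines added above all follow the same LL/SC retry shape: load-linked the current word, apply the mask, store-conditional the result, loop if the store-conditional failed, and return the value observed by the load. A minimal, hedged Go sketch of the equivalent compare-and-swap retry loop, written against sync/atomic because the internal/runtime/atomic package cannot be imported directly (the or32 helper and the variable names are illustrative, not part of this CL):

// Hedged sketch only: the read-modify-write retry loop that the LL/SC
// sequences above implement in hardware, expressed with sync/atomic.
package main

import (
	"fmt"
	"sync/atomic"
)

// or32 returns the old value, like the Or32 assembly above: read the
// current word, try to publish old|val, and retry if another writer
// won the race in between.
func or32(ptr *uint32, val uint32) uint32 {
	for {
		old := atomic.LoadUint32(ptr)
		if atomic.CompareAndSwapUint32(ptr, old, old|val) {
			return old
		}
	}
}

func main() {
	x := uint32(0b0001)
	old := or32(&x, 0b0100)
	fmt.Printf("old=%04b new=%04b\n", old, x) // old=0001 new=0101
}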
diff --git a/src/internal/runtime/atomic/atomic_mipsx.go b/src/internal/runtime/atomic/atomic_mipsx.go
index e3dcde1bde..aba4143ea6 100644
--- a/src/internal/runtime/atomic/atomic_mipsx.go
+++ b/src/internal/runtime/atomic/atomic_mipsx.go
@@ -11,6 +11,8 @@
//go:linkname Cas64
//go:linkname Load64
//go:linkname Store64
+//go:linkname Or64
+//go:linkname And64
package atomic
@@ -104,6 +106,26 @@ func Store64(addr *uint64, val uint64) {
return
}
+//go:nosplit
+func Or64(addr *uint64, val uint64) (old uint64) {
+ for {
+ old = *addr
+ if Cas64(addr, old, old|val) {
+ return old
+ }
+ }
+}
+
+//go:nosplit
+func And64(addr *uint64, val uint64) (old uint64) {
+ for {
+ old = *addr
+ if Cas64(addr, old, old&val) {
+ return old
+ }
+ }
+}
+
//go:noescape
func Xadd(ptr *uint32, delta int32) uint32
@@ -144,6 +166,18 @@ func And(ptr *uint32, val uint32)
func Or(ptr *uint32, val uint32)
//go:noescape
+func And32(ptr *uint32, val uint32) uint32
+
+//go:noescape
+func Or32(ptr *uint32, val uint32) uint32
+
+//go:noescape
+func Anduintptr(ptr *uintptr, val uintptr) uintptr
+
+//go:noescape
+func Oruintptr(ptr *uintptr, val uintptr) uintptr
+
+//go:noescape
func Store(ptr *uint32, val uint32)
//go:noescape
diff --git a/src/internal/runtime/atomic/atomic_mipsx.s b/src/internal/runtime/atomic/atomic_mipsx.s
index 8f5fc53cb7..4ccc0a363b 100644
--- a/src/internal/runtime/atomic/atomic_mipsx.s
+++ b/src/internal/runtime/atomic/atomic_mipsx.s
@@ -240,6 +240,42 @@ TEXT ·And(SB), NOSPLIT, $0-8
SYNC
RET
+// func Or32(addr *uint32, v uint32) old uint32
+TEXT ·Or32(SB), NOSPLIT, $0-12
+ MOVW ptr+0(FP), R1
+ MOVW val+4(FP), R2
+
+ SYNC
+ LL (R1), R3
+ OR R2, R3, R4
+ SC R4, (R1)
+ BEQ R4, -4(PC)
+ SYNC
+ MOVW R3, ret+8(FP)
+ RET
+
+// func And32(addr *uint32, v uint32) old uint32
+TEXT ·And32(SB), NOSPLIT, $0-12
+ MOVW ptr+0(FP), R1
+ MOVW val+4(FP), R2
+
+ SYNC
+ LL (R1), R3
+ AND R2, R3, R4
+ SC R4, (R1)
+ BEQ R4, -4(PC)
+ SYNC
+ MOVW R3, ret+8(FP)
+ RET
+
+// func Anduintptr(addr *uintptr, v uintptr) old uintptr
+TEXT ·Anduintptr(SB), NOSPLIT, $0-12
+ JMP ·And32(SB)
+
+// func Oruintptr(addr *uintptr, v uintptr) old uintptr
+TEXT ·Oruintptr(SB), NOSPLIT, $0-12
+ JMP ·Or32(SB)
+
TEXT ·spinLock(SB),NOSPLIT,$0-4
MOVW state+0(FP), R1
MOVW $1, R2
diff --git a/src/internal/runtime/atomic/atomic_wasm.go b/src/internal/runtime/atomic/atomic_wasm.go
index 835fc43ccf..d1dcfec7ad 100644
--- a/src/internal/runtime/atomic/atomic_wasm.go
+++ b/src/internal/runtime/atomic/atomic_wasm.go
@@ -13,6 +13,7 @@
//go:linkname Loadint32
//go:linkname Loadint64
//go:linkname Loaduintptr
+//go:linkname LoadAcquintptr
//go:linkname Xadd
//go:linkname Xaddint32
//go:linkname Xaddint64
@@ -33,6 +34,7 @@
//go:linkname Storeint32
//go:linkname Storeint64
//go:linkname Storeuintptr
+//go:linkname StoreReluintptr
package atomic
diff --git a/src/internal/runtime/exithook/hooks.go b/src/internal/runtime/exithook/hooks.go
new file mode 100644
index 0000000000..eb8aa1ce0a
--- /dev/null
+++ b/src/internal/runtime/exithook/hooks.go
@@ -0,0 +1,85 @@
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package exithook provides limited support for on-exit cleanup.
+//
+// CAREFUL! The expectation is that Add should only be called
+// from a safe context (e.g. not an error/panic path or signal
+// handler, preemption enabled, allocation allowed, write barriers
+// allowed, etc), and that the exit function F will be invoked under
+// similar circumstances. That is the say, we are expecting that F
+// uses normal / high-level Go code as opposed to one of the more
+// restricted dialects used for the trickier parts of the runtime.
+package exithook
+
+import (
+ "internal/runtime/atomic"
+ _ "unsafe" // for linkname
+)
+
+// A Hook is a function to be run at program termination
+// (when someone invokes os.Exit, or when main.main returns).
+// Hooks are run in reverse order of registration:
+// the first hook added is the last one run.
+type Hook struct {
+ F func() // func to run
+ RunOnFailure bool // whether to run on non-zero exit code
+}
+
+var (
+ locked atomic.Int32
+ runGoid atomic.Uint64
+ hooks []Hook
+ running bool
+
+ // runtime sets these for us
+ Gosched func()
+ Goid func() uint64
+ Throw func(string)
+)
+
+// Add adds a new exit hook.
+func Add(h Hook) {
+ for !locked.CompareAndSwap(0, 1) {
+ Gosched()
+ }
+ hooks = append(hooks, h)
+ locked.Store(0)
+}
+
+// Run runs the exit hooks.
+//
+// If an exit hook panics, Run will throw with the panic on the stack.
+// If an exit hook invokes exit in the same goroutine, the goroutine will throw.
+// If an exit hook invokes exit in another goroutine, that exit will block.
+func Run(code int) {
+ for !locked.CompareAndSwap(0, 1) {
+ if Goid() == runGoid.Load() {
+ Throw("exit hook invoked exit")
+ }
+ Gosched()
+ }
+ defer locked.Store(0)
+ runGoid.Store(Goid())
+ defer runGoid.Store(0)
+
+ defer func() {
+ if e := recover(); e != nil {
+ Throw("exit hook invoked panic")
+ }
+ }()
+
+ for len(hooks) > 0 {
+ h := hooks[len(hooks)-1]
+ hooks = hooks[:len(hooks)-1]
+ if code != 0 && !h.RunOnFailure {
+ continue
+ }
+ h.F()
+ }
+}
+
+type exitError string
+
+func (e exitError) Error() string { return string(e) }
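
For context, the Hook/Add/Run API above is wired up by the runtime, which supplies Gosched, Goid, and Throw. A standalone, hedged sketch of the same semantics documented in the new package (LIFO order, RunOnFailure filtering); the hook/add/run names below are illustrative stand-ins, not the internal package itself:

// Illustrative sketch only: mirrors the exithook semantics documented
// above, without the locking and runtime wiring.
package main

import "fmt"

type hook struct {
	f            func()
	runOnFailure bool
}

var hooks []hook

func add(h hook) { hooks = append(hooks, h) }

// run pops hooks in reverse registration order; on a non-zero exit
// code, only hooks marked runOnFailure are invoked.
func run(code int) {
	for len(hooks) > 0 {
		h := hooks[len(hooks)-1]
		hooks = hooks[:len(hooks)-1]
		if code != 0 && !h.runOnFailure {
			continue
		}
		h.f()
	}
}

func main() {
	add(hook{f: func() { fmt.Println("first registered, runs last") }})
	add(hook{f: func() { fmt.Println("second registered, runs first") }, runOnFailure: true})
	run(0) // prints both, in reverse order of registration
}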
diff --git a/src/internal/runtime/syscall/asm_linux_loong64.s b/src/internal/runtime/syscall/asm_linux_loong64.s
index 11c5bc2468..ff8ad75b05 100644
--- a/src/internal/runtime/syscall/asm_linux_loong64.s
+++ b/src/internal/runtime/syscall/asm_linux_loong64.s
@@ -22,7 +22,6 @@
// r2 | R5 | R5
// err | R6 | part of R4
TEXT ·Syscall6<ABIInternal>(SB),NOSPLIT,$0-80
-#ifdef GOEXPERIMENT_regabiargs
MOVV R4, R11 // syscall entry
MOVV R5, R4
MOVV R6, R5
@@ -30,39 +29,14 @@ TEXT ·Syscall6<ABIInternal>(SB),NOSPLIT,$0-80
MOVV R8, R7
MOVV R9, R8
MOVV R10, R9
-#else
- MOVV num+0(FP), R11 // syscall entry
- MOVV a1+8(FP), R4
- MOVV a2+16(FP), R5
- MOVV a3+24(FP), R6
- MOVV a4+32(FP), R7
- MOVV a5+40(FP), R8
- MOVV a6+48(FP), R9
-#endif
SYSCALL
-#ifdef GOEXPERIMENT_regabiargs
MOVV R0, R5 // r2 is not used. Always set to 0.
MOVW $-4096, R12
BGEU R12, R4, ok
SUBVU R4, R0, R6 // errno
MOVV $-1, R4 // r1
-#else
- MOVW $-4096, R12
- BGEU R12, R4, ok
- MOVV $-1, R12
- MOVV R12, r1+56(FP)
- MOVV R0, r2+64(FP)
- SUBVU R4, R0, R4
- MOVV R4, errno+72(FP)
-#endif
RET
ok:
-#ifdef GOEXPERIMENT_regabiargs
// r1 already in R4
MOVV R0, R6 // errno
-#else
- MOVV R4, r1+56(FP)
- MOVV R0, r2+64(FP) // r2 is not used. Always set to 0.
- MOVV R0, errno+72(FP)
-#endif
RET
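
The register table in the header comment above (syscall number in R11, arguments a1..a6 in R4..R9, results returned in R4..R6) is the same (trap, six args) -> (r1, r2, errno) shape exposed by the exported syscall.Syscall6 wrapper. A hedged usage example of that exported analogue, assuming linux, where SYS_GETPID takes no arguments; this is illustrative of the calling convention, not of the internal/runtime/syscall package itself:

// Hedged example: calls the exported syscall.Syscall6, whose shape
// matches the internal wrapper implemented by the loong64 assembly above.
package main

import (
	"fmt"
	"syscall"
)

func main() {
	// getpid takes no arguments; unused slots are passed as zero.
	r1, _, errno := syscall.Syscall6(syscall.SYS_GETPID, 0, 0, 0, 0, 0, 0)
	if errno != 0 {
		fmt.Println("getpid failed:", errno)
		return
	}
	fmt.Println("pid:", r1)
}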