From 2f5b420fb5984842afab37a9c2e66e6599107483 Mon Sep 17 00:00:00 2001
From: qiulaidongfeng <2645477756@qq.com>
Date: Wed, 24 Apr 2024 22:49:31 +0000
Subject: runtime,reflect: move zeroVal to internal/abi

Change-Id: I0e19e4aa2ea47a714e27b8d66c23c449e27861f2
GitHub-Last-Rev: 2d59b9589efcf4ade6cfd7c8feffc46bf9ba912c
GitHub-Pull-Request: golang/go#67014
Reviewed-on: https://go-review.googlesource.com/c/go/+/581395
Reviewed-by: Keith Randall
LUCI-TryBot-Result: Go LUCI
Reviewed-by: Keith Randall
Reviewed-by: Joedian Reid
---
 src/internal/abi/runtime.go |  5 ++++-
 src/reflect/value.go        | 11 ++++-------
 src/runtime/iface.go        |  4 ++--
 src/runtime/map.go          | 12 ++++++------
 src/runtime/map_fast32.go   |  8 ++++----
 src/runtime/map_fast64.go   |  8 ++++----
 src/runtime/map_faststr.go  | 16 ++++++++--------
 src/runtime/runtime.go      |  3 ---
 8 files changed, 32 insertions(+), 35 deletions(-)

diff --git a/src/internal/abi/runtime.go b/src/internal/abi/runtime.go
index 9b91cdf5ef..2a3181a48d 100644
--- a/src/internal/abi/runtime.go
+++ b/src/internal/abi/runtime.go
@@ -4,5 +4,8 @@
 
 package abi
 
-// ZeroValSize is the size in bytes of runtime.zeroVal.
+// ZeroValSize is the size in bytes of [ZeroVal].
 const ZeroValSize = 1024
+
+// ZeroVal is a region containing all zero bytes.
+var ZeroVal [ZeroValSize]byte
diff --git a/src/reflect/value.go b/src/reflect/value.go
index 4b936bf5bb..06f2c2b7da 100644
--- a/src/reflect/value.go
+++ b/src/reflect/value.go
@@ -1591,7 +1591,7 @@ func (v Value) IsZero() bool {
 			// v.ptr doesn't escape, as Equal functions are compiler generated
 			// and never escape. The escape analysis doesn't know, as it is a
 			// function pointer call.
-			return typ.Equal(abi.NoEscape(v.ptr), unsafe.Pointer(&zeroVal[0]))
+			return typ.Equal(abi.NoEscape(v.ptr), unsafe.Pointer(&abi.ZeroVal[0]))
 		}
 		if typ.TFlag&abi.TFlagRegularMemory != 0 {
 			// For some types where the zero value is a value where all bits of this type are 0
@@ -1617,7 +1617,7 @@ func (v Value) IsZero() bool {
 		// If the type is comparable, then compare directly with zero.
 		if typ.Equal != nil && typ.Size() <= abi.ZeroValSize {
 			// See noescape justification above.
-			return typ.Equal(abi.NoEscape(v.ptr), unsafe.Pointer(&zeroVal[0]))
+			return typ.Equal(abi.NoEscape(v.ptr), unsafe.Pointer(&abi.ZeroVal[0]))
 		}
 		if typ.TFlag&abi.TFlagRegularMemory != 0 {
 			// For some types where the zero value is a value where all bits of this type are 0
@@ -2312,7 +2312,7 @@ func (v Value) Set(x Value) {
 	}
 	x = x.assignTo("reflect.Set", v.typ(), target)
 	if x.flag&flagIndir != 0 {
-		if x.ptr == unsafe.Pointer(&zeroVal[0]) {
+		if x.ptr == unsafe.Pointer(&abi.ZeroVal[0]) {
 			typedmemclr(v.typ(), v.ptr)
 		} else {
 			typedmemmove(v.typ(), v.ptr, x.ptr)
@@ -3280,7 +3280,7 @@ func Zero(typ Type) Value {
 	if t.IfaceIndir() {
 		var p unsafe.Pointer
 		if t.Size() <= abi.ZeroValSize {
-			p = unsafe.Pointer(&zeroVal[0])
+			p = unsafe.Pointer(&abi.ZeroVal[0])
 		} else {
 			p = unsafe_New(t)
 		}
@@ -3289,9 +3289,6 @@ func Zero(typ Type) Value {
 	return Value{t, nil, fl}
 }
 
-//go:linkname zeroVal runtime.zeroVal
-var zeroVal [abi.ZeroValSize]byte
-
 // New returns a Value representing a pointer to a new zero value
 // for the specified type. That is, the returned Value's Type is [PointerTo](typ).
 func New(typ Type) Value {
diff --git a/src/runtime/iface.go b/src/runtime/iface.go
index e280180665..28eb8fb5ec 100644
--- a/src/runtime/iface.go
+++ b/src/runtime/iface.go
@@ -391,7 +391,7 @@ func convT64(val uint64) (x unsafe.Pointer) {
 
 func convTstring(val string) (x unsafe.Pointer) {
 	if val == "" {
-		x = unsafe.Pointer(&zeroVal[0])
+		x = unsafe.Pointer(&abi.ZeroVal[0])
 	} else {
 		x = mallocgc(unsafe.Sizeof(val), stringType, true)
 		*(*string)(x) = val
@@ -402,7 +402,7 @@ func convTstring(val string) (x unsafe.Pointer) {
 func convTslice(val []byte) (x unsafe.Pointer) {
 	// Note: this must work for any element type, not just byte.
 	if (*slice)(unsafe.Pointer(&val)).array == nil {
-		x = unsafe.Pointer(&zeroVal[0])
+		x = unsafe.Pointer(&abi.ZeroVal[0])
 	} else {
 		x = mallocgc(unsafe.Sizeof(val), sliceType, true)
 		*(*[]byte)(x) = val
diff --git a/src/runtime/map.go b/src/runtime/map.go
index d97e209deb..9e8ae67a35 100644
--- a/src/runtime/map.go
+++ b/src/runtime/map.go
@@ -402,7 +402,7 @@ func mapaccess1(t *maptype, h *hmap, key unsafe.Pointer) unsafe.Pointer {
 		if err := mapKeyError(t, key); err != nil {
 			panic(err) // see issue 23734
 		}
-		return unsafe.Pointer(&zeroVal[0])
+		return unsafe.Pointer(&abi.ZeroVal[0])
 	}
 	if h.flags&hashWriting != 0 {
 		fatal("concurrent map read and map write")
@@ -443,7 +443,7 @@ bucketloop:
 			}
 		}
 	}
-	return unsafe.Pointer(&zeroVal[0])
+	return unsafe.Pointer(&abi.ZeroVal[0])
 }
 
 func mapaccess2(t *maptype, h *hmap, key unsafe.Pointer) (unsafe.Pointer, bool) {
@@ -463,7 +463,7 @@ func mapaccess2(t *maptype, h *hmap, key unsafe.Pointer) (unsafe.Pointer, bool)
 		if err := mapKeyError(t, key); err != nil {
 			panic(err) // see issue 23734
 		}
-		return unsafe.Pointer(&zeroVal[0]), false
+		return unsafe.Pointer(&abi.ZeroVal[0]), false
 	}
 	if h.flags&hashWriting != 0 {
 		fatal("concurrent map read and map write")
@@ -504,7 +504,7 @@ bucketloop:
 			}
 		}
 	}
-	return unsafe.Pointer(&zeroVal[0]), false
+	return unsafe.Pointer(&abi.ZeroVal[0]), false
 }
 
 // returns both key and elem. Used by map iterator.
@@ -553,7 +553,7 @@ bucketloop:
 
 func mapaccess1_fat(t *maptype, h *hmap, key, zero unsafe.Pointer) unsafe.Pointer {
 	e := mapaccess1(t, h, key)
-	if e == unsafe.Pointer(&zeroVal[0]) {
+	if e == unsafe.Pointer(&abi.ZeroVal[0]) {
 		return zero
 	}
 	return e
@@ -561,7 +561,7 @@ func mapaccess1_fat(t *maptype, h *hmap, key, zero unsafe.Pointe
 
 func mapaccess2_fat(t *maptype, h *hmap, key, zero unsafe.Pointer) (unsafe.Pointer, bool) {
 	e := mapaccess1(t, h, key)
-	if e == unsafe.Pointer(&zeroVal[0]) {
+	if e == unsafe.Pointer(&abi.ZeroVal[0]) {
 		return zero, false
 	}
 	return e, true
diff --git a/src/runtime/map_fast32.go b/src/runtime/map_fast32.go
index 7e52240e77..06dcbcabc4 100644
--- a/src/runtime/map_fast32.go
+++ b/src/runtime/map_fast32.go
@@ -16,7 +16,7 @@ func mapaccess1_fast32(t *maptype, h *hmap, key uint32) unsafe.Pointer {
 		racereadpc(unsafe.Pointer(h), callerpc, abi.FuncPCABIInternal(mapaccess1_fast32))
 	}
 	if h == nil || h.count == 0 {
-		return unsafe.Pointer(&zeroVal[0])
+		return unsafe.Pointer(&abi.ZeroVal[0])
 	}
 	if h.flags&hashWriting != 0 {
 		fatal("concurrent map read and map write")
@@ -47,7 +47,7 @@ func mapaccess1_fast32(t *maptype, h *hmap, key uint32) unsafe.Pointer {
 			}
 		}
 	}
-	return unsafe.Pointer(&zeroVal[0])
+	return unsafe.Pointer(&abi.ZeroVal[0])
 }
 
 func mapaccess2_fast32(t *maptype, h *hmap, key uint32) (unsafe.Pointer, bool) {
@@ -56,7 +56,7 @@ func mapaccess2_fast32(t *maptype, h *hmap, key uint32) (unsafe.Pointer, bool) {
 		racereadpc(unsafe.Pointer(h), callerpc, abi.FuncPCABIInternal(mapaccess2_fast32))
 	}
 	if h == nil || h.count == 0 {
-		return unsafe.Pointer(&zeroVal[0]), false
+		return unsafe.Pointer(&abi.ZeroVal[0]), false
 	}
 	if h.flags&hashWriting != 0 {
 		fatal("concurrent map read and map write")
@@ -87,7 +87,7 @@ func mapaccess2_fast32(t *maptype, h *hmap, key uint32) (unsafe.Pointer, bool) {
 			}
 		}
 	}
-	return unsafe.Pointer(&zeroVal[0]), false
+	return unsafe.Pointer(&abi.ZeroVal[0]), false
 }
 
 func mapassign_fast32(t *maptype, h *hmap, key uint32) unsafe.Pointer {
diff --git a/src/runtime/map_fast64.go b/src/runtime/map_fast64.go
index 2c365183cb..c8b34dd41b 100644
--- a/src/runtime/map_fast64.go
+++ b/src/runtime/map_fast64.go
@@ -16,7 +16,7 @@ func mapaccess1_fast64(t *maptype, h *hmap, key uint64) unsafe.Pointer {
 		racereadpc(unsafe.Pointer(h), callerpc, abi.FuncPCABIInternal(mapaccess1_fast64))
 	}
 	if h == nil || h.count == 0 {
-		return unsafe.Pointer(&zeroVal[0])
+		return unsafe.Pointer(&abi.ZeroVal[0])
 	}
 	if h.flags&hashWriting != 0 {
 		fatal("concurrent map read and map write")
@@ -47,7 +47,7 @@ func mapaccess1_fast64(t *maptype, h *hmap, key uint64) unsafe.Pointer {
 			}
 		}
 	}
-	return unsafe.Pointer(&zeroVal[0])
+	return unsafe.Pointer(&abi.ZeroVal[0])
 }
 
 func mapaccess2_fast64(t *maptype, h *hmap, key uint64) (unsafe.Pointer, bool) {
@@ -56,7 +56,7 @@ func mapaccess2_fast64(t *maptype, h *hmap, key uint64) (unsafe.Pointer, bool) {
 		racereadpc(unsafe.Pointer(h), callerpc, abi.FuncPCABIInternal(mapaccess2_fast64))
 	}
 	if h == nil || h.count == 0 {
-		return unsafe.Pointer(&zeroVal[0]), false
+		return unsafe.Pointer(&abi.ZeroVal[0]), false
 	}
 	if h.flags&hashWriting != 0 {
 		fatal("concurrent map read and map write")
@@ -87,7 +87,7 @@ func mapaccess2_fast64(t *maptype, h *hmap, key uint64) (unsafe.Pointer, bool) {
 			}
 		}
 	}
-	return unsafe.Pointer(&zeroVal[0]), false
+	return unsafe.Pointer(&abi.ZeroVal[0]), false
 }
 
 func mapassign_fast64(t *maptype, h *hmap, key uint64) unsafe.Pointer {
diff --git a/src/runtime/map_faststr.go b/src/runtime/map_faststr.go
index d989190f71..38841aee4b 100644
--- a/src/runtime/map_faststr.go
+++ b/src/runtime/map_faststr.go
@@ -16,7 +16,7 @@ func mapaccess1_faststr(t *maptype, h *hmap, ky string) unsafe.Pointer {
 		racereadpc(unsafe.Pointer(h), callerpc, abi.FuncPCABIInternal(mapaccess1_faststr))
 	}
 	if h == nil || h.count == 0 {
-		return unsafe.Pointer(&zeroVal[0])
+		return unsafe.Pointer(&abi.ZeroVal[0])
 	}
 	if h.flags&hashWriting != 0 {
 		fatal("concurrent map read and map write")
@@ -39,7 +39,7 @@ func mapaccess1_faststr(t *maptype, h *hmap, ky string) unsafe.Pointer {
 					return add(unsafe.Pointer(b), dataOffset+abi.MapBucketCount*2*goarch.PtrSize+i*uintptr(t.ValueSize))
 				}
 			}
-			return unsafe.Pointer(&zeroVal[0])
+			return unsafe.Pointer(&abi.ZeroVal[0])
 		}
 		// long key, try not to do more comparisons than necessary
 		keymaybe := uintptr(abi.MapBucketCount)
@@ -74,7 +74,7 @@ func mapaccess1_faststr(t *maptype, h *hmap, ky string) unsafe.Pointer {
 				return add(unsafe.Pointer(b), dataOffset+abi.MapBucketCount*2*goarch.PtrSize+keymaybe*uintptr(t.ValueSize))
 			}
 		}
-		return unsafe.Pointer(&zeroVal[0])
+		return unsafe.Pointer(&abi.ZeroVal[0])
 	}
 dohash:
 	hash := t.Hasher(noescape(unsafe.Pointer(&ky)), uintptr(h.hash0))
@@ -102,7 +102,7 @@ dohash:
 			}
 		}
 	}
-	return unsafe.Pointer(&zeroVal[0])
+	return unsafe.Pointer(&abi.ZeroVal[0])
 }
 
 func mapaccess2_faststr(t *maptype, h *hmap, ky string) (unsafe.Pointer, bool) {
@@ -111,7 +111,7 @@ func mapaccess2_faststr(t *maptype, h *hmap, ky string) (unsafe.Pointer, bool) {
 		racereadpc(unsafe.Pointer(h), callerpc, abi.FuncPCABIInternal(mapaccess2_faststr))
 	}
 	if h == nil || h.count == 0 {
-		return unsafe.Pointer(&zeroVal[0]), false
+		return unsafe.Pointer(&abi.ZeroVal[0]), false
 	}
 	if h.flags&hashWriting != 0 {
 		fatal("concurrent map read and map write")
@@ -134,7 +134,7 @@ func mapaccess2_faststr(t *maptype, h *hmap, ky string) (unsafe.Pointer, bool) {
 					return add(unsafe.Pointer(b), dataOffset+abi.MapBucketCount*2*goarch.PtrSize+i*uintptr(t.ValueSize)), true
 				}
 			}
-			return unsafe.Pointer(&zeroVal[0]), false
+			return unsafe.Pointer(&abi.ZeroVal[0]), false
 		}
 		// long key, try not to do more comparisons than necessary
 		keymaybe := uintptr(abi.MapBucketCount)
@@ -169,7 +169,7 @@ func mapaccess2_faststr(t *maptype, h *hmap, ky string) (unsafe.Pointer, bool) {
 				return add(unsafe.Pointer(b), dataOffset+abi.MapBucketCount*2*goarch.PtrSize+keymaybe*uintptr(t.ValueSize)), true
 			}
 		}
-		return unsafe.Pointer(&zeroVal[0]), false
+		return unsafe.Pointer(&abi.ZeroVal[0]), false
 	}
 dohash:
 	hash := t.Hasher(noescape(unsafe.Pointer(&ky)), uintptr(h.hash0))
@@ -197,7 +197,7 @@ dohash:
 			}
 		}
 	}
-	return unsafe.Pointer(&zeroVal[0]), false
+	return unsafe.Pointer(&abi.ZeroVal[0]), false
 }
 
 func mapassign_faststr(t *maptype, h *hmap, s string) unsafe.Pointer {
diff --git a/src/runtime/runtime.go b/src/runtime/runtime.go
index 6ec0369a7e..cc6f03d2a0 100644
--- a/src/runtime/runtime.go
+++ b/src/runtime/runtime.go
@@ -5,7 +5,6 @@
 package runtime
 
 import (
-	"internal/abi"
 	"internal/runtime/atomic"
 	"unsafe"
 )
@@ -297,5 +296,3 @@ func setCrashFD(fd uintptr) uintptr {
 var auxv []uintptr
 
 func getAuxv() []uintptr { return auxv } // accessed from x/sys/cpu; see issue 57336
-
-var zeroVal [abi.ZeroValSize]byte
-- 
cgit v1.2.3-54-g00ecf
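
Illustrative note (not part of the patch): the mechanism the change relies on is a single statically allocated region of zero bytes, abi.ZeroVal, which backs the zero value of any type no larger than abi.ZeroValSize, so runtime and reflect can hand out a pointer into that region instead of allocating. The sketch below reproduces that pattern as ordinary Go, assuming a standalone program; the names zeroRegionSize, zeroRegion, and zeroValueFor are hypothetical and do not exist in the Go tree.

package main

import (
	"fmt"
	"unsafe"
)

// zeroRegionSize plays the role of abi.ZeroValSize.
const zeroRegionSize = 1024

// zeroRegion plays the role of abi.ZeroVal: it is never written to, so every
// byte stays zero for the lifetime of the program.
var zeroRegion [zeroRegionSize]byte

// zeroValueFor returns a pointer to memory holding the zero value of T.
// Types that fit in the shared region reuse it with no allocation; larger
// types fall back to a fresh allocation, mirroring what reflect.Zero does.
// The real runtime arranges suitable alignment for its zero region; this
// sketch simply relies on the toolchain aligning a large global array.
func zeroValueFor[T any]() *T {
	if unsafe.Sizeof(*new(T)) <= zeroRegionSize {
		return (*T)(unsafe.Pointer(&zeroRegion[0]))
	}
	return new(T)
}

func main() {
	type point struct{ X, Y int }

	p := zeroValueFor[point]()      // fits: points into zeroRegion
	b := zeroValueFor[[2048]byte]() // too large: freshly allocated

	fmt.Println(*p)           // {0 0}
	fmt.Println(b[0], len(b)) // 0 2048
}

With ZeroVal living in internal/abi, both runtime and reflect import the same array directly, which is why the patch can drop reflect's //go:linkname of runtime.zeroVal.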