author    Brad Fitzpatrick <bradfitz@golang.org>    2021-08-02 14:55:51 -0700
committer Brad Fitzpatrick <bradfitz@golang.org>    2021-11-02 01:28:01 +0000
commit    a59e33224e42d60a97fa720a45e1b74eb6aaa3d0 (patch)
tree      9b4653d9f59f88d2650280abdc09b4a9abfcb090 /src/internal
parent    81fea0b4fd3b134d4c1d121abad171e358037ce3 (diff)
net/netip: add new IP address package
Co-authored-by: Alex Willmer <alex@moreati.org.uk> (GitHub @moreati)
Co-authored-by: Alexander Yastrebov <yastrebov.alex@gmail.com>
Co-authored-by: David Anderson <dave@natulte.net> (Tailscale CLA)
Co-authored-by: David Crawshaw <crawshaw@tailscale.com> (Tailscale CLA)
Co-authored-by: Dmytro Shynkevych <dmytro@tailscale.com> (Tailscale CLA)
Co-authored-by: Elias Naur <mail@eliasnaur.com>
Co-authored-by: Joe Tsai <joetsai@digital-static.net> (Tailscale CLA)
Co-authored-by: Jonathan Yu <jawnsy@cpan.org> (GitHub @jawnsy)
Co-authored-by: Josh Bleecher Snyder <josharian@gmail.com> (Tailscale CLA)
Co-authored-by: Maisem Ali <maisem@tailscale.com> (Tailscale CLA)
Co-authored-by: Manuel Mendez (Go AUTHORS mmendez534@...)
Co-authored-by: Matt Layher <mdlayher@gmail.com>
Co-authored-by: Noah Treuhaft <noah.treuhaft@gmail.com> (GitHub @nwt)
Co-authored-by: Stefan Majer <stefan.majer@gmail.com>
Co-authored-by: Terin Stock <terinjokes@gmail.com> (Cloudflare CLA)
Co-authored-by: Tobias Klauser <tklauser@distanz.ch>
Fixes #46518
Change-Id: I0041f9e1115d61fa6e95fcf32b01d9faee708712
Reviewed-on: https://go-review.googlesource.com/c/go/+/339309
Run-TryBot: Brad Fitzpatrick <bradfitz@golang.org>
TryBot-Result: Go Bot <gobot@golang.org>
Reviewed-by: Russ Cox <rsc@golang.org>
Trust: Brad Fitzpatrick <bradfitz@golang.org>
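For context, a minimal sketch of the net/netip API this change introduces; the diff below covers only the supporting src/internal packages, so the function names here (ParseAddr, MustParseAddrPort, MustParseAddr, Is4) come from the package as released in Go 1.18, not from this excerpt:

package main

import (
	"fmt"
	"net/netip"
)

func main() {
	// Addr is a small, comparable, allocation-free IP address value.
	addr, err := netip.ParseAddr("192.0.2.1")
	if err != nil {
		panic(err)
	}
	fmt.Println(addr.Is4(), addr.String()) // true 192.0.2.1

	// AddrPort bundles an address with a port.
	ap := netip.MustParseAddrPort("[2001:db8::1]:443")
	fmt.Println(ap.Addr(), ap.Port()) // 2001:db8::1 443

	// Because Addr is comparable, it works directly as a map key.
	seen := map[netip.Addr]bool{addr: true}
	fmt.Println(seen[netip.MustParseAddr("192.0.2.1")]) // true
}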
Diffstat (limited to 'src/internal')
-rw-r--r--  src/internal/fuzz/fuzz.go              9
-rw-r--r--  src/internal/godebug/godebug.go       34
-rw-r--r--  src/internal/godebug/godebug_test.go  34
-rw-r--r--  src/internal/intern/intern.go        178
-rw-r--r--  src/internal/intern/intern_test.go   199
5 files changed, 447 insertions, 7 deletions
diff --git a/src/internal/fuzz/fuzz.go b/src/internal/fuzz/fuzz.go
index 78319a7496..2ebe2a64db 100644
--- a/src/internal/fuzz/fuzz.go
+++ b/src/internal/fuzz/fuzz.go
@@ -12,6 +12,7 @@ import (
"crypto/sha256"
"errors"
"fmt"
+ "internal/godebug"
"io"
"io/ioutil"
"math/bits"
@@ -1063,13 +1064,7 @@ var (
func shouldPrintDebugInfo() bool {
debugInfoOnce.Do(func() {
- debug := strings.Split(os.Getenv("GODEBUG"), ",")
- for _, f := range debug {
- if f == "fuzzdebug=1" {
- debugInfo = true
- break
- }
- }
+ debugInfo = godebug.Get("fuzzdebug") == "1"
})
return debugInfo
}
diff --git a/src/internal/godebug/godebug.go b/src/internal/godebug/godebug.go
new file mode 100644
index 0000000000..ac434e5fd8
--- /dev/null
+++ b/src/internal/godebug/godebug.go
@@ -0,0 +1,34 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package godebug parses the GODEBUG environment variable.
+package godebug
+
+import "os"
+
+// Get returns the value for the provided GODEBUG key.
+func Get(key string) string {
+ return get(os.Getenv("GODEBUG"), key)
+}
+
+// get returns the value part of key=value in s (a GODEBUG value).
+func get(s, key string) string {
+ for i := 0; i < len(s)-len(key)-1; i++ {
+ if i > 0 && s[i-1] != ',' {
+ continue
+ }
+ afterKey := s[i+len(key):]
+ if afterKey[0] != '=' || s[i:i+len(key)] != key {
+ continue
+ }
+ val := afterKey[1:]
+ for i, b := range val {
+ if b == ',' {
+ return val[:i]
+ }
+ }
+ return val
+ }
+ return ""
+}
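A rough sketch of how a standard-library package is expected to consume this helper (internal/godebug is importable only from within GOROOT/src; the package name mypkg and the key mypkgdebug below are hypothetical):

package mypkg

import "internal/godebug"

// debugEnabled reports whether the process was started with the
// (hypothetical) key enabled, e.g. GODEBUG=mypkgdebug=1,gctrace=1.
// GODEBUG holds comma-separated key=value pairs; Get returns the value
// for one key, or "" if the key is absent.
func debugEnabled() bool {
	return godebug.Get("mypkgdebug") == "1"
}

The fuzz.go hunk above is the first such caller, replacing its hand-rolled strings.Split loop over GODEBUG with godebug.Get("fuzzdebug").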
diff --git a/src/internal/godebug/godebug_test.go b/src/internal/godebug/godebug_test.go
new file mode 100644
index 0000000000..41b9117b73
--- /dev/null
+++ b/src/internal/godebug/godebug_test.go
@@ -0,0 +1,34 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package godebug
+
+import "testing"
+
+func TestGet(t *testing.T) {
+ tests := []struct {
+ godebug string
+ key string
+ want string
+ }{
+ {"", "", ""},
+ {"", "foo", ""},
+ {"foo=bar", "foo", "bar"},
+ {"foo=bar,after=x", "foo", "bar"},
+ {"before=x,foo=bar,after=x", "foo", "bar"},
+ {"before=x,foo=bar", "foo", "bar"},
+ {",,,foo=bar,,,", "foo", "bar"},
+ {"foodecoy=wrong,foo=bar", "foo", "bar"},
+ {"foo=", "foo", ""},
+ {"foo", "foo", ""},
+ {",foo", "foo", ""},
+ {"foo=bar,baz", "loooooooong", ""},
+ }
+ for _, tt := range tests {
+ got := get(tt.godebug, tt.key)
+ if got != tt.want {
+ t.Errorf("get(%q, %q) = %q; want %q", tt.godebug, tt.key, got, tt.want)
+ }
+ }
+}
diff --git a/src/internal/intern/intern.go b/src/internal/intern/intern.go
new file mode 100644
index 0000000000..666caa6d2f
--- /dev/null
+++ b/src/internal/intern/intern.go
@@ -0,0 +1,178 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package intern lets you make smaller comparable values by boxing
+// a larger comparable value (such as a 16 byte string header) down
+// into a globally unique 8 byte pointer.
+//
+// The globally unique pointers are garbage collected with weak
+// references and finalizers. This package hides that.
+package intern
+
+import (
+ "internal/godebug"
+ "runtime"
+ "sync"
+ "unsafe"
+)
+
+// A Value pointer is the handle to an underlying comparable value.
+// See func Get for how Value pointers may be used.
+type Value struct {
+ _ [0]func() // prevent people from accidentally using value type as comparable
+ cmpVal interface{}
+ // resurrected is guarded by mu (for all instances of Value).
+ // It is set true whenever v is synthesized from a uintptr.
+ resurrected bool
+}
+
+// Get returns the comparable value passed to the Get func
+// that returned v.
+func (v *Value) Get() interface{} { return v.cmpVal }
+
+// key is a key in our global value map.
+// It contains type-specialized fields to avoid allocations
+// when converting common types to empty interfaces.
+type key struct {
+ s string
+ cmpVal interface{}
+ // isString reports whether key contains a string.
+ // Without it, the zero value of key is ambiguous.
+ isString bool
+}
+
+// keyFor returns a key to use with cmpVal.
+func keyFor(cmpVal interface{}) key {
+ if s, ok := cmpVal.(string); ok {
+ return key{s: s, isString: true}
+ }
+ return key{cmpVal: cmpVal}
+}
+
+// Value returns a *Value built from k.
+func (k key) Value() *Value {
+ if k.isString {
+ return &Value{cmpVal: k.s}
+ }
+ return &Value{cmpVal: k.cmpVal}
+}
+
+var (
+ // mu guards valMap, a weakref map of *Value by underlying value.
+ // It also guards the resurrected field of all *Values.
+ mu sync.Mutex
+ valMap = map[key]uintptr{} // to uintptr(*Value)
+ valSafe = safeMap() // non-nil in safe+leaky mode
+)
+
+// safeMap returns a non-nil map if we're in safe-but-leaky mode,
+// as controlled by GODEBUG=intern=leaky
+func safeMap() map[key]*Value {
+ if godebug.Get("intern") == "leaky" {
+ return map[key]*Value{}
+ }
+ return nil
+}
+
+// Get returns a pointer representing the comparable value cmpVal.
+//
+// The returned pointer will be the same for Get(v) and Get(v2)
+// if and only if v == v2, and can be used as a map key.
+func Get(cmpVal interface{}) *Value {
+ return get(keyFor(cmpVal))
+}
+
+// GetByString is identical to Get, except that it is specialized for strings.
+// This avoids an allocation from putting a string into an interface{}
+// to pass as an argument to Get.
+func GetByString(s string) *Value {
+ return get(key{s: s, isString: true})
+}
+
+// We play unsafe games that violate Go's rules (and assume a non-moving
+// collector). So we quiet Go here.
+// See the comment below Get for more implementation details.
+//go:nocheckptr
+func get(k key) *Value {
+ mu.Lock()
+ defer mu.Unlock()
+
+ var v *Value
+ if valSafe != nil {
+ v = valSafe[k]
+ } else if addr, ok := valMap[k]; ok {
+ v = (*Value)(unsafe.Pointer(addr))
+ v.resurrected = true
+ }
+ if v != nil {
+ return v
+ }
+ v = k.Value()
+ if valSafe != nil {
+ valSafe[k] = v
+ } else {
+ // SetFinalizer before uintptr conversion (theoretical concern;
+ // see https://github.com/go4org/intern/issues/13)
+ runtime.SetFinalizer(v, finalize)
+ valMap[k] = uintptr(unsafe.Pointer(v))
+ }
+ return v
+}
+
+func finalize(v *Value) {
+ mu.Lock()
+ defer mu.Unlock()
+ if v.resurrected {
+ // We lost the race. Somebody resurrected it while we
+ // were about to finalize it. Try again next round.
+ v.resurrected = false
+ runtime.SetFinalizer(v, finalize)
+ return
+ }
+ delete(valMap, keyFor(v.cmpVal))
+}
+
+// Interning is simple if you don't require that unused values be
+// garbage collectable. But we do require that; we don't want to be a
+// DoS vector. We do this by using a uintptr to hide the pointer from
+// the garbage collector, and using a finalizer to eliminate the
+// pointer when no other code is using it.
+//
+// The obvious implementation of this is to use a
+// map[interface{}]uintptr-of-*interface{}, and set up a finalizer to
+// delete from the map. Unfortunately, this is racy. Because pointers
+// are being created in violation of Go's unsafe.Pointer rules, it's
+// possible to create a pointer to a value concurrently with the GC
+// concluding that the value can be collected. There are other races
+// that break the equality invariant as well, but the use-after-free
+// will cause a runtime crash.
+//
+// To make this work, the finalizer needs to know that no references
+// have been unsafely created since the finalizer was set up. To do
+// this, values carry a "resurrected" sentinel, which gets set
+// whenever a pointer is unsafely created. If the finalizer encounters
+// the sentinel, it clears the sentinel and delays collection for one
+// additional GC cycle, by re-installing itself as finalizer. This
+// ensures that the unsafely created pointer is visible to the GC, and
+// will correctly prevent collection.
+//
+// This technique does mean that interned values that get reused take
+// at least 3 GC cycles to fully collect (1 to clear the sentinel, 1
+// to clean up the unsafe map, 1 to be actually deleted).
+//
+// @ianlancetaylor commented in
+// https://github.com/golang/go/issues/41303#issuecomment-717401656
+// that it is possible to implement weak references in terms of
+// finalizers without unsafe. Unfortunately, the approach he outlined
+// does not work here, for two reasons. First, there is no way to
+// construct a strong pointer out of a weak pointer; our map stores
+// weak pointers, but we must return strong pointers to callers.
+// Second, and more fundamentally, we must return not just _a_ strong
+// pointer to callers, but _the same_ strong pointer to callers. In
+// order to return _the same_ strong pointer to callers, we must track
+// it, which is exactly what we cannot do with strong pointers.
+//
+// See https://github.com/inetaf/netaddr/issues/53 for more
+// discussion, and https://github.com/go4org/intern/issues/2 for an
+// illustration of the subtleties at play.
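A rough usage sketch of the API above (again only buildable from inside GOROOT/src; the function Dedup and its argument are hypothetical):

package mypkg

import "internal/intern"

// Dedup counts occurrences per distinct string. Because Get/GetByString
// return the same *intern.Value for equal inputs, the 8-byte pointer can
// stand in for the 16-byte string header as a comparable map key.
func Dedup(hosts []string) map[*intern.Value]int {
	counts := make(map[*intern.Value]int)
	for _, h := range hosts {
		// GetByString avoids boxing the string into an interface{}.
		counts[intern.GetByString(h)]++
	}
	return counts
}

Interned values are garbage collected once no caller retains them; setting GODEBUG=intern=leaky switches to the safe-but-leaky map shown in safeMap above. The new net/netip package uses this mechanism to intern IPv6 zone identifiers.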
diff --git a/src/internal/intern/intern_test.go b/src/internal/intern/intern_test.go
new file mode 100644
index 0000000000..d1e409ef95
--- /dev/null
+++ b/src/internal/intern/intern_test.go
@@ -0,0 +1,199 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package intern
+
+import (
+ "fmt"
+ "runtime"
+ "testing"
+)
+
+func TestBasics(t *testing.T) {
+ clearMap()
+ foo := Get("foo")
+ bar := Get("bar")
+ empty := Get("")
+ nilEface := Get(nil)
+ i := Get(0x7777777)
+ foo2 := Get("foo")
+ bar2 := Get("bar")
+ empty2 := Get("")
+ nilEface2 := Get(nil)
+ i2 := Get(0x7777777)
+ foo3 := GetByString("foo")
+ empty3 := GetByString("")
+
+ if foo.Get() != foo2.Get() {
+ t.Error("foo/foo2 values differ")
+ }
+ if foo.Get() != foo3.Get() {
+ t.Error("foo/foo3 values differ")
+ }
+ if foo.Get() != "foo" {
+ t.Error("foo.Get not foo")
+ }
+ if foo != foo2 {
+ t.Error("foo/foo2 pointers differ")
+ }
+ if foo != foo3 {
+ t.Error("foo/foo3 pointers differ")
+ }
+
+ if bar.Get() != bar2.Get() {
+ t.Error("bar values differ")
+ }
+ if bar.Get() != "bar" {
+ t.Error("bar.Get not bar")
+ }
+ if bar != bar2 {
+ t.Error("bar pointers differ")
+ }
+
+ if i.Get() != i2.Get() {
+ t.Error("i values differ")
+ }
+ if i.Get() != 0x7777777 {
+ t.Error("i.Get not 0x7777777")
+ }
+ if i != i2 {
+ t.Error("i pointers differ")
+ }
+
+ if empty.Get() != empty2.Get() {
+ t.Error("empty/empty2 values differ")
+ }
+ if empty.Get() != empty3.Get() {
+ t.Error("empty/empty3 values differ")
+ }
+ if empty.Get() != "" {
+ t.Error("empty.Get not empty string")
+ }
+ if empty != empty2 {
+ t.Error("empty/empty2 pointers differ")
+ }
+ if empty != empty3 {
+ t.Error("empty/empty3 pointers differ")
+ }
+
+ if nilEface.Get() != nilEface2.Get() {
+ t.Error("nilEface values differ")
+ }
+ if nilEface.Get() != nil {
+ t.Error("nilEface.Get not nil")
+ }
+ if nilEface != nilEface2 {
+ t.Error("nilEface pointers differ")
+ }
+
+ if n := mapLen(); n != 5 {
+ t.Errorf("map len = %d; want 4", n)
+ }
+
+ wantEmpty(t)
+}
+
+func wantEmpty(t testing.TB) {
+ t.Helper()
+ const gcTries = 5000
+ for try := 0; try < gcTries; try++ {
+ runtime.GC()
+ n := mapLen()
+ if n == 0 {
+ break
+ }
+ if try == gcTries-1 {
+ t.Errorf("map len = %d after (%d GC tries); want 0, contents: %v", n, gcTries, mapKeys())
+ }
+ }
+}
+
+func TestStress(t *testing.T) {
+ iters := 10000
+ if testing.Short() {
+ iters = 1000
+ }
+ var sink []byte
+ for i := 0; i < iters; i++ {
+ _ = Get("foo")
+ sink = make([]byte, 1<<20)
+ }
+ _ = sink
+}
+
+func BenchmarkStress(b *testing.B) {
+ done := make(chan struct{})
+ defer close(done)
+ go func() {
+ for {
+ select {
+ case <-done:
+ return
+ default:
+ }
+ runtime.GC()
+ }
+ }()
+
+ clearMap()
+ v1 := Get("foo")
+ b.ReportAllocs()
+ b.RunParallel(func(pb *testing.PB) {
+ for pb.Next() {
+ v2 := Get("foo")
+ if v1 != v2 {
+ b.Fatal("wrong value")
+ }
+ // And also a key we don't retain:
+ _ = Get("bar")
+ }
+ })
+ runtime.GC()
+ wantEmpty(b)
+}
+
+func mapLen() int {
+ mu.Lock()
+ defer mu.Unlock()
+ return len(valMap)
+}
+
+func mapKeys() (keys []string) {
+ mu.Lock()
+ defer mu.Unlock()
+ for k := range valMap {
+ keys = append(keys, fmt.Sprint(k))
+ }
+ return keys
+}
+
+func clearMap() {
+ mu.Lock()
+ defer mu.Unlock()
+ for k := range valMap {
+ delete(valMap, k)
+ }
+}
+
+var (
+ globalString = "not a constant"
+ sink string
+)
+
+func TestGetByStringAllocs(t *testing.T) {
+ allocs := int(testing.AllocsPerRun(100, func() {
+ GetByString(globalString)
+ }))
+ if allocs != 0 {
+ t.Errorf("GetString allocated %d objects, want 0", allocs)
+ }
+}
+
+func BenchmarkGetByString(b *testing.B) {
+ b.ReportAllocs()
+ for i := 0; i < b.N; i++ {
+ v := GetByString(globalString)
+ sink = v.Get().(string)
+ }
+}