author     Keith Randall <khr@golang.org>  2023-05-02 17:37:00 +0000
committer  Keith Randall <khr@google.com>  2023-05-19 18:10:11 +0000
commit     bd3f44e4ffe54e9cf841ebc8356e403bb38436bd (patch)
tree       fa91f127398bdf3fd9058b37bd7a228f4ad2ae96
parent     b60db8f7d92fb2c87b6f416ebb38a6924978aa8c (diff)
cmd/compile: constant-fold loads from constant dictionaries and types
Retrying the original CL with a small modification. The original CL did not
handle the case of reading an itab out of a dictionary correctly: when we read
an itab out of a dictionary, we must treat the type inside that itab as
possibly being put in an interface.

Original CL: 486895
Revert CL: 490156

Change-Id: Id2dc1699d184cd8c63dac83986a70b60b4e6cbd7
Reviewed-on: https://go-review.googlesource.com/c/go/+/491495
Reviewed-by: Matthew Dempsky <mdempsky@google.com>
Reviewed-by: Cuong Manh Le <cuong.manhle.vn@gmail.com>
Run-TryBot: Keith Randall <khr@golang.org>
Reviewed-by: Keith Randall <khr@google.com>
TryBot-Result: Gopher Robot <gobot@golang.org>
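For context, here is a minimal sketch (not part of this CL; the type and method names are illustrative) of the kind of generic code this change targets, mirroring the new devirtualize1.go and devirtualize2.go tests below. Inside F, the method call goes through an itab loaded from the instantiation's dictionary; when that dictionary is a constant global, this CL aims to let the compiler fold the load and devirtualize the call.

package main

type S struct{ x int }

func (*S) M() {}

type I interface{ M() }

// x.M() is an interface call made through an itab obtained from F's
// dictionary argument. With constant-folded dictionary loads, the
// compiler can rewrite it into a direct call to (*S).M when F is
// instantiated with *S.
func F[T I](x T) { x.M() }

func main() {
	F(&S{})
}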
-rw-r--r--  src/cmd/compile/internal/noder/reader.go          2
-rw-r--r--  src/cmd/compile/internal/reflectdata/reflect.go  13
-rw-r--r--  src/cmd/compile/internal/ssa/_gen/generic.rules  27
-rw-r--r--  src/cmd/compile/internal/ssa/rewrite.go          95
-rw-r--r--  src/cmd/compile/internal/ssa/rewritegeneric.go  347
-rw-r--r--  src/cmd/internal/obj/link.go                     18
-rw-r--r--  src/cmd/internal/objabi/reloctype.go              3
-rw-r--r--  src/cmd/link/internal/ld/deadcode.go              8
-rw-r--r--  src/cmd/link/internal/ld/decodesym.go             6
-rw-r--r--  test/typeparam/devirtualize1.go                  22
-rw-r--r--  test/typeparam/devirtualize2.go                  28
11 files changed, 513 insertions, 56 deletions
diff --git a/src/cmd/compile/internal/noder/reader.go b/src/cmd/compile/internal/noder/reader.go
index 27f51af922..70f51e2253 100644
--- a/src/cmd/compile/internal/noder/reader.go
+++ b/src/cmd/compile/internal/noder/reader.go
@@ -3993,7 +3993,7 @@ func setBasePos(pos src.XPos) {
//
// N.B., this variable name is known to Delve:
// https://github.com/go-delve/delve/blob/cb91509630529e6055be845688fd21eb89ae8714/pkg/proc/eval.go#L28
-const dictParamName = ".dict"
+const dictParamName = typecheck.LocalDictName
// shapeSig returns a copy of fn's signature, except adding a
// dictionary parameter and promoting the receiver parameter (if any)
diff --git a/src/cmd/compile/internal/reflectdata/reflect.go b/src/cmd/compile/internal/reflectdata/reflect.go
index eeda3cb4e2..ff2eb1aba9 100644
--- a/src/cmd/compile/internal/reflectdata/reflect.go
+++ b/src/cmd/compile/internal/reflectdata/reflect.go
@@ -835,7 +835,14 @@ func TypeLinksymLookup(name string) *obj.LSym {
}
func TypeLinksym(t *types.Type) *obj.LSym {
- return TypeSym(t).Linksym()
+ lsym := TypeSym(t).Linksym()
+ signatmu.Lock()
+ if lsym.Extra == nil {
+ ti := lsym.NewTypeInfo()
+ ti.Type = t
+ }
+ signatmu.Unlock()
+ return lsym
}
// Deprecated: Use TypePtrAt instead.
@@ -1869,7 +1876,9 @@ func MarkTypeUsedInInterface(t *types.Type, from *obj.LSym) {
// Shape types shouldn't be put in interfaces, so we shouldn't ever get here.
base.Fatalf("shape types have no methods %+v", t)
}
- tsym := TypeLinksym(t)
+ MarkTypeSymUsedInInterface(TypeLinksym(t), from)
+}
+func MarkTypeSymUsedInInterface(tsym *obj.LSym, from *obj.LSym) {
// Emit a marker relocation. The linker will know the type is converted
// to an interface if "from" is reachable.
r := obj.Addrel(from)
diff --git a/src/cmd/compile/internal/ssa/_gen/generic.rules b/src/cmd/compile/internal/ssa/_gen/generic.rules
index 2ee8010857..cdb346321e 100644
--- a/src/cmd/compile/internal/ssa/_gen/generic.rules
+++ b/src/cmd/compile/internal/ssa/_gen/generic.rules
@@ -2065,6 +2065,10 @@
&& warnRule(fe.Debug_checknil(), v, "removed nil check")
=> (Invalid)
+// Addresses of globals are always non-nil.
+(NilCheck (Addr {_} (SB)) _) => (Invalid)
+(NilCheck (Convert (Addr {_} (SB)) _) _) => (Invalid)
+
// for late-expanded calls, recognize memequal applied to a single constant byte
// Support is limited by 1, 2, 4, 8 byte sizes
(StaticLECall {callAux} sptr (Addr {scon} (SB)) (Const64 [1]) mem)
@@ -2152,6 +2156,8 @@
(NeqPtr (OffPtr [o1] p1) (OffPtr [o2] p2)) && isSamePtr(p1, p2) => (ConstBool [o1 != o2])
(EqPtr (Const(32|64) [c]) (Const(32|64) [d])) => (ConstBool [c == d])
(NeqPtr (Const(32|64) [c]) (Const(32|64) [d])) => (ConstBool [c != d])
+(EqPtr (Convert (Addr {x} _) _) (Addr {y} _)) => (ConstBool [x==y])
+(NeqPtr (Convert (Addr {x} _) _) (Addr {y} _)) => (ConstBool [x!=y])
(EqPtr (LocalAddr _ _) (Addr _)) => (ConstBool [false])
(EqPtr (OffPtr (LocalAddr _ _)) (Addr _)) => (ConstBool [false])
@@ -2173,7 +2179,8 @@
// Evaluate constant user nil checks.
(IsNonNil (ConstNil)) => (ConstBool [false])
(IsNonNil (Const(32|64) [c])) => (ConstBool [c != 0])
-(IsNonNil (Addr _)) => (ConstBool [true])
+(IsNonNil (Addr _) ) => (ConstBool [true])
+(IsNonNil (Convert (Addr _) _)) => (ConstBool [true])
(IsNonNil (LocalAddr _ _)) => (ConstBool [true])
// Inline small or disjoint runtime.memmove calls with constant length.
@@ -2216,11 +2223,7 @@
=> (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
// De-virtualize late-expanded interface calls into late-expanded static calls.
-// Note that (ITab (IMake)) doesn't get rewritten until after the first opt pass,
-// so this rule should trigger reliably.
-// devirtLECall removes the first argument, adds the devirtualized symbol to the AuxCall, and changes the opcode
-(InterLECall [argsize] {auxCall} (Load (OffPtr [off] (ITab (IMake (Addr {itab} (SB)) _))) _) ___) && devirtLESym(v, auxCall, itab, off) !=
- nil => devirtLECall(v, devirtLESym(v, auxCall, itab, off))
+(InterLECall [argsize] {auxCall} (Addr {fn} (SB)) ___) => devirtLECall(v, fn.(*obj.LSym))
// Move and Zero optimizations.
// Move source and destination may overlap.
@@ -2734,3 +2737,15 @@
(RotateLeft(64|32|16|8) (RotateLeft(64|32|16|8) x c) d) && c.Type.Size() == 4 && d.Type.Size() == 4 => (RotateLeft(64|32|16|8) x (Add32 <c.Type> c d))
(RotateLeft(64|32|16|8) (RotateLeft(64|32|16|8) x c) d) && c.Type.Size() == 2 && d.Type.Size() == 2 => (RotateLeft(64|32|16|8) x (Add16 <c.Type> c d))
(RotateLeft(64|32|16|8) (RotateLeft(64|32|16|8) x c) d) && c.Type.Size() == 1 && d.Type.Size() == 1 => (RotateLeft(64|32|16|8) x (Add8 <c.Type> c d))
+
+// Loading constant values from dictionaries and itabs.
+(Load <t> (OffPtr [off] (Addr {s} sb) ) _) && t.IsUintptr() && isFixedSym(s, off) => (Addr {fixedSym(b.Func, s, off)} sb)
+(Load <t> (OffPtr [off] (Convert (Addr {s} sb) _) ) _) && t.IsUintptr() && isFixedSym(s, off) => (Addr {fixedSym(b.Func, s, off)} sb)
+(Load <t> (OffPtr [off] (ITab (IMake (Addr {s} sb) _))) _) && t.IsUintptr() && isFixedSym(s, off) => (Addr {fixedSym(b.Func, s, off)} sb)
+(Load <t> (OffPtr [off] (ITab (IMake (Convert (Addr {s} sb) _) _))) _) && t.IsUintptr() && isFixedSym(s, off) => (Addr {fixedSym(b.Func, s, off)} sb)
+
+// Loading constant values from runtime._type.hash.
+(Load <t> (OffPtr [off] (Addr {sym} _) ) _) && t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off) => (Const32 [fixed32(config, sym, off)])
+(Load <t> (OffPtr [off] (Convert (Addr {sym} _) _) ) _) && t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off) => (Const32 [fixed32(config, sym, off)])
+(Load <t> (OffPtr [off] (ITab (IMake (Addr {sym} _) _))) _) && t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off) => (Const32 [fixed32(config, sym, off)])
+(Load <t> (OffPtr [off] (ITab (IMake (Convert (Addr {sym} _) _) _))) _) && t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off) => (Const32 [fixed32(config, sym, off)])
diff --git a/src/cmd/compile/internal/ssa/rewrite.go b/src/cmd/compile/internal/ssa/rewrite.go
index 2db7acf872..29fd89c28a 100644
--- a/src/cmd/compile/internal/ssa/rewrite.go
+++ b/src/cmd/compile/internal/ssa/rewrite.go
@@ -7,6 +7,7 @@ package ssa
import (
"cmd/compile/internal/base"
"cmd/compile/internal/logopt"
+ "cmd/compile/internal/reflectdata"
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/obj/s390x"
@@ -20,6 +21,7 @@ import (
"math/bits"
"os"
"path/filepath"
+ "strings"
)
type deadValueChoice bool
@@ -800,25 +802,6 @@ func loadLSymOffset(lsym *obj.LSym, offset int64) *obj.LSym {
return nil
}
-// de-virtualize an InterLECall
-// 'sym' is the symbol for the itab.
-func devirtLESym(v *Value, aux Aux, sym Sym, offset int64) *obj.LSym {
- n, ok := sym.(*obj.LSym)
- if !ok {
- return nil
- }
-
- lsym := loadLSymOffset(n, offset)
- if f := v.Block.Func; f.pass.debug > 0 {
- if lsym != nil {
- f.Warnl(v.Pos, "de-virtualizing call")
- } else {
- f.Warnl(v.Pos, "couldn't de-virtualize call")
- }
- }
- return lsym
-}
-
func devirtLECall(v *Value, sym *obj.LSym) *Value {
v.Op = OpStaticLECall
auxcall := v.Aux.(*AuxCall)
@@ -828,6 +811,9 @@ func devirtLECall(v *Value, sym *obj.LSym) *Value {
copy(v.Args[0:], v.Args[1:])
v.Args[len(v.Args)-1] = nil // aid GC
v.Args = v.Args[:len(v.Args)-1]
+ if f := v.Block.Func; f.pass.debug > 0 {
+ f.Warnl(v.Pos, "de-virtualizing call")
+ }
return v
}
@@ -1750,6 +1736,77 @@ func symIsROZero(sym Sym) bool {
return true
}
+// isFixed32 returns true if the int32 at offset off in symbol sym
+// is known and constant.
+func isFixed32(c *Config, sym Sym, off int64) bool {
+ return isFixed(c, sym, off, 4)
+}
+
+// isFixed returns true if the range [off,off+size] of the symbol sym
+// is known and constant.
+func isFixed(c *Config, sym Sym, off, size int64) bool {
+ lsym := sym.(*obj.LSym)
+ if lsym.Extra == nil {
+ return false
+ }
+ if _, ok := (*lsym.Extra).(*obj.TypeInfo); ok {
+ if off == 2*c.PtrSize && size == 4 {
+ return true // type hash field
+ }
+ }
+ return false
+}
+func fixed32(c *Config, sym Sym, off int64) int32 {
+ lsym := sym.(*obj.LSym)
+ if ti, ok := (*lsym.Extra).(*obj.TypeInfo); ok {
+ if off == 2*c.PtrSize {
+ return int32(types.TypeHash(ti.Type.(*types.Type)))
+ }
+ }
+ base.Fatalf("fixed32 data not known for %s:%d", sym, off)
+ return 0
+}
+
+// isFixedSym returns true if the contents of sym at the given offset
+// are known and are the constant address of another symbol.
+func isFixedSym(sym Sym, off int64) bool {
+ lsym := sym.(*obj.LSym)
+ switch {
+ case lsym.Type == objabi.SRODATA:
+ // itabs, dictionaries
+ default:
+ return false
+ }
+ for _, r := range lsym.R {
+ if (r.Type == objabi.R_ADDR || r.Type == objabi.R_WEAKADDR) && int64(r.Off) == off && r.Add == 0 {
+ return true
+ }
+ }
+ return false
+}
+func fixedSym(f *Func, sym Sym, off int64) Sym {
+ lsym := sym.(*obj.LSym)
+ for _, r := range lsym.R {
+ if (r.Type == objabi.R_ADDR || r.Type == objabi.R_WEAKADDR) && int64(r.Off) == off {
+ if strings.HasPrefix(r.Sym.Name, "type:") {
+ // In case we're loading a type out of a dictionary, we need to record
+ // that the containing function might put that type in an interface.
+ // That information is currently recorded in relocations in the dictionary,
+ // but if we perform this load at compile time then the dictionary
+ // might be dead.
+ reflectdata.MarkTypeSymUsedInInterface(r.Sym, f.fe.Func().Linksym())
+ } else if strings.HasPrefix(r.Sym.Name, "go:itab") {
+ // Same, but if we're using an itab we need to record that the
+ // itab._type might be put in an interface.
+ reflectdata.MarkTypeSymUsedInInterface(r.Sym, f.fe.Func().Linksym())
+ }
+ return r.Sym
+ }
+ }
+ base.Fatalf("fixedSym data not known for %s:%d", sym, off)
+ return nil
+}
+
// read8 reads one byte from the read-only global sym at offset off.
func read8(sym interface{}, off int64) uint8 {
lsym := sym.(*obj.LSym)
diff --git a/src/cmd/compile/internal/ssa/rewritegeneric.go b/src/cmd/compile/internal/ssa/rewritegeneric.go
index 78f13e679d..e5bd8bc36f 100644
--- a/src/cmd/compile/internal/ssa/rewritegeneric.go
+++ b/src/cmd/compile/internal/ssa/rewritegeneric.go
@@ -3,6 +3,7 @@
package ssa
import "math"
+import "cmd/internal/obj"
import "cmd/compile/internal/types"
import "cmd/compile/internal/ir"
@@ -10201,6 +10202,28 @@ func rewriteValuegeneric_OpEqPtr(v *Value) bool {
}
break
}
+ // match: (EqPtr (Convert (Addr {x} _) _) (Addr {y} _))
+ // result: (ConstBool [x==y])
+ for {
+ for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+ if v_0.Op != OpConvert {
+ continue
+ }
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpAddr {
+ continue
+ }
+ x := auxToSym(v_0_0.Aux)
+ if v_1.Op != OpAddr {
+ continue
+ }
+ y := auxToSym(v_1.Aux)
+ v.reset(OpConstBool)
+ v.AuxInt = boolToAuxInt(x == y)
+ return true
+ }
+ break
+ }
// match: (EqPtr (LocalAddr _ _) (Addr _))
// result: (ConstBool [false])
for {
@@ -10405,41 +10428,22 @@ func rewriteValuegeneric_OpIMake(v *Value) bool {
return false
}
func rewriteValuegeneric_OpInterLECall(v *Value) bool {
- // match: (InterLECall [argsize] {auxCall} (Load (OffPtr [off] (ITab (IMake (Addr {itab} (SB)) _))) _) ___)
- // cond: devirtLESym(v, auxCall, itab, off) != nil
- // result: devirtLECall(v, devirtLESym(v, auxCall, itab, off))
+ // match: (InterLECall [argsize] {auxCall} (Addr {fn} (SB)) ___)
+ // result: devirtLECall(v, fn.(*obj.LSym))
for {
if len(v.Args) < 1 {
break
}
- auxCall := auxToCall(v.Aux)
v_0 := v.Args[0]
- if v_0.Op != OpLoad {
+ if v_0.Op != OpAddr {
break
}
+ fn := auxToSym(v_0.Aux)
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpOffPtr {
- break
- }
- off := auxIntToInt64(v_0_0.AuxInt)
- v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpITab {
- break
- }
- v_0_0_0_0 := v_0_0_0.Args[0]
- if v_0_0_0_0.Op != OpIMake {
- break
- }
- v_0_0_0_0_0 := v_0_0_0_0.Args[0]
- if v_0_0_0_0_0.Op != OpAddr {
- break
- }
- itab := auxToSym(v_0_0_0_0_0.Aux)
- v_0_0_0_0_0_0 := v_0_0_0_0_0.Args[0]
- if v_0_0_0_0_0_0.Op != OpSB || !(devirtLESym(v, auxCall, itab, off) != nil) {
+ if v_0_0.Op != OpSB {
break
}
- v.copyOf(devirtLECall(v, devirtLESym(v, auxCall, itab, off)))
+ v.copyOf(devirtLECall(v, fn.(*obj.LSym)))
return true
}
return false
@@ -11170,7 +11174,7 @@ func rewriteValuegeneric_OpIsNonNil(v *Value) bool {
v.AuxInt = boolToAuxInt(c != 0)
return true
}
- // match: (IsNonNil (Addr _))
+ // match: (IsNonNil (Addr _) )
// result: (ConstBool [true])
for {
if v_0.Op != OpAddr {
@@ -11180,6 +11184,20 @@ func rewriteValuegeneric_OpIsNonNil(v *Value) bool {
v.AuxInt = boolToAuxInt(true)
return true
}
+ // match: (IsNonNil (Convert (Addr _) _))
+ // result: (ConstBool [true])
+ for {
+ if v_0.Op != OpConvert {
+ break
+ }
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpAddr {
+ break
+ }
+ v.reset(OpConstBool)
+ v.AuxInt = boolToAuxInt(true)
+ return true
+ }
// match: (IsNonNil (LocalAddr _ _))
// result: (ConstBool [true])
for {
@@ -12566,6 +12584,7 @@ func rewriteValuegeneric_OpLoad(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
b := v.Block
+ config := b.Func.Config
fe := b.Func.fe
// match: (Load <t1> p1 (Store {t2} p2 x _))
// cond: isSamePtr(p1, p2) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size()
@@ -13257,6 +13276,230 @@ func rewriteValuegeneric_OpLoad(v *Value) bool {
v.AddArg(v0)
return true
}
+ // match: (Load <t> (OffPtr [off] (Addr {s} sb) ) _)
+ // cond: t.IsUintptr() && isFixedSym(s, off)
+ // result: (Addr {fixedSym(b.Func, s, off)} sb)
+ for {
+ t := v.Type
+ if v_0.Op != OpOffPtr {
+ break
+ }
+ off := auxIntToInt64(v_0.AuxInt)
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpAddr {
+ break
+ }
+ s := auxToSym(v_0_0.Aux)
+ sb := v_0_0.Args[0]
+ if !(t.IsUintptr() && isFixedSym(s, off)) {
+ break
+ }
+ v.reset(OpAddr)
+ v.Aux = symToAux(fixedSym(b.Func, s, off))
+ v.AddArg(sb)
+ return true
+ }
+ // match: (Load <t> (OffPtr [off] (Convert (Addr {s} sb) _) ) _)
+ // cond: t.IsUintptr() && isFixedSym(s, off)
+ // result: (Addr {fixedSym(b.Func, s, off)} sb)
+ for {
+ t := v.Type
+ if v_0.Op != OpOffPtr {
+ break
+ }
+ off := auxIntToInt64(v_0.AuxInt)
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConvert {
+ break
+ }
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpAddr {
+ break
+ }
+ s := auxToSym(v_0_0_0.Aux)
+ sb := v_0_0_0.Args[0]
+ if !(t.IsUintptr() && isFixedSym(s, off)) {
+ break
+ }
+ v.reset(OpAddr)
+ v.Aux = symToAux(fixedSym(b.Func, s, off))
+ v.AddArg(sb)
+ return true
+ }
+ // match: (Load <t> (OffPtr [off] (ITab (IMake (Addr {s} sb) _))) _)
+ // cond: t.IsUintptr() && isFixedSym(s, off)
+ // result: (Addr {fixedSym(b.Func, s, off)} sb)
+ for {
+ t := v.Type
+ if v_0.Op != OpOffPtr {
+ break
+ }
+ off := auxIntToInt64(v_0.AuxInt)
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpITab {
+ break
+ }
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpIMake {
+ break
+ }
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpAddr {
+ break
+ }
+ s := auxToSym(v_0_0_0_0.Aux)
+ sb := v_0_0_0_0.Args[0]
+ if !(t.IsUintptr() && isFixedSym(s, off)) {
+ break
+ }
+ v.reset(OpAddr)
+ v.Aux = symToAux(fixedSym(b.Func, s, off))
+ v.AddArg(sb)
+ return true
+ }
+ // match: (Load <t> (OffPtr [off] (ITab (IMake (Convert (Addr {s} sb) _) _))) _)
+ // cond: t.IsUintptr() && isFixedSym(s, off)
+ // result: (Addr {fixedSym(b.Func, s, off)} sb)
+ for {
+ t := v.Type
+ if v_0.Op != OpOffPtr {
+ break
+ }
+ off := auxIntToInt64(v_0.AuxInt)
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpITab {
+ break
+ }
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpIMake {
+ break
+ }
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpConvert {
+ break
+ }
+ v_0_0_0_0_0 := v_0_0_0_0.Args[0]
+ if v_0_0_0_0_0.Op != OpAddr {
+ break
+ }
+ s := auxToSym(v_0_0_0_0_0.Aux)
+ sb := v_0_0_0_0_0.Args[0]
+ if !(t.IsUintptr() && isFixedSym(s, off)) {
+ break
+ }
+ v.reset(OpAddr)
+ v.Aux = symToAux(fixedSym(b.Func, s, off))
+ v.AddArg(sb)
+ return true
+ }
+ // match: (Load <t> (OffPtr [off] (Addr {sym} _) ) _)
+ // cond: t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)
+ // result: (Const32 [fixed32(config, sym, off)])
+ for {
+ t := v.Type
+ if v_0.Op != OpOffPtr {
+ break
+ }
+ off := auxIntToInt64(v_0.AuxInt)
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpAddr {
+ break
+ }
+ sym := auxToSym(v_0_0.Aux)
+ if !(t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)) {
+ break
+ }
+ v.reset(OpConst32)
+ v.AuxInt = int32ToAuxInt(fixed32(config, sym, off))
+ return true
+ }
+ // match: (Load <t> (OffPtr [off] (Convert (Addr {sym} _) _) ) _)
+ // cond: t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)
+ // result: (Const32 [fixed32(config, sym, off)])
+ for {
+ t := v.Type
+ if v_0.Op != OpOffPtr {
+ break
+ }
+ off := auxIntToInt64(v_0.AuxInt)
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConvert {
+ break
+ }
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpAddr {
+ break
+ }
+ sym := auxToSym(v_0_0_0.Aux)
+ if !(t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)) {
+ break
+ }
+ v.reset(OpConst32)
+ v.AuxInt = int32ToAuxInt(fixed32(config, sym, off))
+ return true
+ }
+ // match: (Load <t> (OffPtr [off] (ITab (IMake (Addr {sym} _) _))) _)
+ // cond: t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)
+ // result: (Const32 [fixed32(config, sym, off)])
+ for {
+ t := v.Type
+ if v_0.Op != OpOffPtr {
+ break
+ }
+ off := auxIntToInt64(v_0.AuxInt)
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpITab {
+ break
+ }
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpIMake {
+ break
+ }
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpAddr {
+ break
+ }
+ sym := auxToSym(v_0_0_0_0.Aux)
+ if !(t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)) {
+ break
+ }
+ v.reset(OpConst32)
+ v.AuxInt = int32ToAuxInt(fixed32(config, sym, off))
+ return true
+ }
+ // match: (Load <t> (OffPtr [off] (ITab (IMake (Convert (Addr {sym} _) _) _))) _)
+ // cond: t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)
+ // result: (Const32 [fixed32(config, sym, off)])
+ for {
+ t := v.Type
+ if v_0.Op != OpOffPtr {
+ break
+ }
+ off := auxIntToInt64(v_0.AuxInt)
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpITab {
+ break
+ }
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpIMake {
+ break
+ }
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpConvert {
+ break
+ }
+ v_0_0_0_0_0 := v_0_0_0_0.Args[0]
+ if v_0_0_0_0_0.Op != OpAddr {
+ break
+ }
+ sym := auxToSym(v_0_0_0_0_0.Aux)
+ if !(t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)) {
+ break
+ }
+ v.reset(OpConst32)
+ v.AuxInt = int32ToAuxInt(fixed32(config, sym, off))
+ return true
+ }
return false
}
func rewriteValuegeneric_OpLsh16x16(v *Value) bool {
@@ -18546,6 +18789,28 @@ func rewriteValuegeneric_OpNeqPtr(v *Value) bool {
}
break
}
+ // match: (NeqPtr (Convert (Addr {x} _) _) (Addr {y} _))
+ // result: (ConstBool [x!=y])
+ for {
+ for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+ if v_0.Op != OpConvert {
+ continue
+ }
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpAddr {
+ continue
+ }
+ x := auxToSym(v_0_0.Aux)
+ if v_1.Op != OpAddr {
+ continue
+ }
+ y := auxToSym(v_1.Aux)
+ v.reset(OpConstBool)
+ v.AuxInt = boolToAuxInt(x != y)
+ return true
+ }
+ break
+ }
// match: (NeqPtr (LocalAddr _ _) (Addr _))
// result: (ConstBool [true])
for {
@@ -18747,6 +19012,36 @@ func rewriteValuegeneric_OpNilCheck(v *Value) bool {
v.reset(OpInvalid)
return true
}
+ // match: (NilCheck (Addr {_} (SB)) _)
+ // result: (Invalid)
+ for {
+ if v_0.Op != OpAddr {
+ break
+ }
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpSB {
+ break
+ }
+ v.reset(OpInvalid)
+ return true
+ }
+ // match: (NilCheck (Convert (Addr {_} (SB)) _) _)
+ // result: (Invalid)
+ for {
+ if v_0.Op != OpConvert {
+ break
+ }
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpAddr {
+ break
+ }
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpSB {
+ break
+ }
+ v.reset(OpInvalid)
+ return true
+ }
return false
}
func rewriteValuegeneric_OpNot(v *Value) bool {
diff --git a/src/cmd/internal/obj/link.go b/src/cmd/internal/obj/link.go
index def92e103b..8b853e22c0 100644
--- a/src/cmd/internal/obj/link.go
+++ b/src/cmd/internal/obj/link.go
@@ -465,7 +465,7 @@ type LSym struct {
P []byte
R []Reloc
- Extra *interface{} // *FuncInfo, *VarInfo, or *FileInfo, if present
+ Extra *interface{} // *FuncInfo, *VarInfo, *FileInfo, or *TypeInfo, if present
Pkg string
PkgIdx int32
@@ -588,6 +588,22 @@ func (s *LSym) File() *FileInfo {
return f
}
+// A TypeInfo contains information for a symbol
+// that contains a runtime._type.
+type TypeInfo struct {
+ Type interface{} // a *cmd/compile/internal/types.Type
+}
+
+func (s *LSym) NewTypeInfo() *TypeInfo {
+ if s.Extra != nil {
+ panic(fmt.Sprintf("invalid use of LSym - NewTypeInfo with Extra of type %T", *s.Extra))
+ }
+ t := new(TypeInfo)
+ s.Extra = new(interface{})
+ *s.Extra = t
+ return t
+}
+
// WasmImport represents a WebAssembly (WASM) imported function with
// parameters and results translated into WASM types based on the Go function
// declaration.
diff --git a/src/cmd/internal/objabi/reloctype.go b/src/cmd/internal/objabi/reloctype.go
index 4dcfe44388..0b231855ec 100644
--- a/src/cmd/internal/objabi/reloctype.go
+++ b/src/cmd/internal/objabi/reloctype.go
@@ -83,7 +83,8 @@ const (
// direct references. (This is used for types reachable by reflection.)
R_USETYPE
// R_USEIFACE marks a type is converted to an interface in the function this
- // relocation is applied to. The target is a type descriptor.
+ // relocation is applied to. The target is a type descriptor or an itab
+ // (in the latter case it refers to the concrete type contained in the itab).
// This is a marker relocation (0-sized), for the linker's reachability
// analysis.
R_USEIFACE
diff --git a/src/cmd/link/internal/ld/deadcode.go b/src/cmd/link/internal/ld/deadcode.go
index c0969b25b0..c687565878 100644
--- a/src/cmd/link/internal/ld/deadcode.go
+++ b/src/cmd/link/internal/ld/deadcode.go
@@ -180,6 +180,14 @@ func (d *deadcodePass) flood() {
// converted to an interface, i.e. should have UsedInIface set. See the
// comment below for why we need to unset the Reachable bit and re-mark it.
rs := r.Sym()
+ if d.ldr.IsItab(rs) {
+ // This relocation can also point at an itab, in which case it
+ // means "the _type field of that itab".
+ rs = decodeItabType(d.ldr, d.ctxt.Arch, rs)
+ }
+ if !d.ldr.IsGoType(rs) && !d.ctxt.linkShared {
+ panic(fmt.Sprintf("R_USEIFACE in %s references %s which is not a type or itab", d.ldr.SymName(symIdx), d.ldr.SymName(rs)))
+ }
if !d.ldr.AttrUsedInIface(rs) {
d.ldr.SetAttrUsedInIface(rs, true)
if d.ldr.AttrReachable(rs) {
diff --git a/src/cmd/link/internal/ld/decodesym.go b/src/cmd/link/internal/ld/decodesym.go
index c01d6c1163..05da11ec1e 100644
--- a/src/cmd/link/internal/ld/decodesym.go
+++ b/src/cmd/link/internal/ld/decodesym.go
@@ -300,3 +300,9 @@ func findShlibSection(ctxt *Link, path string, addr uint64) *elf.Section {
func decodetypeGcprogShlib(ctxt *Link, data []byte) uint64 {
return decodeInuxi(ctxt.Arch, data[2*int32(ctxt.Arch.PtrSize)+8+1*int32(ctxt.Arch.PtrSize):], ctxt.Arch.PtrSize)
}
+
+// decodeItabType returns the itab._type field from an itab.
+func decodeItabType(ldr *loader.Loader, arch *sys.Arch, symIdx loader.Sym) loader.Sym {
+ relocs := ldr.Relocs(symIdx)
+ return decodeRelocSym(ldr, symIdx, &relocs, int32(arch.PtrSize))
+}
diff --git a/test/typeparam/devirtualize1.go b/test/typeparam/devirtualize1.go
new file mode 100644
index 0000000000..fb26225e6e
--- /dev/null
+++ b/test/typeparam/devirtualize1.go
@@ -0,0 +1,22 @@
+// run
+
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package main
+
+type S struct {
+ x int
+}
+
+func (t *S) M1() {
+}
+
+func F[T any](x T) any {
+ return x
+}
+
+func main() {
+ F(&S{}).(interface{ M1() }).M1()
+}
diff --git a/test/typeparam/devirtualize2.go b/test/typeparam/devirtualize2.go
new file mode 100644
index 0000000000..ca6dc8fa86
--- /dev/null
+++ b/test/typeparam/devirtualize2.go
@@ -0,0 +1,28 @@
+// run
+
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package main
+
+type S struct {
+ x int
+}
+
+func (t *S) M1() {
+}
+func (t *S) M2() {
+}
+
+type I interface {
+ M1()
+}
+
+func F[T I](x T) I {
+ return x
+}
+
+func main() {
+ F(&S{}).(interface{ M2() }).M2()
+}