author     Matthew Dempsky <mdempsky@google.com>  2020-12-01 03:25:29 -0800
committer  Matthew Dempsky <mdempsky@google.com>  2020-12-01 19:24:00 +0000
commit     5ffa275f3cab631483a1ce76a63fc4ede3d204e8 (patch)
tree       b887c2c7990b97f4d251c5c52e1307548a15c978
parent     6ca23a45feebc8672a1851dbc65c5b34d481ca30 (diff)
[dev.regabi] cmd/compile: first pass at abstracting Type
Passes toolstash/buildall.

[git-generate]
cd src/cmd/compile/internal/ssa
rf '
ex . ../ir ../gc {
	import "cmd/compile/internal/types"
	var t *types.Type
	t.Etype -> t.Kind()
	t.Sym -> t.GetSym()
	t.Orig -> t.Underlying()
}
'
cd ../types
rf '
mv EType Kind
mv IRNode Object
mv Type.Etype Type.kind
mv Type.Sym Type.sym
mv Type.Orig Type.underlying
mv Type.Cache Type.cache
mv Type.GetSym Type.Sym
mv Bytetype ByteType
mv Runetype RuneType
mv Errortype ErrorType
'
cd ../gc
sed -i 's/Bytetype/ByteType/; s/Runetype/RuneType/' mkbuiltin.go
git codereview gofmt
go install cmd/compile/internal/...
go test cmd/compile -u || go test cmd/compile

Change-Id: Ibecb2d7100d3318a49238eb4a78d70acb49eedca
Reviewed-on: https://go-review.googlesource.com/c/go/+/274437
Run-TryBot: Matthew Dempsky <mdempsky@google.com>
Reviewed-by: Russ Cox <rsc@golang.org>
Trust: Matthew Dempsky <mdempsky@google.com>
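Context for the diff that follows: the commit mechanically replaces direct field access on *types.Type (t.Etype, t.Sym, t.Orig) with the accessor methods t.Kind(), t.Sym(), and t.Underlying(), and renames Bytetype/Runetype/Errortype to ByteType/RuneType/ErrorType. Below is a minimal, self-contained sketch of the before/after call-site pattern; the Type struct, Kind constants, and describeType helper are illustrative stand-ins, not the compiler's real definitions.

package main

import "fmt"

// Kind stands in for the renamed types.Kind (formerly types.EType).
// The constants here are illustrative only.
type Kind int

const (
	TINT Kind = iota
	TSTRING
	TSTRUCT
)

// Type is a toy stand-in for cmd/compile/internal/types.Type: after this
// commit the fields are unexported and reached through accessor methods.
type Type struct {
	kind       Kind
	sym        string
	underlying *Type
}

func (t *Type) Kind() Kind        { return t.kind }       // call sites formerly read t.Etype
func (t *Type) Sym() string       { return t.sym }        // formerly the exported t.Sym field
func (t *Type) Underlying() *Type { return t.underlying } // formerly t.Orig

// describeType is a hypothetical call site written in the post-rename style
// used throughout the gc and ssa packages in the diff below.
func describeType(t *Type) string {
	switch t.Kind() { // previously: switch t.Etype
	case TSTRING:
		return "string"
	case TSTRUCT:
		return "struct " + t.Sym() // previously read the t.Sym field directly
	default:
		return fmt.Sprintf("kind %d", t.Kind())
	}
}

func main() {
	named := &Type{kind: TSTRUCT, sym: "Point", underlying: &Type{kind: TSTRUCT}}
	fmt.Println(describeType(named))              // "struct Point"
	fmt.Println(describeType(named.Underlying())) // "struct " (anonymous underlying type)
}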
-rw-r--r--  src/cmd/compile/fmtmap_test.go                   8
-rw-r--r--  src/cmd/compile/internal/gc/alg.go              12
-rw-r--r--  src/cmd/compile/internal/gc/align.go            12
-rw-r--r--  src/cmd/compile/internal/gc/bexport.go          10
-rw-r--r--  src/cmd/compile/internal/gc/builtin.go           4
-rw-r--r--  src/cmd/compile/internal/gc/const.go            22
-rw-r--r--  src/cmd/compile/internal/gc/dcl.go              18
-rw-r--r--  src/cmd/compile/internal/gc/embed.go             4
-rw-r--r--  src/cmd/compile/internal/gc/escape.go            2
-rw-r--r--  src/cmd/compile/internal/gc/go.go                2
-rw-r--r--  src/cmd/compile/internal/gc/iexport.go          18
-rw-r--r--  src/cmd/compile/internal/gc/inl.go               4
-rw-r--r--  src/cmd/compile/internal/gc/mkbuiltin.go         4
-rw-r--r--  src/cmd/compile/internal/gc/obj.go               6
-rw-r--r--  src/cmd/compile/internal/gc/order.go             2
-rw-r--r--  src/cmd/compile/internal/gc/plive.go             4
-rw-r--r--  src/cmd/compile/internal/gc/range.go            12
-rw-r--r--  src/cmd/compile/internal/gc/reflect.go          76
-rw-r--r--  src/cmd/compile/internal/gc/ssa.go              56
-rw-r--r--  src/cmd/compile/internal/gc/subr.go             72
-rw-r--r--  src/cmd/compile/internal/gc/swt.go               4
-rw-r--r--  src/cmd/compile/internal/gc/typecheck.go        80
-rw-r--r--  src/cmd/compile/internal/gc/universe.go         26
-rw-r--r--  src/cmd/compile/internal/gc/walk.go             48
-rw-r--r--  src/cmd/compile/internal/ir/fmt.go              50
-rw-r--r--  src/cmd/compile/internal/ir/node.go              2
-rw-r--r--  src/cmd/compile/internal/ir/type.go              2
-rw-r--r--  src/cmd/compile/internal/ir/val.go               2
-rw-r--r--  src/cmd/compile/internal/ssa/expand_calls.go    16
-rw-r--r--  src/cmd/compile/internal/ssa/export_test.go      6
-rw-r--r--  src/cmd/compile/internal/ssa/regalloc.go         4
-rw-r--r--  src/cmd/compile/internal/types/etype_string.go   4
-rw-r--r--  src/cmd/compile/internal/types/identity.go      12
-rw-r--r--  src/cmd/compile/internal/types/scope.go          8
-rw-r--r--  src/cmd/compile/internal/types/sym.go            2
-rw-r--r--  src/cmd/compile/internal/types/type.go         182
36 files changed, 398 insertions, 398 deletions
diff --git a/src/cmd/compile/fmtmap_test.go b/src/cmd/compile/fmtmap_test.go
index ca31705f72..fde9c51b27 100644
--- a/src/cmd/compile/fmtmap_test.go
+++ b/src/cmd/compile/fmtmap_test.go
@@ -127,10 +127,10 @@ var knownFormats = map[string]string{
"cmd/compile/internal/syntax.position %s": "",
"cmd/compile/internal/syntax.token %q": "",
"cmd/compile/internal/syntax.token %s": "",
- "cmd/compile/internal/types.EType %d": "",
- "cmd/compile/internal/types.EType %s": "",
- "cmd/compile/internal/types.EType %v": "",
- "cmd/compile/internal/types.IRNode %v": "",
+ "cmd/compile/internal/types.Kind %d": "",
+ "cmd/compile/internal/types.Kind %s": "",
+ "cmd/compile/internal/types.Kind %v": "",
+ "cmd/compile/internal/types.Object %v": "",
"cmd/internal/obj.ABI %v": "",
"error %v": "",
"float64 %.2f": "",
diff --git a/src/cmd/compile/internal/gc/alg.go b/src/cmd/compile/internal/gc/alg.go
index 806417d03d..b2716399a5 100644
--- a/src/cmd/compile/internal/gc/alg.go
+++ b/src/cmd/compile/internal/gc/alg.go
@@ -68,7 +68,7 @@ func IncomparableField(t *types.Type) *types.Field {
// EqCanPanic reports whether == on type t could panic (has an interface somewhere).
// t must be comparable.
func EqCanPanic(t *types.Type) bool {
- switch t.Etype {
+ switch t.Kind() {
default:
return false
case types.TINTER:
@@ -120,7 +120,7 @@ func algtype1(t *types.Type) (AlgKind, *types.Type) {
return ANOEQ, t
}
- switch t.Etype {
+ switch t.Kind() {
case types.TANY, types.TFORW:
// will be defined later.
return ANOEQ, t
@@ -274,7 +274,7 @@ func genhash(t *types.Type) *obj.LSym {
// (And the closure generated by genhash will also get
// dead-code eliminated, as we call the subtype hashers
// directly.)
- switch t.Etype {
+ switch t.Kind() {
case types.TARRAY:
genhash(t.Elem())
case types.TSTRUCT:
@@ -303,7 +303,7 @@ func genhash(t *types.Type) *obj.LSym {
np := ir.AsNode(tfn.Type().Params().Field(0).Nname)
nh := ir.AsNode(tfn.Type().Params().Field(1).Nname)
- switch t.Etype {
+ switch t.Kind() {
case types.TARRAY:
// An array of pure memory would be handled by the
// standard algorithm, so the element type must not be
@@ -536,7 +536,7 @@ func geneq(t *types.Type) *obj.LSym {
// We reach here only for types that have equality but
// cannot be handled by the standard algorithms,
// so t must be either an array or a struct.
- switch t.Etype {
+ switch t.Kind() {
default:
base.Fatalf("geneq %v", t)
@@ -613,7 +613,7 @@ func geneq(t *types.Type) *obj.LSym {
}
}
- switch t.Elem().Etype {
+ switch t.Elem().Kind() {
case types.TSTRING:
// Do two loops. First, check that all the lengths match (cheap).
// Second, check that all the contents match (expensive).
diff --git a/src/cmd/compile/internal/gc/align.go b/src/cmd/compile/internal/gc/align.go
index 5171983af0..af426f5b24 100644
--- a/src/cmd/compile/internal/gc/align.go
+++ b/src/cmd/compile/internal/gc/align.go
@@ -185,7 +185,7 @@ func findTypeLoop(t *types.Type, path *[]*types.Type) bool {
// We implement a simple DFS loop-finding algorithm. This
// could be faster, but type cycles are rare.
- if t.Sym != nil {
+ if t.Sym() != nil {
// Declared type. Check for loops and otherwise
// recurse on the type expression used in the type
// declaration.
@@ -193,7 +193,7 @@ func findTypeLoop(t *types.Type, path *[]*types.Type) bool {
// Type imported from package, so it can't be part of
// a type loop (otherwise that package should have
// failed to compile).
- if t.Sym.Pkg != ir.LocalPkg {
+ if t.Sym().Pkg != ir.LocalPkg {
return false
}
@@ -212,7 +212,7 @@ func findTypeLoop(t *types.Type, path *[]*types.Type) bool {
} else {
// Anonymous type. Recurse on contained types.
- switch t.Etype {
+ switch t.Kind() {
case types.TARRAY:
if findTypeLoop(t.Elem(), path) {
return true
@@ -321,15 +321,15 @@ func dowidth(t *types.Type) {
t.Width = -2
t.Align = 0 // 0 means use t.Width, below
- et := t.Etype
+ et := t.Kind()
switch et {
case types.TFUNC, types.TCHAN, types.TMAP, types.TSTRING:
break
// simtype == 0 during bootstrap
default:
- if simtype[t.Etype] != 0 {
- et = simtype[t.Etype]
+ if simtype[t.Kind()] != 0 {
+ et = simtype[t.Kind()]
}
}
diff --git a/src/cmd/compile/internal/gc/bexport.go b/src/cmd/compile/internal/gc/bexport.go
index dbbac559ae..43c4ce7150 100644
--- a/src/cmd/compile/internal/gc/bexport.go
+++ b/src/cmd/compile/internal/gc/bexport.go
@@ -35,7 +35,7 @@ func (p *exporter) markType(t *types.Type) {
// only their unexpanded method set (i.e., exclusive of
// interface embeddings), and the switch statement below
// handles their full method set.
- if t.Sym != nil && t.Etype != types.TINTER {
+ if t.Sym() != nil && t.Kind() != types.TINTER {
for _, m := range t.Methods().Slice() {
if types.IsExported(m.Sym.Name) {
p.markObject(ir.AsNode(m.Nname))
@@ -52,7 +52,7 @@ func (p *exporter) markType(t *types.Type) {
// Notably, we don't mark function parameter types, because
// the user already needs some way to construct values of
// those types.
- switch t.Etype {
+ switch t.Kind() {
case types.TPTR, types.TARRAY, types.TSLICE:
p.markType(t.Elem())
@@ -153,11 +153,11 @@ func predeclared() []*types.Type {
types.Types[types.TSTRING],
// basic type aliases
- types.Bytetype,
- types.Runetype,
+ types.ByteType,
+ types.RuneType,
// error
- types.Errortype,
+ types.ErrorType,
// untyped types
types.UntypedBool,
diff --git a/src/cmd/compile/internal/gc/builtin.go b/src/cmd/compile/internal/gc/builtin.go
index efca44c667..07e864dd2e 100644
--- a/src/cmd/compile/internal/gc/builtin.go
+++ b/src/cmd/compile/internal/gc/builtin.go
@@ -206,7 +206,7 @@ var runtimeDecls = [...]struct {
func runtimeTypes() []*types.Type {
var typs [131]*types.Type
- typs[0] = types.Bytetype
+ typs[0] = types.ByteType
typs[1] = types.NewPtr(typs[0])
typs[2] = types.Types[types.TANY]
typs[3] = types.NewPtr(typs[2])
@@ -252,7 +252,7 @@ func runtimeTypes() []*types.Type {
typs[43] = functype(nil, []*ir.Field{anonfield(typs[42]), anonfield(typs[22])}, []*ir.Field{anonfield(typs[28])})
typs[44] = functype(nil, []*ir.Field{anonfield(typs[33]), anonfield(typs[1]), anonfield(typs[15])}, []*ir.Field{anonfield(typs[28])})
typs[45] = functype(nil, []*ir.Field{anonfield(typs[1]), anonfield(typs[15])}, []*ir.Field{anonfield(typs[28])})
- typs[46] = types.Runetype
+ typs[46] = types.RuneType
typs[47] = types.NewSlice(typs[46])
typs[48] = functype(nil, []*ir.Field{anonfield(typs[33]), anonfield(typs[47])}, []*ir.Field{anonfield(typs[28])})
typs[49] = types.NewSlice(typs[0])
diff --git a/src/cmd/compile/internal/gc/const.go b/src/cmd/compile/internal/gc/const.go
index 4dee373bfa..4a61c77630 100644
--- a/src/cmd/compile/internal/gc/const.go
+++ b/src/cmd/compile/internal/gc/const.go
@@ -127,7 +127,7 @@ func convlit1(n ir.Node, t *types.Type, explicit bool, context func() string) ir
}
// Nil is technically not a constant, so handle it specially.
- if n.Type().Etype == types.TNIL {
+ if n.Type().Kind() == types.TNIL {
if n.Op() != ir.ONIL {
base.Fatalf("unexpected op: %v (%v)", n, n.Op())
}
@@ -147,7 +147,7 @@ func convlit1(n ir.Node, t *types.Type, explicit bool, context func() string) ir
return n
}
- if t == nil || !ir.OKForConst[t.Etype] {
+ if t == nil || !ir.OKForConst[t.Kind()] {
t = defaultType(n.Type())
}
@@ -245,7 +245,7 @@ func operandType(op ir.Op, t *types.Type) *types.Type {
return complexForFloat(t)
}
default:
- if okfor[op][t.Etype] {
+ if okfor[op][t.Kind()] {
return t
}
}
@@ -499,12 +499,12 @@ func evalConst(n ir.Node) ir.Node {
}
case ir.OCONV, ir.ORUNESTR:
- if ir.OKForConst[n.Type().Etype] && nl.Op() == ir.OLITERAL {
+ if ir.OKForConst[n.Type().Kind()] && nl.Op() == ir.OLITERAL {
return origConst(n, convertVal(nl.Val(), n.Type(), true))
}
case ir.OCONVNOP:
- if ir.OKForConst[n.Type().Etype] && nl.Op() == ir.OLITERAL {
+ if ir.OKForConst[n.Type().Kind()] && nl.Op() == ir.OLITERAL {
// set so n.Orig gets OCONV instead of OCONVNOP
n.SetOp(ir.OCONV)
return origConst(n, nl.Val())
@@ -555,7 +555,7 @@ func evalConst(n ir.Node) ir.Node {
return n
case ir.OCAP, ir.OLEN:
- switch nl.Type().Etype {
+ switch nl.Type().Kind() {
case types.TSTRING:
if ir.IsConst(nl, constant.String) {
return origIntConst(n, int64(len(nl.StringVal())))
@@ -729,7 +729,7 @@ func mixUntyped(t1, t2 *types.Type) *types.Type {
}
func defaultType(t *types.Type) *types.Type {
- if !t.IsUntyped() || t.Etype == types.TNIL {
+ if !t.IsUntyped() || t.Kind() == types.TNIL {
return t
}
@@ -741,7 +741,7 @@ func defaultType(t *types.Type) *types.Type {
case types.UntypedInt:
return types.Types[types.TINT]
case types.UntypedRune:
- return types.Runetype
+ return types.RuneType
case types.UntypedFloat:
return types.Types[types.TFLOAT64]
case types.UntypedComplex:
@@ -769,7 +769,7 @@ func indexconst(n ir.Node) int64 {
if n.Op() != ir.OLITERAL {
return -1
}
- if !n.Type().IsInteger() && n.Type().Etype != types.TIDEAL {
+ if !n.Type().IsInteger() && n.Type().Kind() != types.TIDEAL {
return -1
}
@@ -885,9 +885,9 @@ func (s *constSet) add(pos src.XPos, n ir.Node, what, where string) {
typ := n.Type()
switch typ {
- case types.Bytetype:
+ case types.ByteType:
typ = types.Types[types.TUINT8]
- case types.Runetype:
+ case types.RuneType:
typ = types.Types[types.TINT32]
}
k := constSetKey{typ, ir.ConstValue(n)}
diff --git a/src/cmd/compile/internal/gc/dcl.go b/src/cmd/compile/internal/gc/dcl.go
index 3b60496c5c..3d0bdaec7a 100644
--- a/src/cmd/compile/internal/gc/dcl.go
+++ b/src/cmd/compile/internal/gc/dcl.go
@@ -445,7 +445,7 @@ func funcarg(n *ir.Field, ctxt ir.Class) {
// This happens during import, where the hidden_fndcl rule has
// used functype directly to parse the function's type.
func funcargs2(t *types.Type) {
- if t.Etype != types.TFUNC {
+ if t.Kind() != types.TFUNC {
base.Fatalf("funcargs2 %v", t)
}
@@ -496,7 +496,7 @@ func checkembeddedtype(t *types.Type) {
return
}
- if t.Sym == nil && t.IsPtr() {
+ if t.Sym() == nil && t.IsPtr() {
t = t.Elem()
if t.IsInterface() {
base.Errorf("embedded type cannot be a pointer to interface")
@@ -505,7 +505,7 @@ func checkembeddedtype(t *types.Type) {
if t.IsPtr() || t.IsUnsafePtr() {
base.Errorf("embedded type cannot be a pointer")
- } else if t.Etype == types.TFORW && !t.ForwardType().Embedlineno.IsKnown() {
+ } else if t.Kind() == types.TFORW && !t.ForwardType().Embedlineno.IsKnown() {
t.ForwardType().Embedlineno = base.Pos
}
}
@@ -719,12 +719,12 @@ func methodSymSuffix(recv *types.Type, msym *types.Sym, suffix string) *types.Sy
base.Fatalf("blank method name")
}
- rsym := recv.Sym
+ rsym := recv.Sym()
if recv.IsPtr() {
if rsym != nil {
base.Fatalf("declared pointer receiver type: %v", recv)
}
- rsym = recv.Elem().Sym
+ rsym = recv.Elem().Sym()
}
// Find the package the receiver type appeared in. For
@@ -777,11 +777,11 @@ func addmethod(n *ir.Func, msym *types.Sym, t *types.Type, local, nointerface bo
}
mt := methtype(rf.Type)
- if mt == nil || mt.Sym == nil {
+ if mt == nil || mt.Sym() == nil {
pa := rf.Type
t := pa
if t != nil && t.IsPtr() {
- if t.Sym != nil {
+ if t.Sym() != nil {
base.Errorf("invalid receiver type %v (%v is a pointer type)", pa, t)
return nil
}
@@ -791,7 +791,7 @@ func addmethod(n *ir.Func, msym *types.Sym, t *types.Type, local, nointerface bo
switch {
case t == nil || t.Broke():
// rely on typecheck having complained before
- case t.Sym == nil:
+ case t.Sym() == nil:
base.Errorf("invalid receiver type %v (%v is not a defined type)", pa, t)
case t.IsPtr():
base.Errorf("invalid receiver type %v (%v is a pointer type)", pa, t)
@@ -805,7 +805,7 @@ func addmethod(n *ir.Func, msym *types.Sym, t *types.Type, local, nointerface bo
return nil
}
- if local && mt.Sym.Pkg != ir.LocalPkg {
+ if local && mt.Sym().Pkg != ir.LocalPkg {
base.Errorf("cannot define new methods on non-local type %v", mt)
return nil
}
diff --git a/src/cmd/compile/internal/gc/embed.go b/src/cmd/compile/internal/gc/embed.go
index d9bfd6f5ed..d6e42e4f03 100644
--- a/src/cmd/compile/internal/gc/embed.go
+++ b/src/cmd/compile/internal/gc/embed.go
@@ -151,13 +151,13 @@ func embedKindApprox(typ ir.Node) int {
// embedKind determines the kind of embedding variable.
func embedKind(typ *types.Type) int {
- if typ.Sym != nil && typ.Sym.Name == "FS" && (typ.Sym.Pkg.Path == "embed" || (typ.Sym.Pkg == ir.LocalPkg && base.Ctxt.Pkgpath == "embed")) {
+ if typ.Sym() != nil && typ.Sym().Name == "FS" && (typ.Sym().Pkg.Path == "embed" || (typ.Sym().Pkg == ir.LocalPkg && base.Ctxt.Pkgpath == "embed")) {
return embedFiles
}
if typ == types.Types[types.TSTRING] {
return embedString
}
- if typ.Sym == nil && typ.IsSlice() && typ.Elem() == types.Bytetype {
+ if typ.Sym() == nil && typ.IsSlice() && typ.Elem() == types.ByteType {
return embedBytes
}
return embedUnknown
diff --git a/src/cmd/compile/internal/gc/escape.go b/src/cmd/compile/internal/gc/escape.go
index f2fff02959..b29896e5a4 100644
--- a/src/cmd/compile/internal/gc/escape.go
+++ b/src/cmd/compile/internal/gc/escape.go
@@ -659,7 +659,7 @@ func (e *Escape) exprSkipInit(k EscHole, n ir.Node) {
// unsafeValue evaluates a uintptr-typed arithmetic expression looking
// for conversions from an unsafe.Pointer.
func (e *Escape) unsafeValue(k EscHole, n ir.Node) {
- if n.Type().Etype != types.TUINTPTR {
+ if n.Type().Kind() != types.TUINTPTR {
base.Fatalf("unexpected type %v for %v", n.Type(), n)
}
diff --git a/src/cmd/compile/internal/gc/go.go b/src/cmd/compile/internal/gc/go.go
index 24393de53d..c493165c76 100644
--- a/src/cmd/compile/internal/gc/go.go
+++ b/src/cmd/compile/internal/gc/go.go
@@ -102,7 +102,7 @@ var gopkg *types.Pkg // pseudo-package for method symbols on anonymous receiver
var zerosize int64
-var simtype [types.NTYPE]types.EType
+var simtype [types.NTYPE]types.Kind
var (
isInt [types.NTYPE]bool
diff --git a/src/cmd/compile/internal/gc/iexport.go b/src/cmd/compile/internal/gc/iexport.go
index 2dfce26596..8f50868fc7 100644
--- a/src/cmd/compile/internal/gc/iexport.go
+++ b/src/cmd/compile/internal/gc/iexport.go
@@ -473,15 +473,15 @@ func (p *iexporter) doDecl(n ir.Node) {
w.tag('T')
w.pos(n.Pos())
- underlying := n.Type().Orig
- if underlying == types.Errortype.Orig {
+ underlying := n.Type().Underlying()
+ if underlying == types.ErrorType.Underlying() {
// For "type T error", use error as the
// underlying type instead of error's own
// underlying anonymous interface. This
// ensures consistency with how importers may
// declare error (e.g., go/types uses nil Pkg
// for predeclared objects).
- underlying = types.Errortype
+ underlying = types.ErrorType
}
w.typ(underlying)
@@ -634,8 +634,8 @@ func (w *exportWriter) startType(k itag) {
}
func (w *exportWriter) doTyp(t *types.Type) {
- if t.Sym != nil {
- if t.Sym.Pkg == ir.BuiltinPkg || t.Sym.Pkg == unsafepkg {
+ if t.Sym() != nil {
+ if t.Sym().Pkg == ir.BuiltinPkg || t.Sym().Pkg == unsafepkg {
base.Fatalf("builtin type missing from typIndex: %v", t)
}
@@ -644,7 +644,7 @@ func (w *exportWriter) doTyp(t *types.Type) {
return
}
- switch t.Etype {
+ switch t.Kind() {
case types.TPTR:
w.startType(pointerType)
w.typ(t.Elem())
@@ -762,7 +762,7 @@ func constTypeOf(typ *types.Type) constant.Kind {
return constant.Complex
}
- switch typ.Etype {
+ switch typ.Kind() {
case types.TBOOL:
return constant.Bool
case types.TSTRING:
@@ -809,7 +809,7 @@ func intSize(typ *types.Type) (signed bool, maxBytes uint) {
return true, Mpprec / 8
}
- switch typ.Etype {
+ switch typ.Kind() {
case types.TFLOAT32, types.TCOMPLEX64:
return true, 3
case types.TFLOAT64, types.TCOMPLEX128:
@@ -821,7 +821,7 @@ func intSize(typ *types.Type) (signed bool, maxBytes uint) {
// The go/types API doesn't expose sizes to importers, so they
// don't know how big these types are.
- switch typ.Etype {
+ switch typ.Kind() {
case types.TINT, types.TUINT, types.TUINTPTR:
maxBytes = 8
}
diff --git a/src/cmd/compile/internal/gc/inl.go b/src/cmd/compile/internal/gc/inl.go
index 97ecb9559b..b36a01e389 100644
--- a/src/cmd/compile/internal/gc/inl.go
+++ b/src/cmd/compile/internal/gc/inl.go
@@ -61,10 +61,10 @@ func fnpkg(fn *ir.Name) *types.Pkg {
if rcvr.IsPtr() {
rcvr = rcvr.Elem()
}
- if rcvr.Sym == nil {
+ if rcvr.Sym() == nil {
base.Fatalf("receiver with no sym: [%v] %L (%v)", fn.Sym(), fn, rcvr)
}
- return rcvr.Sym.Pkg
+ return rcvr.Sym().Pkg
}
// non-method
diff --git a/src/cmd/compile/internal/gc/mkbuiltin.go b/src/cmd/compile/internal/gc/mkbuiltin.go
index 5317484de9..38aa601645 100644
--- a/src/cmd/compile/internal/gc/mkbuiltin.go
+++ b/src/cmd/compile/internal/gc/mkbuiltin.go
@@ -143,9 +143,9 @@ func (i *typeInterner) mktype(t ast.Expr) string {
case *ast.Ident:
switch t.Name {
case "byte":
- return "types.Bytetype"
+ return "types.ByteType"
case "rune":
- return "types.Runetype"
+ return "types.RuneType"
}
return fmt.Sprintf("types.Types[types.T%s]", strings.ToUpper(t.Name))
case *ast.SelectorExpr:
diff --git a/src/cmd/compile/internal/gc/obj.go b/src/cmd/compile/internal/gc/obj.go
index 7b5e3015c2..f65131417a 100644
--- a/src/cmd/compile/internal/gc/obj.go
+++ b/src/cmd/compile/internal/gc/obj.go
@@ -218,7 +218,7 @@ func addptabs() {
if s.Pkg.Name != "main" {
continue
}
- if n.Type().Etype == types.TFUNC && n.Class() == ir.PFUNC {
+ if n.Type().Kind() == types.TFUNC && n.Class() == ir.PFUNC {
// function
ptabs = append(ptabs, ptabEntry{s: s, t: ir.AsNode(s.Def).Type()})
} else {
@@ -602,7 +602,7 @@ func litsym(n, c ir.Node, wid int) {
case constant.Float:
f, _ := constant.Float64Val(u)
- switch n.Type().Etype {
+ switch n.Type().Kind() {
case types.TFLOAT32:
s.WriteFloat32(base.Ctxt, n.Offset(), float32(f))
case types.TFLOAT64:
@@ -612,7 +612,7 @@ func litsym(n, c ir.Node, wid int) {
case constant.Complex:
re, _ := constant.Float64Val(constant.Real(u))
im, _ := constant.Float64Val(constant.Imag(u))
- switch n.Type().Etype {
+ switch n.Type().Kind() {
case types.TCOMPLEX64:
s.WriteFloat32(base.Ctxt, n.Offset(), float32(re))
s.WriteFloat32(base.Ctxt, n.Offset()+4, float32(im))
diff --git a/src/cmd/compile/internal/gc/order.go b/src/cmd/compile/internal/gc/order.go
index 83cfb44474..c2e236537f 100644
--- a/src/cmd/compile/internal/gc/order.go
+++ b/src/cmd/compile/internal/gc/order.go
@@ -784,7 +784,7 @@ func (o *Order) stmt(n ir.Node) {
n.SetRight(o.expr(n.Right(), nil))
orderBody := true
- switch n.Type().Etype {
+ switch n.Type().Kind() {
default:
base.Fatalf("order.stmt range %v", n.Type())
diff --git a/src/cmd/compile/internal/gc/plive.go b/src/cmd/compile/internal/gc/plive.go
index 6ad3140081..f2555cc941 100644
--- a/src/cmd/compile/internal/gc/plive.go
+++ b/src/cmd/compile/internal/gc/plive.go
@@ -416,7 +416,7 @@ func onebitwalktype1(t *types.Type, off int64, bv bvec) {
return
}
- switch t.Etype {
+ switch t.Kind() {
case types.TPTR, types.TUNSAFEPTR, types.TFUNC, types.TCHAN, types.TMAP:
if off&int64(Widthptr-1) != 0 {
base.Fatalf("onebitwalktype1: invalid alignment, %v", t)
@@ -1300,7 +1300,7 @@ func liveness(e *ssafn, f *ssa.Func, pp *Progs) LivenessMap {
// to fully initialize t.
func isfat(t *types.Type) bool {
if t != nil {
- switch t.Etype {
+ switch t.Kind() {
case types.TSLICE, types.TSTRING,
types.TINTER: // maybe remove later
return true
diff --git a/src/cmd/compile/internal/gc/range.go b/src/cmd/compile/internal/gc/range.go
index 2f2d7051c3..e48642a854 100644
--- a/src/cmd/compile/internal/gc/range.go
+++ b/src/cmd/compile/internal/gc/range.go
@@ -61,7 +61,7 @@ func typecheckrangeExpr(n ir.Node) {
var t1, t2 *types.Type
toomany := false
- switch t.Etype {
+ switch t.Kind() {
default:
base.ErrorfAt(n.Pos(), "cannot range over %L", n.Right())
return
@@ -88,7 +88,7 @@ func typecheckrangeExpr(n ir.Node) {
case types.TSTRING:
t1 = types.Types[types.TINT]
- t2 = types.Runetype
+ t2 = types.RuneType
}
if n.List().Len() > 2 || toomany {
@@ -208,7 +208,7 @@ func walkrange(nrange ir.Node) ir.Node {
var body []ir.Node
var init []ir.Node
- switch t.Etype {
+ switch t.Kind() {
default:
base.Fatalf("walkrange")
@@ -375,7 +375,7 @@ func walkrange(nrange ir.Node) ir.Node {
hv1 := temp(types.Types[types.TINT])
hv1t := temp(types.Types[types.TINT])
- hv2 := temp(types.Runetype)
+ hv2 := temp(types.RuneType)
// hv1 := 0
init = append(init, ir.Nod(ir.OAS, hv1, nil))
@@ -391,7 +391,7 @@ func walkrange(nrange ir.Node) ir.Node {
// hv2 := rune(ha[hv1])
nind := ir.Nod(ir.OINDEX, ha, hv1)
nind.SetBounded(true)
- body = append(body, ir.Nod(ir.OAS, hv2, conv(nind, types.Runetype)))
+ body = append(body, ir.Nod(ir.OAS, hv2, conv(nind, types.RuneType)))
// if hv2 < utf8.RuneSelf
nif := ir.Nod(ir.OIF, nil, nil)
@@ -467,7 +467,7 @@ func isMapClear(n ir.Node) bool {
return false
}
- if n.Op() != ir.ORANGE || n.Type().Etype != types.TMAP || n.List().Len() != 1 {
+ if n.Op() != ir.ORANGE || n.Type().Kind() != types.TMAP || n.List().Len() != 1 {
return false
}
diff --git a/src/cmd/compile/internal/gc/reflect.go b/src/cmd/compile/internal/gc/reflect.go
index 73d369f413..4ab3005ce8 100644
--- a/src/cmd/compile/internal/gc/reflect.go
+++ b/src/cmd/compile/internal/gc/reflect.go
@@ -68,7 +68,7 @@ func imethodSize() int { return 4 + 4 } // Sizeof(runtime.imeth
func commonSize() int { return 4*Widthptr + 8 + 8 } // Sizeof(runtime._type{})
func uncommonSize(t *types.Type) int { // Sizeof(runtime.uncommontype{})
- if t.Sym == nil && len(methods(t)) == 0 {
+ if t.Sym() == nil && len(methods(t)) == 0 {
return 0
}
return 4 + 2 + 2 + 4 + 4
@@ -448,7 +448,7 @@ func methods(t *types.Type) []*Sig {
func imethods(t *types.Type) []*Sig {
var methods []*Sig
for _, f := range t.Fields().Slice() {
- if f.Type.Etype != types.TFUNC || f.Sym == nil {
+ if f.Type.Kind() != types.TFUNC || f.Sym == nil {
continue
}
if f.Sym.IsBlank() {
@@ -640,7 +640,7 @@ func dname(name, tag string, pkg *types.Pkg, exported bool) *obj.LSym {
// backing array of the []method field is written (by dextratypeData).
func dextratype(lsym *obj.LSym, ot int, t *types.Type, dataAdd int) int {
m := methods(t)
- if t.Sym == nil && len(m) == 0 {
+ if t.Sym() == nil && len(m) == 0 {
return ot
}
noff := int(Rnd(int64(ot), int64(Widthptr)))
@@ -672,16 +672,16 @@ func dextratype(lsym *obj.LSym, ot int, t *types.Type, dataAdd int) int {
}
func typePkg(t *types.Type) *types.Pkg {
- tsym := t.Sym
+ tsym := t.Sym()
if tsym == nil {
- switch t.Etype {
+ switch t.Kind() {
case types.TARRAY, types.TSLICE, types.TPTR, types.TCHAN:
if t.Elem() != nil {
- tsym = t.Elem().Sym
+ tsym = t.Elem().Sym()
}
}
}
- if tsym != nil && t != types.Types[t.Etype] && t != types.Errortype {
+ if tsym != nil && t != types.Types[t.Kind()] && t != types.ErrorType {
return tsym.Pkg
}
return nil
@@ -753,7 +753,7 @@ func typeptrdata(t *types.Type) int64 {
return 0
}
- switch t.Etype {
+ switch t.Kind() {
case types.TPTR,
types.TUNSAFEPTR,
types.TFUNC,
@@ -823,7 +823,7 @@ func dcommontype(lsym *obj.LSym, t *types.Type) int {
var sptr *obj.LSym
if !t.IsPtr() || t.IsPtrElem() {
tptr := types.NewPtr(t)
- if t.Sym != nil || methods(tptr) != nil {
+ if t.Sym() != nil || methods(tptr) != nil {
sptrWeak = false
}
sptr = dtypesym(tptr)
@@ -855,7 +855,7 @@ func dcommontype(lsym *obj.LSym, t *types.Type) int {
if uncommonSize(t) != 0 {
tflag |= tflagUncommon
}
- if t.Sym != nil && t.Sym.Name != "" {
+ if t.Sym() != nil && t.Sym().Name != "" {
tflag |= tflagNamed
}
if IsRegularMemory(t) {
@@ -872,12 +872,12 @@ func dcommontype(lsym *obj.LSym, t *types.Type) int {
if !strings.HasPrefix(p, "*") {
p = "*" + p
tflag |= tflagExtraStar
- if t.Sym != nil {
- exported = types.IsExported(t.Sym.Name)
+ if t.Sym() != nil {
+ exported = types.IsExported(t.Sym().Name)
}
} else {
- if t.Elem() != nil && t.Elem().Sym != nil {
- exported = types.IsExported(t.Elem().Sym.Name)
+ if t.Elem() != nil && t.Elem().Sym() != nil {
+ exported = types.IsExported(t.Elem().Sym().Name)
}
}
@@ -895,7 +895,7 @@ func dcommontype(lsym *obj.LSym, t *types.Type) int {
ot = duint8(lsym, ot, t.Align) // align
ot = duint8(lsym, ot, t.Align) // fieldAlign
- i = kinds[t.Etype]
+ i = kinds[t.Kind()]
if isdirectiface(t) {
i |= objabi.KindDirectIface
}
@@ -1029,7 +1029,7 @@ func itabname(t, itype *types.Type) ir.Node {
// isreflexive reports whether t has a reflexive equality operator.
// That is, if x==x for all x of type t.
func isreflexive(t *types.Type) bool {
- switch t.Etype {
+ switch t.Kind() {
case types.TBOOL,
types.TINT,
types.TUINT,
@@ -1075,7 +1075,7 @@ func isreflexive(t *types.Type) bool {
// needkeyupdate reports whether map updates with t as a key
// need the key to be updated.
func needkeyupdate(t *types.Type) bool {
- switch t.Etype {
+ switch t.Kind() {
case types.TBOOL, types.TINT, types.TUINT, types.TINT8, types.TUINT8, types.TINT16, types.TUINT16, types.TINT32, types.TUINT32,
types.TINT64, types.TUINT64, types.TUINTPTR, types.TPTR, types.TUNSAFEPTR, types.TCHAN:
return false
@@ -1104,7 +1104,7 @@ func needkeyupdate(t *types.Type) bool {
// hashMightPanic reports whether the hash of a map key of type t might panic.
func hashMightPanic(t *types.Type) bool {
- switch t.Etype {
+ switch t.Kind() {
case types.TINTER:
return true
@@ -1128,8 +1128,8 @@ func hashMightPanic(t *types.Type) bool {
// They've been separate internally to make error messages
// better, but we have to merge them in the reflect tables.
func formalType(t *types.Type) *types.Type {
- if t == types.Bytetype || t == types.Runetype {
- return types.Types[t.Etype]
+ if t == types.ByteType || t == types.RuneType {
+ return types.Types[t.Kind()]
}
return t
}
@@ -1152,19 +1152,19 @@ func dtypesym(t *types.Type) *obj.LSym {
// emit the type structures for int, float, etc.
tbase := t
- if t.IsPtr() && t.Sym == nil && t.Elem().Sym != nil {
+ if t.IsPtr() && t.Sym() == nil && t.Elem().Sym() != nil {
tbase = t.Elem()
}
dupok := 0
- if tbase.Sym == nil {
+ if tbase.Sym() == nil {
dupok = obj.DUPOK
}
- if base.Ctxt.Pkgpath != "runtime" || (tbase != types.Types[tbase.Etype] && tbase != types.Bytetype && tbase != types.Runetype && tbase != types.Errortype) { // int, float, etc
+ if base.Ctxt.Pkgpath != "runtime" || (tbase != types.Types[tbase.Kind()] && tbase != types.ByteType && tbase != types.RuneType && tbase != types.ErrorType) { // int, float, etc
// named types from other files are defined only by those files
- if tbase.Sym != nil && tbase.Sym.Pkg != ir.LocalPkg {
+ if tbase.Sym() != nil && tbase.Sym().Pkg != ir.LocalPkg {
if i, ok := typeSymIdx[tbase]; ok {
- lsym.Pkg = tbase.Sym.Pkg.Prefix
+ lsym.Pkg = tbase.Sym().Pkg.Prefix
if t != tbase {
lsym.SymIdx = int32(i[1])
} else {
@@ -1175,13 +1175,13 @@ func dtypesym(t *types.Type) *obj.LSym {
return lsym
}
// TODO(mdempsky): Investigate whether this can happen.
- if tbase.Etype == types.TFORW {
+ if tbase.Kind() == types.TFORW {
return lsym
}
}
ot := 0
- switch t.Etype {
+ switch t.Kind() {
default:
ot = dcommontype(lsym, t)
ot = dextratype(lsym, ot, t, 0)
@@ -1262,8 +1262,8 @@ func dtypesym(t *types.Type) *obj.LSym {
ot = dcommontype(lsym, t)
var tpkg *types.Pkg
- if t.Sym != nil && t != types.Types[t.Etype] && t != types.Errortype {
- tpkg = t.Sym.Pkg
+ if t.Sym() != nil && t != types.Types[t.Kind()] && t != types.ErrorType {
+ tpkg = t.Sym().Pkg
}
ot = dgopkgpath(lsym, ot, tpkg)
@@ -1328,7 +1328,7 @@ func dtypesym(t *types.Type) *obj.LSym {
ot = dextratype(lsym, ot, t, 0)
case types.TPTR:
- if t.Elem().Etype == types.TANY {
+ if t.Elem().Kind() == types.TANY {
// ../../../../runtime/type.go:/UnsafePointerType
ot = dcommontype(lsym, t)
ot = dextratype(lsym, ot, t, 0)
@@ -1397,13 +1397,13 @@ func dtypesym(t *types.Type) *obj.LSym {
// When buildmode=shared, all types are in typelinks so the
// runtime can deduplicate type pointers.
keep := base.Ctxt.Flag_dynlink
- if !keep && t.Sym == nil {
+ if !keep && t.Sym() == nil {
// For an unnamed type, we only need the link if the type can
// be created at run time by reflect.PtrTo and similar
// functions. If the type exists in the program, those
// functions must return the existing type structure rather
// than creating a new one.
- switch t.Etype {
+ switch t.Kind() {
case types.TPTR, types.TARRAY, types.TCHAN, types.TFUNC, types.TMAP, types.TSLICE, types.TSTRUCT:
keep = true
}
@@ -1541,7 +1541,7 @@ func dumpsignats() {
for _, ts := range signats {
t := ts.t
dtypesym(t)
- if t.Sym != nil {
+ if t.Sym() != nil {
dtypesym(types.NewPtr(t))
}
}
@@ -1616,7 +1616,7 @@ func dumpbasictypes() {
// another possible choice would be package main,
// but using runtime means fewer copies in object files.
if base.Ctxt.Pkgpath == "runtime" {
- for i := types.EType(1); i <= types.TBOOL; i++ {
+ for i := types.Kind(1); i <= types.TBOOL; i++ {
dtypesym(types.NewPtr(types.Types[i]))
}
dtypesym(types.NewPtr(types.Types[types.TSTRING]))
@@ -1624,9 +1624,9 @@ func dumpbasictypes() {
// emit type structs for error and func(error) string.
// The latter is the type of an auto-generated wrapper.
- dtypesym(types.NewPtr(types.Errortype))
+ dtypesym(types.NewPtr(types.ErrorType))
- dtypesym(functype(nil, []*ir.Field{anonfield(types.Errortype)}, []*ir.Field{anonfield(types.Types[types.TSTRING])}))
+ dtypesym(functype(nil, []*ir.Field{anonfield(types.ErrorType)}, []*ir.Field{anonfield(types.Types[types.TSTRING])}))
// add paths for runtime and main, which 6l imports implicitly.
dimportpath(Runtimepkg)
@@ -1665,7 +1665,7 @@ func (a typesByString) Less(i, j int) bool {
// will be equal for the above checks, but different in DWARF output.
// Sort by source position to ensure deterministic order.
// See issues 27013 and 30202.
- if a[i].t.Etype == types.TINTER && a[i].t.Methods().Len() > 0 {
+ if a[i].t.Kind() == types.TINTER && a[i].t.Methods().Len() > 0 {
return a[i].t.Methods().Index(0).Pos.Before(a[j].t.Methods().Index(0).Pos)
}
return false
@@ -1821,7 +1821,7 @@ func (p *GCProg) emit(t *types.Type, offset int64) {
p.w.Ptr(offset / int64(Widthptr))
return
}
- switch t.Etype {
+ switch t.Kind() {
default:
base.Fatalf("GCProg.emit: unexpected type %v", t)
diff --git a/src/cmd/compile/internal/gc/ssa.go b/src/cmd/compile/internal/gc/ssa.go
index 4be6caa0e3..3e020d7b92 100644
--- a/src/cmd/compile/internal/gc/ssa.go
+++ b/src/cmd/compile/internal/gc/ssa.go
@@ -54,13 +54,13 @@ func initssaconfig() {
_ = types.NewPtr(types.Types[types.TINTER]) // *interface{}
_ = types.NewPtr(types.NewPtr(types.Types[types.TSTRING])) // **string
_ = types.NewPtr(types.NewSlice(types.Types[types.TINTER])) // *[]interface{}
- _ = types.NewPtr(types.NewPtr(types.Bytetype)) // **byte
- _ = types.NewPtr(types.NewSlice(types.Bytetype)) // *[]byte
+ _ = types.NewPtr(types.NewPtr(types.ByteType)) // **byte
+ _ = types.NewPtr(types.NewSlice(types.ByteType)) // *[]byte
_ = types.NewPtr(types.NewSlice(types.Types[types.TSTRING])) // *[]string
_ = types.NewPtr(types.NewPtr(types.NewPtr(types.Types[types.TUINT8]))) // ***uint8
_ = types.NewPtr(types.Types[types.TINT16]) // *int16
_ = types.NewPtr(types.Types[types.TINT64]) // *int64
- _ = types.NewPtr(types.Errortype) // *error
+ _ = types.NewPtr(types.ErrorType) // *error
types.NewPtrCacheEnabled = false
ssaConfig = ssa.NewConfig(thearch.LinkArch.Name, *types_, base.Ctxt, base.Flag.N == 0)
ssaConfig.SoftFloat = thearch.SoftFloat
@@ -1591,7 +1591,7 @@ func (s *state) exit() *ssa.Block {
type opAndType struct {
op ir.Op
- etype types.EType
+ etype types.Kind
}
var opToSSA = map[opAndType]ssa.Op{
@@ -1766,8 +1766,8 @@ var opToSSA = map[opAndType]ssa.Op{
opAndType{ir.OLE, types.TFLOAT32}: ssa.OpLeq32F,
}
-func (s *state) concreteEtype(t *types.Type) types.EType {
- e := t.Etype
+func (s *state) concreteEtype(t *types.Type) types.Kind {
+ e := t.Kind()
switch e {
default:
return e
@@ -1799,7 +1799,7 @@ func (s *state) ssaOp(op ir.Op, t *types.Type) ssa.Op {
}
func floatForComplex(t *types.Type) *types.Type {
- switch t.Etype {
+ switch t.Kind() {
case types.TCOMPLEX64:
return types.Types[types.TFLOAT32]
case types.TCOMPLEX128:
@@ -1810,7 +1810,7 @@ func floatForComplex(t *types.Type) *types.Type {
}
func complexForFloat(t *types.Type) *types.Type {
- switch t.Etype {
+ switch t.Kind() {
case types.TFLOAT32:
return types.Types[types.TCOMPLEX64]
case types.TFLOAT64:
@@ -1822,19 +1822,19 @@ func complexForFloat(t *types.Type) *types.Type {
type opAndTwoTypes struct {
op ir.Op
- etype1 types.EType
- etype2 types.EType
+ etype1 types.Kind
+ etype2 types.Kind
}
type twoTypes struct {
- etype1 types.EType
- etype2 types.EType
+ etype1 types.Kind
+ etype2 types.Kind
}
type twoOpsAndType struct {
op1 ssa.Op
op2 ssa.Op
- intermediateType types.EType
+ intermediateType types.Kind
}
var fpConvOpToSSA = map[twoTypes]twoOpsAndType{
@@ -2115,12 +2115,12 @@ func (s *state) expr(n ir.Node) *ssa.Value {
v := s.newValue1(ssa.OpCopy, to, x) // ensure that v has the right type
// CONVNOP closure
- if to.Etype == types.TFUNC && from.IsPtrShaped() {
+ if to.Kind() == types.TFUNC && from.IsPtrShaped() {
return v
}
// named <--> unnamed type or typed <--> untyped const
- if from.Etype == to.Etype {
+ if from.Kind() == to.Kind() {
return v
}
@@ -2130,7 +2130,7 @@ func (s *state) expr(n ir.Node) *ssa.Value {
}
// map <--> *hmap
- if to.Etype == types.TMAP && from.IsPtr() &&
+ if to.Kind() == types.TMAP && from.IsPtr() &&
to.MapType().Hmap == from.Elem() {
return v
}
@@ -2141,8 +2141,8 @@ func (s *state) expr(n ir.Node) *ssa.Value {
s.Fatalf("CONVNOP width mismatch %v (%d) -> %v (%d)\n", from, from.Width, to, to.Width)
return nil
}
- if etypesign(from.Etype) != etypesign(to.Etype) {
- s.Fatalf("CONVNOP sign mismatch %v (%s) -> %v (%s)\n", from, from.Etype, to, to.Etype)
+ if etypesign(from.Kind()) != etypesign(to.Kind()) {
+ s.Fatalf("CONVNOP sign mismatch %v (%s) -> %v (%s)\n", from, from.Kind(), to, to.Kind())
return nil
}
@@ -2153,7 +2153,7 @@ func (s *state) expr(n ir.Node) *ssa.Value {
return v
}
- if etypesign(from.Etype) == 0 {
+ if etypesign(from.Kind()) == 0 {
s.Fatalf("CONVNOP unrecognized non-integer %v -> %v\n", from, to)
return nil
}
@@ -2329,7 +2329,7 @@ func (s *state) expr(n ir.Node) *ssa.Value {
s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexImag, ftp, x)))
}
- s.Fatalf("unhandled OCONV %s -> %s", n.Left().Type().Etype, n.Type().Etype)
+ s.Fatalf("unhandled OCONV %s -> %s", n.Left().Type().Kind(), n.Type().Kind())
return nil
case ir.ODOTTYPE:
@@ -3172,7 +3172,7 @@ const (
type sfRtCallDef struct {
rtfn *obj.LSym
- rtype types.EType
+ rtype types.Kind
}
var softFloatOps map[ssa.Op]sfRtCallDef
@@ -3467,9 +3467,9 @@ func init() {
},
sys.AMD64, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
- type atomicOpEmitter func(s *state, n ir.Node, args []*ssa.Value, op ssa.Op, typ types.EType)
+ type atomicOpEmitter func(s *state, n ir.Node, args []*ssa.Value, op ssa.Op, typ types.Kind)
- makeAtomicGuardedIntrinsicARM64 := func(op0, op1 ssa.Op, typ, rtyp types.EType, emit atomicOpEmitter) intrinsicBuilder {
+ makeAtomicGuardedIntrinsicARM64 := func(op0, op1 ssa.Op, typ, rtyp types.Kind, emit atomicOpEmitter) intrinsicBuilder {
return func(s *state, n ir.Node, args []*ssa.Value) *ssa.Value {
// Target Atomic feature is identified by dynamic detection
@@ -3505,7 +3505,7 @@ func init() {
}
}
- atomicXchgXaddEmitterARM64 := func(s *state, n ir.Node, args []*ssa.Value, op ssa.Op, typ types.EType) {
+ atomicXchgXaddEmitterARM64 := func(s *state, n ir.Node, args []*ssa.Value, op ssa.Op, typ types.Kind) {
v := s.newValue3(op, types.NewTuple(types.Types[typ], types.TypeMem), args[0], args[1], s.mem())
s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
s.vars[n] = s.newValue1(ssa.OpSelect0, types.Types[typ], v)
@@ -3561,7 +3561,7 @@ func init() {
},
sys.PPC64)
- atomicCasEmitterARM64 := func(s *state, n ir.Node, args []*ssa.Value, op ssa.Op, typ types.EType) {
+ atomicCasEmitterARM64 := func(s *state, n ir.Node, args []*ssa.Value, op ssa.Op, typ types.Kind) {
v := s.newValue4(op, types.NewTuple(types.Types[types.TBOOL], types.TypeMem), args[0], args[1], args[2], s.mem())
s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
s.vars[n] = s.newValue1(ssa.OpSelect0, types.Types[typ], v)
@@ -3599,7 +3599,7 @@ func init() {
},
sys.AMD64, sys.MIPS, sys.PPC64, sys.S390X)
- atomicAndOrEmitterARM64 := func(s *state, n ir.Node, args []*ssa.Value, op ssa.Op, typ types.EType) {
+ atomicAndOrEmitterARM64 := func(s *state, n ir.Node, args []*ssa.Value, op ssa.Op, typ types.Kind) {
s.vars[memVar] = s.newValue3(op, types.TypeMem, args[0], args[1], s.mem())
}
@@ -4810,7 +4810,7 @@ func (s *state) getClosureAndRcvr(fn ir.Node) (*ssa.Value, *ssa.Value) {
// etypesign returns the signed-ness of e, for integer/pointer etypes.
// -1 means signed, +1 means unsigned, 0 means non-integer/non-pointer.
-func etypesign(e types.EType) int8 {
+func etypesign(e types.Kind) int8 {
switch e {
case types.TINT8, types.TINT16, types.TINT32, types.TINT64, types.TINT:
return -1
@@ -4980,7 +4980,7 @@ func canSSAType(t *types.Type) bool {
// Too big and we'll introduce too much register pressure.
return false
}
- switch t.Etype {
+ switch t.Kind() {
case types.TARRAY:
// We can't do larger arrays because dynamic indexing is
// not supported on SSA variables.
diff --git a/src/cmd/compile/internal/gc/subr.go b/src/cmd/compile/internal/gc/subr.go
index 04c8c537bd..011a7ac5bc 100644
--- a/src/cmd/compile/internal/gc/subr.go
+++ b/src/cmd/compile/internal/gc/subr.go
@@ -181,7 +181,7 @@ func nodstr(s string) ir.Node {
return ir.NewLiteral(constant.MakeString(s))
}
-func isptrto(t *types.Type, et types.EType) bool {
+func isptrto(t *types.Type, et types.Kind) bool {
if t == nil {
return false
}
@@ -192,7 +192,7 @@ func isptrto(t *types.Type, et types.EType) bool {
if t == nil {
return false
}
- if t.Etype != et {
+ if t.Kind() != et {
return false
}
return true
@@ -208,7 +208,7 @@ func methtype(t *types.Type) *types.Type {
// Strip away pointer if it's there.
if t.IsPtr() {
- if t.Sym != nil {
+ if t.Sym() != nil {
return nil
}
t = t.Elem()
@@ -218,15 +218,15 @@ func methtype(t *types.Type) *types.Type {
}
// Must be a named type or anonymous struct.
- if t.Sym == nil && !t.IsStruct() {
+ if t.Sym() == nil && !t.IsStruct() {
return nil
}
// Check types.
- if issimple[t.Etype] {
+ if issimple[t.Kind()] {
return t
}
- switch t.Etype {
+ switch t.Kind() {
case types.TARRAY, types.TCHAN, types.TFUNC, types.TMAP, types.TSLICE, types.TSTRING, types.TSTRUCT:
return t
}
@@ -241,7 +241,7 @@ func assignop(src, dst *types.Type) (ir.Op, string) {
if src == dst {
return ir.OCONVNOP, ""
}
- if src == nil || dst == nil || src.Etype == types.TFORW || dst.Etype == types.TFORW || src.Orig == nil || dst.Orig == nil {
+ if src == nil || dst == nil || src.Kind() == types.TFORW || dst.Kind() == types.TFORW || src.Underlying() == nil || dst.Underlying() == nil {
return ir.OXXX, ""
}
@@ -257,13 +257,13 @@ func assignop(src, dst *types.Type) (ir.Op, string) {
// we want to recompute the itab. Recomputing the itab ensures
// that itabs are unique (thus an interface with a compile-time
// type I has an itab with interface type I).
- if types.Identical(src.Orig, dst.Orig) {
+ if types.Identical(src.Underlying(), dst.Underlying()) {
if src.IsEmptyInterface() {
// Conversion between two empty interfaces
// requires no code.
return ir.OCONVNOP, ""
}
- if (src.Sym == nil || dst.Sym == nil) && !src.IsInterface() {
+ if (src.Sym() == nil || dst.Sym() == nil) && !src.IsInterface() {
// Conversion between two types, at least one unnamed,
// needs no conversion. The exception is nonempty interfaces
// which need to have their itab updated.
@@ -272,7 +272,7 @@ func assignop(src, dst *types.Type) (ir.Op, string) {
}
// 3. dst is an interface type and src implements dst.
- if dst.IsInterface() && src.Etype != types.TNIL {
+ if dst.IsInterface() && src.Kind() != types.TNIL {
var missing, have *types.Field
var ptr int
if implements(src, dst, &missing, &have, &ptr) {
@@ -309,7 +309,7 @@ func assignop(src, dst *types.Type) (ir.Op, string) {
return ir.OXXX, why
}
- if src.IsInterface() && dst.Etype != types.TBLANK {
+ if src.IsInterface() && dst.Kind() != types.TBLANK {
var missing, have *types.Field
var ptr int
var why string
@@ -323,14 +323,14 @@ func assignop(src, dst *types.Type) (ir.Op, string) {
// src and dst have identical element types, and
// either src or dst is not a named type.
if src.IsChan() && src.ChanDir() == types.Cboth && dst.IsChan() {
- if types.Identical(src.Elem(), dst.Elem()) && (src.Sym == nil || dst.Sym == nil) {
+ if types.Identical(src.Elem(), dst.Elem()) && (src.Sym() == nil || dst.Sym() == nil) {
return ir.OCONVNOP, ""
}
}
// 5. src is the predeclared identifier nil and dst is a nillable type.
- if src.Etype == types.TNIL {
- switch dst.Etype {
+ if src.Kind() == types.TNIL {
+ switch dst.Kind() {
case types.TPTR,
types.TFUNC,
types.TMAP,
@@ -344,7 +344,7 @@ func assignop(src, dst *types.Type) (ir.Op, string) {
// 6. rule about untyped constants - already converted by defaultlit.
// 7. Any typed value can be assigned to the blank identifier.
- if dst.Etype == types.TBLANK {
+ if dst.Kind() == types.TBLANK {
return ir.OCONVNOP, ""
}
@@ -373,7 +373,7 @@ func convertop(srcConstant bool, src, dst *types.Type) (ir.Op, string) {
return ir.OXXX, why
}
// (b) Disallow string to []T where T is go:notinheap.
- if src.IsString() && dst.IsSlice() && dst.Elem().NotInHeap() && (dst.Elem().Etype == types.Bytetype.Etype || dst.Elem().Etype == types.Runetype.Etype) {
+ if src.IsString() && dst.IsSlice() && dst.Elem().NotInHeap() && (dst.Elem().Kind() == types.ByteType.Kind() || dst.Elem().Kind() == types.RuneType.Kind()) {
why := fmt.Sprintf(":\n\t%v is incomplete (or unallocatable)", dst.Elem())
return ir.OXXX, why
}
@@ -393,21 +393,21 @@ func convertop(srcConstant bool, src, dst *types.Type) (ir.Op, string) {
}
// 2. Ignoring struct tags, src and dst have identical underlying types.
- if types.IdenticalIgnoreTags(src.Orig, dst.Orig) {
+ if types.IdenticalIgnoreTags(src.Underlying(), dst.Underlying()) {
return ir.OCONVNOP, ""
}
// 3. src and dst are unnamed pointer types and, ignoring struct tags,
// their base types have identical underlying types.
- if src.IsPtr() && dst.IsPtr() && src.Sym == nil && dst.Sym == nil {
- if types.IdenticalIgnoreTags(src.Elem().Orig, dst.Elem().Orig) {
+ if src.IsPtr() && dst.IsPtr() && src.Sym() == nil && dst.Sym() == nil {
+ if types.IdenticalIgnoreTags(src.Elem().Underlying(), dst.Elem().Underlying()) {
return ir.OCONVNOP, ""
}
}
// 4. src and dst are both integer or floating point types.
if (src.IsInteger() || src.IsFloat()) && (dst.IsInteger() || dst.IsFloat()) {
- if simtype[src.Etype] == simtype[dst.Etype] {
+ if simtype[src.Kind()] == simtype[dst.Kind()] {
return ir.OCONVNOP, ""
}
return ir.OCONV, ""
@@ -415,7 +415,7 @@ func convertop(srcConstant bool, src, dst *types.Type) (ir.Op, string) {
// 5. src and dst are both complex types.
if src.IsComplex() && dst.IsComplex() {
- if simtype[src.Etype] == simtype[dst.Etype] {
+ if simtype[src.Kind()] == simtype[dst.Kind()] {
return ir.OCONVNOP, ""
}
return ir.OCONV, ""
@@ -435,10 +435,10 @@ func convertop(srcConstant bool, src, dst *types.Type) (ir.Op, string) {
}
if src.IsSlice() && dst.IsString() {
- if src.Elem().Etype == types.Bytetype.Etype {
+ if src.Elem().Kind() == types.ByteType.Kind() {
return ir.OBYTES2STR, ""
}
- if src.Elem().Etype == types.Runetype.Etype {
+ if src.Elem().Kind() == types.RuneType.Kind() {
return ir.ORUNES2STR, ""
}
}
@@ -446,10 +446,10 @@ func convertop(srcConstant bool, src, dst *types.Type) (ir.Op, string) {
// 7. src is a string and dst is []byte or []rune.
// String to slice.
if src.IsString() && dst.IsSlice() {
- if dst.Elem().Etype == types.Bytetype.Etype {
+ if dst.Elem().Kind() == types.ByteType.Kind() {
return ir.OSTR2BYTES, ""
}
- if dst.Elem().Etype == types.Runetype.Etype {
+ if dst.Elem().Kind() == types.RuneType.Kind() {
return ir.OSTR2RUNES, ""
}
}
@@ -467,7 +467,7 @@ func convertop(srcConstant bool, src, dst *types.Type) (ir.Op, string) {
// src is map and dst is a pointer to corresponding hmap.
// This rule is needed for the implementation detail that
// go gc maps are implemented as a pointer to a hmap struct.
- if src.Etype == types.TMAP && dst.IsPtr() &&
+ if src.Kind() == types.TMAP && dst.IsPtr() &&
src.MapType().Hmap == dst.Elem() {
return ir.OCONVNOP, ""
}
@@ -485,7 +485,7 @@ func assignconvfn(n ir.Node, t *types.Type, context func() string) ir.Node {
return n
}
- if t.Etype == types.TBLANK && n.Type().Etype == types.TNIL {
+ if t.Kind() == types.TBLANK && n.Type().Kind() == types.TNIL {
base.Errorf("use of untyped nil")
}
@@ -493,7 +493,7 @@ func assignconvfn(n ir.Node, t *types.Type, context func() string) ir.Node {
if n.Type() == nil {
return n
}
- if t.Etype == types.TBLANK {
+ if t.Kind() == types.TBLANK {
return n
}
@@ -600,15 +600,15 @@ func calcHasCall(n ir.Node) bool {
// When using soft-float, these ops might be rewritten to function calls
// so we ensure they are evaluated first.
case ir.OADD, ir.OSUB, ir.ONEG, ir.OMUL:
- if thearch.SoftFloat && (isFloat[n.Type().Etype] || isComplex[n.Type().Etype]) {
+ if thearch.SoftFloat && (isFloat[n.Type().Kind()] || isComplex[n.Type().Kind()]) {
return true
}
case ir.OLT, ir.OEQ, ir.ONE, ir.OLE, ir.OGE, ir.OGT:
- if thearch.SoftFloat && (isFloat[n.Left().Type().Etype] || isComplex[n.Left().Type().Etype]) {
+ if thearch.SoftFloat && (isFloat[n.Left().Type().Kind()] || isComplex[n.Left().Type().Kind()]) {
return true
}
case ir.OCONV:
- if thearch.SoftFloat && ((isFloat[n.Type().Etype] || isComplex[n.Type().Etype]) || (isFloat[n.Left().Type().Etype] || isComplex[n.Left().Type().Etype])) {
+ if thearch.SoftFloat && ((isFloat[n.Type().Kind()] || isComplex[n.Type().Kind()]) || (isFloat[n.Left().Type().Kind()] || isComplex[n.Left().Type().Kind()])) {
return true
}
}
@@ -802,7 +802,7 @@ func lookdot0(s *types.Sym, t *types.Type, save **types.Field, ignorecase bool)
}
u = t
- if t.Sym != nil && t.IsPtr() && !t.Elem().IsPtr() {
+ if t.Sym() != nil && t.IsPtr() && !t.Elem().IsPtr() {
// If t is a defined pointer type, then x.m is shorthand for (*x).m.
u = t.Elem()
}
@@ -1110,13 +1110,13 @@ func genwrapper(rcvr *types.Type, method *types.Field, newnam *types.Sym) {
// Only generate (*T).M wrappers for T.M in T's own package.
if rcvr.IsPtr() && rcvr.Elem() == method.Type.Recv().Type &&
- rcvr.Elem().Sym != nil && rcvr.Elem().Sym.Pkg != ir.LocalPkg {
+ rcvr.Elem().Sym() != nil && rcvr.Elem().Sym().Pkg != ir.LocalPkg {
return
}
// Only generate I.M wrappers for I in I's own package
// but keep doing it for error.Error (was issue #29304).
- if rcvr.IsInterface() && rcvr.Sym != nil && rcvr.Sym.Pkg != ir.LocalPkg && rcvr != types.Errortype {
+ if rcvr.IsInterface() && rcvr.Sym() != nil && rcvr.Sym().Pkg != ir.LocalPkg && rcvr != types.ErrorType {
return
}
@@ -1193,7 +1193,7 @@ func genwrapper(rcvr *types.Type, method *types.Field, newnam *types.Sym) {
// Inline calls within (*T).M wrappers. This is safe because we only
// generate those wrappers within the same compilation unit as (T).M.
// TODO(mdempsky): Investigate why we can't enable this more generally.
- if rcvr.IsPtr() && rcvr.Elem() == method.Type.Recv().Type && rcvr.Elem().Sym != nil {
+ if rcvr.IsPtr() && rcvr.Elem() == method.Type.Recv().Type && rcvr.Elem().Sym() != nil {
inlcalls(fn)
}
escapeFuncs([]*ir.Func{fn}, false)
@@ -1433,7 +1433,7 @@ func isdirectiface(t *types.Type) bool {
return false
}
- switch t.Etype {
+ switch t.Kind() {
case types.TPTR:
// Pointers to notinheap types must be stored indirectly. See issue 42076.
return !t.Elem().NotInHeap()
diff --git a/src/cmd/compile/internal/gc/swt.go b/src/cmd/compile/internal/gc/swt.go
index 02d38ac4b1..30179e1dd6 100644
--- a/src/cmd/compile/internal/gc/swt.go
+++ b/src/cmd/compile/internal/gc/swt.go
@@ -157,7 +157,7 @@ func typecheckExprSwitch(n ir.Node) {
switch {
case t.IsMap():
nilonly = "map"
- case t.Etype == types.TFUNC:
+ case t.Kind() == types.TFUNC:
nilonly = "func"
case t.IsSlice():
nilonly = "slice"
@@ -332,7 +332,7 @@ type exprClause struct {
func (s *exprSwitch) Add(pos src.XPos, expr, jmp ir.Node) {
c := exprClause{pos: pos, lo: expr, hi: expr, jmp: jmp}
- if okforcmp[s.exprname.Type().Etype] && expr.Op() == ir.OLITERAL {
+ if okforcmp[s.exprname.Type().Kind()] && expr.Op() == ir.OLITERAL {
s.clauses = append(s.clauses, c)
return
}
diff --git a/src/cmd/compile/internal/gc/typecheck.go b/src/cmd/compile/internal/gc/typecheck.go
index dccb5ecdce..f120b44413 100644
--- a/src/cmd/compile/internal/gc/typecheck.go
+++ b/src/cmd/compile/internal/gc/typecheck.go
@@ -156,7 +156,7 @@ func typekind(t *types.Type) string {
if t.IsUntyped() {
return fmt.Sprintf("%v", t)
}
- et := t.Etype
+ et := t.Kind()
if int(et) < len(_typekind) {
s := _typekind[et]
if s != "" {
@@ -329,7 +329,7 @@ func typecheck(n ir.Node, top int) (res ir.Node) {
// The result of indexlit MUST be assigned back to n, e.g.
// n.Left = indexlit(n.Left)
func indexlit(n ir.Node) ir.Node {
- if n != nil && n.Type() != nil && n.Type().Etype == types.TIDEAL {
+ if n != nil && n.Type() != nil && n.Type().Kind() == types.TIDEAL {
return defaultlit(n, types.Types[types.TINT])
}
return n
@@ -583,7 +583,7 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
n.SetType(nil)
return n
}
- if n.Implicit() && !okforarith[l.Type().Etype] {
+ if n.Implicit() && !okforarith[l.Type().Kind()] {
base.Errorf("invalid operation: %v (non-numeric type %v)", n, l.Type())
n.SetType(nil)
return n
@@ -617,7 +617,7 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
return n
}
t = l.Type()
- if t != nil && t.Etype != types.TIDEAL && !t.IsInteger() {
+ if t != nil && t.Kind() != types.TIDEAL && !t.IsInteger() {
base.Errorf("invalid operation: %v (shift of type %v)", n, t)
n.SetType(nil)
return n
@@ -659,15 +659,15 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
return n
}
t := l.Type()
- if t.Etype == types.TIDEAL {
+ if t.Kind() == types.TIDEAL {
t = r.Type()
}
- et := t.Etype
+ et := t.Kind()
if et == types.TIDEAL {
et = types.TINT
}
aop := ir.OXXX
- if iscmp[n.Op()] && t.Etype != types.TIDEAL && !types.Identical(l.Type(), r.Type()) {
+ if iscmp[n.Op()] && t.Kind() != types.TIDEAL && !types.Identical(l.Type(), r.Type()) {
// comparison is okay as long as one side is
// assignable to the other. convert so they have
// the same type.
@@ -676,7 +676,7 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
// in that case, check comparability of the concrete type.
// The conversion allocates, so only do it if the concrete type is huge.
converted := false
- if r.Type().Etype != types.TBLANK {
+ if r.Type().Kind() != types.TBLANK {
aop, _ = assignop(l.Type(), r.Type())
if aop != ir.OXXX {
if r.Type().IsInterface() && !l.Type().IsInterface() && !IsComparable(l.Type()) {
@@ -698,7 +698,7 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
}
}
- if !converted && l.Type().Etype != types.TBLANK {
+ if !converted && l.Type().Kind() != types.TBLANK {
aop, _ = assignop(r.Type(), l.Type())
if aop != ir.OXXX {
if l.Type().IsInterface() && !r.Type().IsInterface() && !IsComparable(r.Type()) {
@@ -719,10 +719,10 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
}
}
- et = t.Etype
+ et = t.Kind()
}
- if t.Etype != types.TIDEAL && !types.Identical(l.Type(), r.Type()) {
+ if t.Kind() != types.TIDEAL && !types.Identical(l.Type(), r.Type()) {
l, r = defaultlit2(l, r, true)
if l.Type() == nil || r.Type() == nil {
n.SetType(nil)
@@ -735,10 +735,10 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
}
}
- if t.Etype == types.TIDEAL {
+ if t.Kind() == types.TIDEAL {
t = mixUntyped(l.Type(), r.Type())
}
- if dt := defaultType(t); !okfor[op][dt.Etype] {
+ if dt := defaultType(t); !okfor[op][dt.Kind()] {
base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, op, typekind(t))
n.SetType(nil)
return n
@@ -764,7 +764,7 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
return n
}
- if l.Type().Etype == types.TFUNC && !ir.IsNil(l) && !ir.IsNil(r) {
+ if l.Type().Kind() == types.TFUNC && !ir.IsNil(l) && !ir.IsNil(r) {
base.Errorf("invalid operation: %v (func can only be compared to nil)", n)
n.SetType(nil)
return n
@@ -825,7 +825,7 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
n.SetType(nil)
return n
}
- if !okfor[n.Op()][defaultType(t).Etype] {
+ if !okfor[n.Op()][defaultType(t).Kind()] {
base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, n.Op(), typekind(t))
n.SetType(nil)
return n
@@ -1023,7 +1023,7 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
n.SetType(nil)
return n
}
- switch t.Etype {
+ switch t.Kind() {
default:
base.Errorf("invalid operation: %v (type %v does not support indexing)", n, t)
n.SetType(nil)
@@ -1032,7 +1032,7 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
case types.TSTRING, types.TARRAY, types.TSLICE:
n.SetRight(indexlit(n.Right()))
if t.IsString() {
- n.SetType(types.Bytetype)
+ n.SetType(types.ByteType)
} else {
n.SetType(t.Elem())
}
@@ -1191,7 +1191,7 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
n.SetLeft(defaultlit(n.Left(), types.Types[types.TINT]))
- if !n.Left().Type().IsInteger() && n.Type().Etype != types.TIDEAL {
+ if !n.Left().Type().IsInteger() && n.Type().Kind() != types.TIDEAL {
base.Errorf("non-integer len argument in OMAKESLICECOPY")
}
@@ -1383,7 +1383,7 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
default:
n.SetOp(ir.OCALLFUNC)
- if t.Etype != types.TFUNC {
+ if t.Kind() != types.TFUNC {
name := l.String()
if isBuiltinFuncName(name) && l.Name().Defn != nil {
// be more specific when the function
@@ -1446,9 +1446,9 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
var ok bool
if n.Op() == ir.OLEN {
- ok = okforlen[t.Etype]
+ ok = okforlen[t.Kind()]
} else {
- ok = okforcap[t.Etype]
+ ok = okforcap[t.Kind()]
}
if !ok {
base.Errorf("invalid argument %L for %v", l, n.Op())
@@ -1469,7 +1469,7 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
}
// Determine result type.
- switch t.Etype {
+ switch t.Kind() {
case types.TIDEAL:
n.SetType(types.UntypedFloat)
case types.TCOMPLEX64:
@@ -1505,7 +1505,7 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
}
var t *types.Type
- switch l.Type().Etype {
+ switch l.Type().Kind() {
default:
base.Errorf("invalid operation: %v (arguments have type %v, expected floating-point)", n, l.Type())
n.SetType(nil)
@@ -1624,7 +1624,7 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
break
}
- args.SetSecond(assignconv(args.Second(), t.Orig, "append"))
+ args.SetSecond(assignconv(args.Second(), t.Underlying(), "append"))
break
}
@@ -1651,7 +1651,7 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
// copy([]byte, string)
if n.Left().Type().IsSlice() && n.Right().Type().IsString() {
- if types.Identical(n.Left().Type().Elem(), types.Bytetype) {
+ if types.Identical(n.Left().Type().Elem(), types.ByteType) {
break
}
base.Errorf("arguments to copy have different element types: %L and string", n.Left().Type())
@@ -1701,8 +1701,8 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
n.SetOp(op)
switch n.Op() {
case ir.OCONVNOP:
- if t.Etype == n.Type().Etype {
- switch t.Etype {
+ if t.Kind() == n.Type().Kind() {
+ switch t.Kind() {
case types.TFLOAT32, types.TFLOAT64, types.TCOMPLEX64, types.TCOMPLEX128:
// Floating point casts imply rounding and
// so the conversion must be kept.
@@ -1741,7 +1741,7 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
i := 1
var nn ir.Node
- switch t.Etype {
+ switch t.Kind() {
default:
base.Errorf("cannot make type %v", t)
n.SetType(nil)
@@ -2062,7 +2062,7 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
t := n.Type()
if t != nil && !t.IsFuncArgStruct() && n.Op() != ir.OTYPE {
- switch t.Etype {
+ switch t.Kind() {
case types.TFUNC, // might have TANY; wait until it's called
types.TANY, types.TFORW, types.TIDEAL, types.TNIL, types.TBLANK:
break
@@ -2352,7 +2352,7 @@ func typecheckMethodExpr(n ir.Node) (res ir.Node) {
// types declared at package scope. However, we need
// to make sure to generate wrappers for anonymous
// receiver types too.
- if mt.Sym == nil {
+ if mt.Sym() == nil {
addsignat(t)
}
}
@@ -2385,7 +2385,7 @@ func typecheckMethodExpr(n ir.Node) (res ir.Node) {
me.SetOpt(m)
// Issue 25065. Make sure that we emit the symbol for a local method.
- if base.Ctxt.Flag_dynlink && !inimport && (t.Sym == nil || t.Sym.Pkg == ir.LocalPkg) {
+ if base.Ctxt.Flag_dynlink && !inimport && (t.Sym() == nil || t.Sym().Pkg == ir.LocalPkg) {
makefuncsym(me.Sym())
}
@@ -2417,7 +2417,7 @@ func lookdot(n ir.Node, t *types.Type, dostrcmp int) *types.Field {
}
var f2 *types.Field
- if n.Left().Type() == t || n.Left().Type().Sym == nil {
+ if n.Left().Type() == t || n.Left().Type().Sym() == nil {
mt := methtype(t)
if mt != nil {
f2 = lookdot1(n, s, mt, mt.Methods(), dostrcmp)
@@ -2493,7 +2493,7 @@ func lookdot(n ir.Node, t *types.Type, dostrcmp int) *types.Field {
pll = ll
ll = ll.Left()
}
- if pll.Implicit() && ll.Type().IsPtr() && ll.Type().Sym != nil && ir.AsNode(ll.Type().Sym.Def) != nil && ir.AsNode(ll.Type().Sym.Def).Op() == ir.OTYPE {
+ if pll.Implicit() && ll.Type().IsPtr() && ll.Type().Sym() != nil && ir.AsNode(ll.Type().Sym().Def) != nil && ir.AsNode(ll.Type().Sym().Def).Op() == ir.OTYPE {
// It is invalid to automatically dereference a named pointer type when selecting a method.
// Make n.Left == ll to clarify error message.
n.SetLeft(ll)
@@ -2681,7 +2681,7 @@ func sigrepr(t *types.Type, isddd bool) string {
return "bool"
}
- if t.Etype == types.TIDEAL {
+ if t.Kind() == types.TIDEAL {
// "untyped number" is not commonly used
// outside of the compiler, so let's use "number".
// TODO(mdempsky): Revisit this.
@@ -2724,7 +2724,7 @@ func fielddup(name string, hash map[string]bool) {
// iscomptype reports whether type t is a composite literal type.
func iscomptype(t *types.Type) bool {
- switch t.Etype {
+ switch t.Kind() {
case types.TARRAY, types.TSLICE, types.TSTRUCT, types.TMAP:
return true
default:
@@ -2801,7 +2801,7 @@ func typecheckcomplit(n ir.Node) (res ir.Node) {
}
n.SetType(t)
- switch t.Etype {
+ switch t.Kind() {
default:
base.Errorf("invalid composite literal type %v", t)
n.SetType(nil)
@@ -3154,7 +3154,7 @@ func samesafeexpr(l ir.Node, r ir.Node) bool {
case ir.OCONV:
// Some conversions can't be reused, such as []byte(str).
// Allow only numeric-ish types. This is a bit conservative.
- return issimple[l.Type().Etype] && samesafeexpr(l.Left(), r.Left())
+ return issimple[l.Type().Kind()] && samesafeexpr(l.Left(), r.Left())
case ir.OINDEX, ir.OINDEXMAP,
ir.OADD, ir.OSUB, ir.OOR, ir.OXOR, ir.OMUL, ir.OLSH, ir.ORSH, ir.OAND, ir.OANDNOT, ir.ODIV, ir.OMOD:
@@ -3451,7 +3451,7 @@ func typecheckdeftype(n *ir.Name) {
n.SetDiag(true)
n.SetType(nil)
}
- if t.Etype == types.TFORW && base.Errors() > errorsBefore {
+ if t.Kind() == types.TFORW && base.Errors() > errorsBefore {
// Something went wrong during type-checking,
// but it was reported. Silence future errors.
t.SetBroke(true)
@@ -3541,7 +3541,7 @@ func typecheckdef(n ir.Node) {
t := n.Type()
if t != nil {
- if !ir.OKForConst[t.Etype] {
+ if !ir.OKForConst[t.Kind()] {
base.ErrorfAt(n.Pos(), "invalid constant type %v", t)
goto ret
}
@@ -3638,7 +3638,7 @@ ret:
func checkmake(t *types.Type, arg string, np *ir.Node) bool {
n := *np
- if !n.Type().IsInteger() && n.Type().Etype != types.TIDEAL {
+ if !n.Type().IsInteger() && n.Type().Kind() != types.TIDEAL {
base.Errorf("non-integer %s argument in make(%v) - %v", arg, t, n.Type())
return false
}
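
The typecheck.go hunks above are mechanical rewrites from the exported Etype field to the new Kind() accessor. Below is a standalone sketch of that accessor-plus-table pattern; the Kind, Type, and okForAdd names are invented stand-ins for illustration, not the compiler's own identifiers.

package main

import "fmt"

// Kind plays the role of the renamed types.Kind.
type Kind uint8

const (
	Txxx Kind = iota
	TINT
	TSTRING
	TFUNC
)

// Type keeps its kind unexported and exposes it via Kind(), as the
// commit does for *types.Type.
type Type struct {
	kind Kind
}

func (t *Type) Kind() Kind { return t.kind }

// okForAdd is an okfor-style table indexed by Kind.
var okForAdd = [...]bool{TINT: true, TSTRING: true}

// checkAdd switches on t.Kind() instead of reading a public Etype field.
func checkAdd(t *Type) error {
	if int(t.Kind()) >= len(okForAdd) || !okForAdd[t.Kind()] {
		return fmt.Errorf("operator + not defined on kind %d", t.Kind())
	}
	return nil
}

func main() {
	fmt.Println(checkAdd(&Type{kind: TINT}))  // <nil>
	fmt.Println(checkAdd(&Type{kind: TFUNC})) // error
}
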
diff --git a/src/cmd/compile/internal/gc/universe.go b/src/cmd/compile/internal/gc/universe.go
index b1492659b4..49e50734c6 100644
--- a/src/cmd/compile/internal/gc/universe.go
+++ b/src/cmd/compile/internal/gc/universe.go
@@ -15,7 +15,7 @@ import (
var basicTypes = [...]struct {
name string
- etype types.EType
+ etype types.Kind
}{
{"int8", types.TINT8},
{"int16", types.TINT16},
@@ -35,9 +35,9 @@ var basicTypes = [...]struct {
var typedefs = [...]struct {
name string
- etype types.EType
- sameas32 types.EType
- sameas64 types.EType
+ etype types.Kind
+ sameas32 types.Kind
+ sameas64 types.Kind
}{
{"int", types.TINT, types.TINT32, types.TINT64},
{"uint", types.TUINT, types.TUINT32, types.TUINT64},
@@ -99,14 +99,14 @@ func initUniverse() {
// string is same as slice wo the cap
sizeofString = Rnd(sliceLenOffset+int64(Widthptr), int64(Widthptr))
- for et := types.EType(0); et < types.NTYPE; et++ {
+ for et := types.Kind(0); et < types.NTYPE; et++ {
simtype[et] = et
}
types.Types[types.TANY] = types.New(types.TANY)
types.Types[types.TINTER] = types.New(types.TINTER) // empty interface
- defBasic := func(kind types.EType, pkg *types.Pkg, name string) *types.Type {
+ defBasic := func(kind types.Kind, pkg *types.Pkg, name string) *types.Type {
sym := pkg.Lookup(name)
n := ir.NewNameAt(src.NoXPos, sym)
n.SetOp(ir.OTYPE)
@@ -140,18 +140,18 @@ func initUniverse() {
// of less informative error messages involving bytes and runes)?
// (Alternatively, we could introduce an OTALIAS node representing
// type aliases, albeit at the cost of having to deal with it everywhere).
- types.Bytetype = defBasic(types.TUINT8, ir.BuiltinPkg, "byte")
- types.Runetype = defBasic(types.TINT32, ir.BuiltinPkg, "rune")
+ types.ByteType = defBasic(types.TUINT8, ir.BuiltinPkg, "byte")
+ types.RuneType = defBasic(types.TINT32, ir.BuiltinPkg, "rune")
// error type
s := ir.BuiltinPkg.Lookup("error")
n := ir.NewNameAt(src.NoXPos, s)
n.SetOp(ir.OTYPE)
- types.Errortype = types.NewNamed(n)
- types.Errortype.SetUnderlying(makeErrorInterface())
- n.SetType(types.Errortype)
+ types.ErrorType = types.NewNamed(n)
+ types.ErrorType.SetUnderlying(makeErrorInterface())
+ n.SetType(types.ErrorType)
s.Def = n
- dowidth(types.Errortype)
+ dowidth(types.ErrorType)
types.Types[types.TUNSAFEPTR] = defBasic(types.TUNSAFEPTR, unsafepkg, "Pointer")
@@ -218,7 +218,7 @@ func initUniverse() {
isComplex[types.TCOMPLEX128] = true
// initialize okfor
- for et := types.EType(0); et < types.NTYPE; et++ {
+ for et := types.Kind(0); et < types.NTYPE; et++ {
if isInt[et] || et == types.TIDEAL {
okforeq[et] = true
okforcmp[et] = true
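
initUniverse now iterates over types.Kind values when it builds the okfor tables. A self-contained sketch of that table-building loop follows; the kinds, table names, and classifications here are illustrative stand-ins, not the real universe setup.

package main

import "fmt"

type Kind uint8

const (
	Txxx Kind = iota
	TINT8
	TINT16
	TINT32
	TINT64
	TSTRING
	NTYPE // number of kinds, used to size the tables
)

var (
	isInt    [NTYPE]bool
	okforeq  [NTYPE]bool
	okforadd [NTYPE]bool
)

func initTables() {
	for _, k := range []Kind{TINT8, TINT16, TINT32, TINT64} {
		isInt[k] = true
	}
	// Same shape as the loop in initUniverse: walk every Kind and derive
	// the per-operator tables from the basic classifications.
	for et := Kind(0); et < NTYPE; et++ {
		if isInt[et] || et == TSTRING {
			okforeq[et] = true
			okforadd[et] = true
		}
	}
}

func main() {
	initTables()
	fmt.Println(okforadd[TSTRING], okforeq[TINT32], okforadd[Txxx]) // true true false
}
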
diff --git a/src/cmd/compile/internal/gc/walk.go b/src/cmd/compile/internal/gc/walk.go
index 511cdd3685..b3af353c3f 100644
--- a/src/cmd/compile/internal/gc/walk.go
+++ b/src/cmd/compile/internal/gc/walk.go
@@ -423,7 +423,7 @@ func walkexpr(n ir.Node, init *ir.Nodes) ir.Node {
// Eagerly checkwidth all expressions for the back end.
if n.Type() != nil && !n.Type().WidthCalculated() {
- switch n.Type().Etype {
+ switch n.Type().Kind() {
case types.TBLANK, types.TNIL, types.TIDEAL:
default:
checkwidth(n.Type())
@@ -975,7 +975,7 @@ opswitch:
n.SetRight(walkexpr(n.Right(), init))
// rewrite complex div into function call.
- et := n.Left().Type().Etype
+ et := n.Left().Type().Kind()
if isComplex[et] && n.Op() == ir.ODIV {
t := n.Type()
@@ -1638,7 +1638,7 @@ func markUsedIfaceMethod(n ir.Node) {
// name can be derived from the names of the returned types.
//
// If no such function is necessary, it returns (Txxx, Txxx).
-func rtconvfn(src, dst *types.Type) (param, result types.EType) {
+func rtconvfn(src, dst *types.Type) (param, result types.Kind) {
if thearch.SoftFloat {
return types.Txxx, types.Txxx
}
@@ -1646,31 +1646,31 @@ func rtconvfn(src, dst *types.Type) (param, result types.EType) {
switch thearch.LinkArch.Family {
case sys.ARM, sys.MIPS:
if src.IsFloat() {
- switch dst.Etype {
+ switch dst.Kind() {
case types.TINT64, types.TUINT64:
- return types.TFLOAT64, dst.Etype
+ return types.TFLOAT64, dst.Kind()
}
}
if dst.IsFloat() {
- switch src.Etype {
+ switch src.Kind() {
case types.TINT64, types.TUINT64:
- return src.Etype, types.TFLOAT64
+ return src.Kind(), types.TFLOAT64
}
}
case sys.I386:
if src.IsFloat() {
- switch dst.Etype {
+ switch dst.Kind() {
case types.TINT64, types.TUINT64:
- return types.TFLOAT64, dst.Etype
+ return types.TFLOAT64, dst.Kind()
case types.TUINT32, types.TUINT, types.TUINTPTR:
return types.TFLOAT64, types.TUINT32
}
}
if dst.IsFloat() {
- switch src.Etype {
+ switch src.Kind() {
case types.TINT64, types.TUINT64:
- return src.Etype, types.TFLOAT64
+ return src.Kind(), types.TFLOAT64
case types.TUINT32, types.TUINT, types.TUINTPTR:
return types.TUINT32, types.TFLOAT64
}
@@ -1937,7 +1937,7 @@ func walkprint(nn ir.Node, init *ir.Nodes) ir.Node {
for i, n := range nn.List().Slice() {
if n.Op() == ir.OLITERAL {
if n.Type() == types.UntypedRune {
- n = defaultlit(n, types.Runetype)
+ n = defaultlit(n, types.RuneType)
}
switch n.Val().Kind() {
@@ -1949,17 +1949,17 @@ func walkprint(nn ir.Node, init *ir.Nodes) ir.Node {
}
}
- if n.Op() != ir.OLITERAL && n.Type() != nil && n.Type().Etype == types.TIDEAL {
+ if n.Op() != ir.OLITERAL && n.Type() != nil && n.Type().Kind() == types.TIDEAL {
n = defaultlit(n, types.Types[types.TINT64])
}
n = defaultlit(n, nil)
nn.List().SetIndex(i, n)
- if n.Type() == nil || n.Type().Etype == types.TFORW {
+ if n.Type() == nil || n.Type().Kind() == types.TFORW {
continue
}
var on ir.Node
- switch n.Type().Etype {
+ switch n.Type().Kind() {
case types.TINTER:
if n.Type().IsEmptyInterface() {
on = syslook("printeface")
@@ -1984,7 +1984,7 @@ func walkprint(nn ir.Node, init *ir.Nodes) ir.Node {
on = syslook("printslice")
on = substArgTypes(on, n.Type()) // any-1
case types.TUINT, types.TUINT8, types.TUINT16, types.TUINT32, types.TUINT64, types.TUINTPTR:
- if isRuntimePkg(n.Type().Sym.Pkg) && n.Type().Sym.Name == "hex" {
+ if isRuntimePkg(n.Type().Sym().Pkg) && n.Type().Sym().Name == "hex" {
on = syslook("printhex")
} else {
on = syslook("printuint")
@@ -2058,9 +2058,9 @@ func isReflectHeaderDataField(l ir.Node) bool {
var tsym *types.Sym
switch l.Op() {
case ir.ODOT:
- tsym = l.Left().Type().Sym
+ tsym = l.Left().Type().Sym()
case ir.ODOTPTR:
- tsym = l.Left().Type().Elem().Sym
+ tsym = l.Left().Type().Elem().Sym()
default:
return false
}
@@ -2484,7 +2484,7 @@ func heapmoves() {
}
func vmkcall(fn ir.Node, t *types.Type, init *ir.Nodes, va []ir.Node) ir.Node {
- if fn.Type() == nil || fn.Type().Etype != types.TFUNC {
+ if fn.Type() == nil || fn.Type().Kind() != types.TFUNC {
base.Fatalf("mkcall %v %v", fn, fn.Type())
}
@@ -3264,7 +3264,7 @@ func walkcompare(n ir.Node, init *ir.Nodes) ir.Node {
maxcmpsize = 2 * int64(thearch.LinkArch.RegSize)
}
- switch t.Etype {
+ switch t.Kind() {
default:
if base.Debug.Libfuzzer != 0 && t.IsInteger() {
n.SetLeft(cheapexpr(n.Left(), init))
@@ -3315,7 +3315,7 @@ func walkcompare(n ir.Node, init *ir.Nodes) ir.Node {
return n
case types.TARRAY:
// We can compare several elements at once with 2/4/8 byte integer compares
- inline = t.NumElem() <= 1 || (issimple[t.Elem().Etype] && (t.NumElem() <= 4 || t.Elem().Width*t.NumElem() <= maxcmpsize))
+ inline = t.NumElem() <= 1 || (issimple[t.Elem().Kind()] && (t.NumElem() <= 4 || t.Elem().Width*t.NumElem() <= maxcmpsize))
case types.TSTRUCT:
inline = t.NumComponents(types.IgnoreBlankFields) <= 4
}
@@ -3697,7 +3697,7 @@ func usemethod(n ir.Node) {
}
if res1 == nil {
- if p0.Type.Etype != types.TINT {
+ if p0.Type.Kind() != types.TINT {
return
}
} else {
@@ -3712,7 +3712,7 @@ func usemethod(n ir.Node) {
// Note: Don't rely on res0.Type.String() since its formatting depends on multiple factors
// (including global variables such as numImports - was issue #19028).
// Also need to check for reflect package itself (see Issue #38515).
- if s := res0.Type.Sym; s != nil && s.Name == "Method" && isReflectPkg(s.Pkg) {
+ if s := res0.Type.Sym(); s != nil && s.Name == "Method" && isReflectPkg(s.Pkg) {
Curfn.SetReflectMethod(true)
// The LSym is initialized at this point. We need to set the attribute on the LSym.
Curfn.LSym.Set(obj.AttrReflectMethod, true)
@@ -3756,7 +3756,7 @@ func usefield(n ir.Node) {
if outer.IsPtr() {
outer = outer.Elem()
}
- if outer.Sym == nil {
+ if outer.Sym() == nil {
base.Errorf("tracked field must be in named struct type")
}
if !types.IsExported(field.Sym.Name) {
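
rtconvfn's signature now returns a pair of types.Kind values but its selection logic is unchanged: on targets without direct 64-bit float/integer conversions it reports the runtime helper's parameter and result kinds. A rough standalone sketch of that selection, using invented local names rather than the walk package's:

package main

import "fmt"

type Kind uint8

const (
	Txxx Kind = iota // no runtime helper needed
	TINT64
	TUINT64
	TFLOAT64
)

// rtconvKinds mirrors the shape of rtconvfn for a 32-bit target:
// converting a float to a 64-bit integer goes through a runtime helper,
// so report the helper's parameter and result kinds.
func rtconvKinds(srcIsFloat bool, dst Kind) (param, result Kind) {
	if srcIsFloat {
		switch dst {
		case TINT64, TUINT64:
			return TFLOAT64, dst
		}
	}
	return Txxx, Txxx // direct machine conversion, no helper
}

func main() {
	fmt.Println(rtconvKinds(true, TINT64)) // helper converts float64 -> int64
	fmt.Println(rtconvKinds(true, Txxx))   // no helper needed
}
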
diff --git a/src/cmd/compile/internal/ir/fmt.go b/src/cmd/compile/internal/ir/fmt.go
index a111471222..5bb1ed857c 100644
--- a/src/cmd/compile/internal/ir/fmt.go
+++ b/src/cmd/compile/internal/ir/fmt.go
@@ -317,7 +317,7 @@ func (m FmtMode) prepareArgs(args []interface{}) {
args[i] = &fmtSym{arg, m}
case Nodes:
args[i] = &fmtNodes{arg, m}
- case int32, int64, string, types.EType, constant.Value:
+ case int32, int64, string, types.Kind, constant.Value:
// OK: printing these types doesn't depend on mode
default:
base.Fatalf("mode.prepareArgs type %T", arg)
@@ -590,18 +590,18 @@ func tconv2(b *bytes.Buffer, t *types.Type, flag FmtFlag, mode FmtMode, visited
b.WriteString("<T>")
return
}
- if t.Etype == types.TSSA {
+ if t.Kind() == types.TSSA {
b.WriteString(t.Extra.(string))
return
}
- if t.Etype == types.TTUPLE {
+ if t.Kind() == types.TTUPLE {
b.WriteString(t.FieldType(0).String())
b.WriteByte(',')
b.WriteString(t.FieldType(1).String())
return
}
- if t.Etype == types.TRESULTS {
+ if t.Kind() == types.TRESULTS {
tys := t.Extra.(*types.Results).Types
for i, et := range tys {
if i > 0 {
@@ -616,51 +616,51 @@ func tconv2(b *bytes.Buffer, t *types.Type, flag FmtFlag, mode FmtMode, visited
if mode == FTypeIdName {
flag |= FmtUnsigned
}
- if t == types.Bytetype || t == types.Runetype {
+ if t == types.ByteType || t == types.RuneType {
// in %-T mode collapse rune and byte with their originals.
switch mode {
case FTypeIdName, FTypeId:
- t = types.Types[t.Etype]
+ t = types.Types[t.Kind()]
default:
- sconv2(b, t.Sym, FmtShort, mode)
+ sconv2(b, t.Sym(), FmtShort, mode)
return
}
}
- if t == types.Errortype {
+ if t == types.ErrorType {
b.WriteString("error")
return
}
// Unless the 'L' flag was specified, if the type has a name, just print that name.
- if flag&FmtLong == 0 && t.Sym != nil && t != types.Types[t.Etype] {
+ if flag&FmtLong == 0 && t.Sym() != nil && t != types.Types[t.Kind()] {
switch mode {
case FTypeId, FTypeIdName:
if flag&FmtShort != 0 {
if t.Vargen != 0 {
- sconv2(b, t.Sym, FmtShort, mode)
+ sconv2(b, t.Sym(), FmtShort, mode)
fmt.Fprintf(b, "·%d", t.Vargen)
return
}
- sconv2(b, t.Sym, FmtShort, mode)
+ sconv2(b, t.Sym(), FmtShort, mode)
return
}
if mode == FTypeIdName {
- sconv2(b, t.Sym, FmtUnsigned, mode)
+ sconv2(b, t.Sym(), FmtUnsigned, mode)
return
}
- if t.Sym.Pkg == LocalPkg && t.Vargen != 0 {
- b.WriteString(mode.Sprintf("%v·%d", t.Sym, t.Vargen))
+ if t.Sym().Pkg == LocalPkg && t.Vargen != 0 {
+ b.WriteString(mode.Sprintf("%v·%d", t.Sym(), t.Vargen))
return
}
}
- sconv2(b, t.Sym, 0, mode)
+ sconv2(b, t.Sym(), 0, mode)
return
}
- if int(t.Etype) < len(BasicTypeNames) && BasicTypeNames[t.Etype] != "" {
+ if int(t.Kind()) < len(BasicTypeNames) && BasicTypeNames[t.Kind()] != "" {
var name string
switch t {
case types.UntypedBool:
@@ -676,14 +676,14 @@ func tconv2(b *bytes.Buffer, t *types.Type, flag FmtFlag, mode FmtMode, visited
case types.UntypedComplex:
name = "untyped complex"
default:
- name = BasicTypeNames[t.Etype]
+ name = BasicTypeNames[t.Kind()]
}
b.WriteString(name)
return
}
if mode == FDbg {
- b.WriteString(t.Etype.String())
+ b.WriteString(t.Kind().String())
b.WriteByte('-')
tconv2(b, t, flag, FErr, visited)
return
@@ -702,7 +702,7 @@ func tconv2(b *bytes.Buffer, t *types.Type, flag FmtFlag, mode FmtMode, visited
visited[t] = b.Len()
defer delete(visited, t)
- switch t.Etype {
+ switch t.Kind() {
case types.TPTR:
b.WriteByte('*')
switch mode {
@@ -734,7 +734,7 @@ func tconv2(b *bytes.Buffer, t *types.Type, flag FmtFlag, mode FmtMode, visited
tconv2(b, t.Elem(), 0, mode, visited)
default:
b.WriteString("chan ")
- if t.Elem() != nil && t.Elem().IsChan() && t.Elem().Sym == nil && t.Elem().ChanDir() == types.Crecv {
+ if t.Elem() != nil && t.Elem().IsChan() && t.Elem().Sym() == nil && t.Elem().ChanDir() == types.Crecv {
b.WriteByte('(')
tconv2(b, t.Elem(), 0, mode, visited)
b.WriteByte(')')
@@ -860,9 +860,9 @@ func tconv2(b *bytes.Buffer, t *types.Type, flag FmtFlag, mode FmtMode, visited
case types.TFORW:
b.WriteString("undefined")
- if t.Sym != nil {
+ if t.Sym() != nil {
b.WriteByte(' ')
- sconv2(b, t.Sym, 0, mode)
+ sconv2(b, t.Sym(), 0, mode)
}
case types.TUNSAFEPTR:
@@ -872,7 +872,7 @@ func tconv2(b *bytes.Buffer, t *types.Type, flag FmtFlag, mode FmtMode, visited
b.WriteString("Txxx")
default:
// Don't know how to handle - fall back to detailed prints.
- b.WriteString(mode.Sprintf("%v <%v>", t.Etype, t.Sym))
+ b.WriteString(mode.Sprintf("%v <%v>", t.Kind(), t.Sym()))
}
}
@@ -1446,7 +1446,7 @@ func exprFmt(n Node, s fmt.State, prec int, mode FmtMode) {
OSTR2BYTES,
OSTR2RUNES,
ORUNESTR:
- if n.Type() == nil || n.Type().Sym == nil {
+ if n.Type() == nil || n.Type().Sym() == nil {
mode.Fprintf(s, "(%v)", n.Type())
} else {
mode.Fprintf(s, "%v", n.Type())
@@ -1564,7 +1564,7 @@ func nodeFmt(n Node, s fmt.State, flag FmtFlag, mode FmtMode) {
}
if flag&FmtLong != 0 && t != nil {
- if t.Etype == types.TNIL {
+ if t.Kind() == types.TNIL {
fmt.Fprint(s, "nil")
} else if n.Op() == ONAME && n.Name().AutoTemp() {
mode.Fprintf(s, "%v value", t)
diff --git a/src/cmd/compile/internal/ir/node.go b/src/cmd/compile/internal/ir/node.go
index a7144eee44..fc4c593929 100644
--- a/src/cmd/compile/internal/ir/node.go
+++ b/src/cmd/compile/internal/ir/node.go
@@ -647,7 +647,7 @@ const (
GoBuildPragma
)
-func AsNode(n types.IRNode) Node {
+func AsNode(n types.Object) Node {
if n == nil {
return nil
}
diff --git a/src/cmd/compile/internal/ir/type.go b/src/cmd/compile/internal/ir/type.go
index 446145b24c..d2f5bb9239 100644
--- a/src/cmd/compile/internal/ir/type.go
+++ b/src/cmd/compile/internal/ir/type.go
@@ -353,7 +353,7 @@ func (n *typeNode) String() string { return fmt.Sprint(n) }
func (n *typeNode) Format(s fmt.State, verb rune) { FmtNode(n, s, verb) }
func (n *typeNode) rawCopy() Node { c := *n; return &c }
func (n *typeNode) Type() *types.Type { return n.typ }
-func (n *typeNode) Sym() *types.Sym { return n.typ.Sym }
+func (n *typeNode) Sym() *types.Sym { return n.typ.Sym() }
func (n *typeNode) CanBeNtype() {}
// TypeNode returns the Node representing the type t.
diff --git a/src/cmd/compile/internal/ir/val.go b/src/cmd/compile/internal/ir/val.go
index 9035e90084..aae965bb4c 100644
--- a/src/cmd/compile/internal/ir/val.go
+++ b/src/cmd/compile/internal/ir/val.go
@@ -73,7 +73,7 @@ func AssertValidTypeForConst(t *types.Type, v constant.Value) {
func ValidTypeForConst(t *types.Type, v constant.Value) bool {
switch v.Kind() {
case constant.Unknown:
- return OKForConst[t.Etype]
+ return OKForConst[t.Kind()]
case constant.Bool:
return t.IsBoolean()
case constant.String:
diff --git a/src/cmd/compile/internal/ssa/expand_calls.go b/src/cmd/compile/internal/ssa/expand_calls.go
index f266e49327..0eba238d81 100644
--- a/src/cmd/compile/internal/ssa/expand_calls.go
+++ b/src/cmd/compile/internal/ssa/expand_calls.go
@@ -69,7 +69,7 @@ func expandCalls(f *Func) {
// intPairTypes returns the pair of 32-bit int types needed to encode a 64-bit integer type on a target
// that has no 64-bit integer registers.
- intPairTypes := func(et types.EType) (tHi, tLo *types.Type) {
+ intPairTypes := func(et types.Kind) (tHi, tLo *types.Type) {
tHi = typ.UInt32
if et == types.TINT64 {
tHi = typ.Int32
@@ -294,7 +294,7 @@ func expandCalls(f *Func) {
case OpStructSelect:
w := selector.Args[0]
var ls []LocalSlot
- if w.Type.Etype != types.TSTRUCT { // IData artifact
+ if w.Type.Kind() != types.TSTRUCT { // IData artifact
ls = rewriteSelect(leaf, w, offset)
} else {
ls = rewriteSelect(leaf, w, offset+w.Type.FieldOff(int(selector.AuxInt)))
@@ -383,7 +383,7 @@ func expandCalls(f *Func) {
decomposeOne func(pos src.XPos, b *Block, base, source, mem *Value, t1 *types.Type, offArg, offStore int64) *Value,
decomposeTwo func(pos src.XPos, b *Block, base, source, mem *Value, t1, t2 *types.Type, offArg, offStore int64) *Value) *Value {
u := source.Type
- switch u.Etype {
+ switch u.Kind() {
case types.TARRAY:
elem := u.Elem()
for i := int64(0); i < u.NumElem(); i++ {
@@ -403,7 +403,7 @@ func expandCalls(f *Func) {
if t.Width == regSize {
break
}
- tHi, tLo := intPairTypes(t.Etype)
+ tHi, tLo := intPairTypes(t.Kind())
mem = decomposeOne(pos, b, base, source, mem, tHi, source.AuxInt+hiOffset, offset+hiOffset)
pos = pos.WithNotStmt()
return decomposeOne(pos, b, base, source, mem, tLo, source.AuxInt+lowOffset, offset+lowOffset)
@@ -491,7 +491,7 @@ func expandCalls(f *Func) {
return storeArgOrLoad(pos, b, base, source.Args[0], mem, t.Elem(), offset)
case OpInt64Make:
- tHi, tLo := intPairTypes(t.Etype)
+ tHi, tLo := intPairTypes(t.Kind())
mem = storeArgOrLoad(pos, b, base, source.Args[0], mem, tHi, offset+hiOffset)
pos = pos.WithNotStmt()
return storeArgOrLoad(pos, b, base, source.Args[1], mem, tLo, offset+lowOffset)
@@ -524,7 +524,7 @@ func expandCalls(f *Func) {
}
// For nodes that cannot be taken apart -- OpSelectN, other structure selectors.
- switch t.Etype {
+ switch t.Kind() {
case types.TARRAY:
elt := t.Elem()
if source.Type != t && t.NumElem() == 1 && elt.Width == t.Width && t.Width == regSize {
@@ -576,7 +576,7 @@ func expandCalls(f *Func) {
if t.Width == regSize {
break
}
- tHi, tLo := intPairTypes(t.Etype)
+ tHi, tLo := intPairTypes(t.Kind())
sel := source.Block.NewValue1(pos, OpInt64Hi, tHi, source)
mem = storeArgOrLoad(pos, b, base, sel, mem, tHi, offset+hiOffset)
pos = pos.WithNotStmt()
@@ -873,7 +873,7 @@ func expandCalls(f *Func) {
offset := int64(0)
switch v.Op {
case OpStructSelect:
- if w.Type.Etype == types.TSTRUCT {
+ if w.Type.Kind() == types.TSTRUCT {
offset = w.Type.FieldOff(int(v.AuxInt))
} else { // Immediate interface data artifact, offset is zero.
f.Fatalf("Expand calls interface data problem, func %s, v=%s, w=%s\n", f.Name, v.LongString(), w.LongString())
diff --git a/src/cmd/compile/internal/ssa/export_test.go b/src/cmd/compile/internal/ssa/export_test.go
index df83383308..5a81f76ceb 100644
--- a/src/cmd/compile/internal/ssa/export_test.go
+++ b/src/cmd/compile/internal/ssa/export_test.go
@@ -140,7 +140,7 @@ func init() {
// so this test setup can share it.
types.Tconv = func(t *types.Type, flag, mode int) string {
- return t.Etype.String()
+ return t.Kind().String()
}
types.Sconv = func(s *types.Sym, flag, mode int) string {
return "sym"
@@ -149,13 +149,13 @@ func init() {
fmt.Fprintf(s, "sym")
}
types.FormatType = func(t *types.Type, s fmt.State, verb rune, mode int) {
- fmt.Fprintf(s, "%v", t.Etype)
+ fmt.Fprintf(s, "%v", t.Kind())
}
types.Dowidth = func(t *types.Type) {}
for _, typ := range [...]struct {
width int64
- et types.EType
+ et types.Kind
}{
{1, types.TINT8},
{1, types.TUINT8},
diff --git a/src/cmd/compile/internal/ssa/regalloc.go b/src/cmd/compile/internal/ssa/regalloc.go
index 459a9923f7..376ca97512 100644
--- a/src/cmd/compile/internal/ssa/regalloc.go
+++ b/src/cmd/compile/internal/ssa/regalloc.go
@@ -783,9 +783,9 @@ func (s *regAllocState) compatRegs(t *types.Type) regMask {
return 0
}
if t.IsFloat() || t == types.TypeInt128 {
- if t.Etype == types.TFLOAT32 && s.f.Config.fp32RegMask != 0 {
+ if t.Kind() == types.TFLOAT32 && s.f.Config.fp32RegMask != 0 {
m = s.f.Config.fp32RegMask
- } else if t.Etype == types.TFLOAT64 && s.f.Config.fp64RegMask != 0 {
+ } else if t.Kind() == types.TFLOAT64 && s.f.Config.fp64RegMask != 0 {
m = s.f.Config.fp64RegMask
} else {
m = s.f.Config.fpRegMask
diff --git a/src/cmd/compile/internal/types/etype_string.go b/src/cmd/compile/internal/types/etype_string.go
index 14fd5b71df..e7698296ab 100644
--- a/src/cmd/compile/internal/types/etype_string.go
+++ b/src/cmd/compile/internal/types/etype_string.go
@@ -52,8 +52,8 @@ const _EType_name = "xxxINT8UINT8INT16UINT16INT32UINT32INT64UINT64INTUINTUINTPTR
var _EType_index = [...]uint8{0, 3, 7, 12, 17, 23, 28, 34, 39, 45, 48, 52, 59, 68, 78, 85, 92, 96, 99, 103, 108, 113, 119, 123, 126, 131, 135, 138, 144, 153, 158, 161, 166, 174, 182, 185, 190, 197, 202}
-func (i EType) String() string {
- if i >= EType(len(_EType_index)-1) {
+func (i Kind) String() string {
+ if i >= Kind(len(_EType_index)-1) {
return "EType(" + strconv.FormatInt(int64(i), 10) + ")"
}
return _EType_name[_EType_index[i]:_EType_index[i+1]]
diff --git a/src/cmd/compile/internal/types/identity.go b/src/cmd/compile/internal/types/identity.go
index a77f514df9..9bc636d7ff 100644
--- a/src/cmd/compile/internal/types/identity.go
+++ b/src/cmd/compile/internal/types/identity.go
@@ -25,17 +25,17 @@ func identical(t1, t2 *Type, cmpTags bool, assumedEqual map[typePair]struct{}) b
if t1 == t2 {
return true
}
- if t1 == nil || t2 == nil || t1.Etype != t2.Etype || t1.Broke() || t2.Broke() {
+ if t1 == nil || t2 == nil || t1.kind != t2.kind || t1.Broke() || t2.Broke() {
return false
}
- if t1.Sym != nil || t2.Sym != nil {
+ if t1.sym != nil || t2.sym != nil {
// Special case: we keep byte/uint8 and rune/int32
// separate for error messages. Treat them as equal.
- switch t1.Etype {
+ switch t1.kind {
case TUINT8:
- return (t1 == Types[TUINT8] || t1 == Bytetype) && (t2 == Types[TUINT8] || t2 == Bytetype)
+ return (t1 == Types[TUINT8] || t1 == ByteType) && (t2 == Types[TUINT8] || t2 == ByteType)
case TINT32:
- return (t1 == Types[TINT32] || t1 == Runetype) && (t2 == Types[TINT32] || t2 == Runetype)
+ return (t1 == Types[TINT32] || t1 == RuneType) && (t2 == Types[TINT32] || t2 == RuneType)
default:
return false
}
@@ -52,7 +52,7 @@ func identical(t1, t2 *Type, cmpTags bool, assumedEqual map[typePair]struct{}) b
}
assumedEqual[typePair{t1, t2}] = struct{}{}
- switch t1.Etype {
+ switch t1.kind {
case TIDEAL:
// Historically, cmd/compile used a single "untyped
// number" type, so all untyped number types were
diff --git a/src/cmd/compile/internal/types/scope.go b/src/cmd/compile/internal/types/scope.go
index 33a02c543d..37ac90a025 100644
--- a/src/cmd/compile/internal/types/scope.go
+++ b/src/cmd/compile/internal/types/scope.go
@@ -15,7 +15,7 @@ var Block int32 // current block number
// restored once the block scope ends.
type dsym struct {
sym *Sym // sym == nil indicates stack mark
- def IRNode
+ def Object
block int32
lastlineno src.XPos // last declaration for diagnostic
}
@@ -79,16 +79,16 @@ func IsDclstackValid() bool {
}
// PkgDef returns the definition associated with s at package scope.
-func (s *Sym) PkgDef() IRNode {
+func (s *Sym) PkgDef() Object {
return *s.pkgDefPtr()
}
// SetPkgDef sets the definition associated with s at package scope.
-func (s *Sym) SetPkgDef(n IRNode) {
+func (s *Sym) SetPkgDef(n Object) {
*s.pkgDefPtr() = n
}
-func (s *Sym) pkgDefPtr() *IRNode {
+func (s *Sym) pkgDefPtr() *Object {
// Look for outermost saved declaration, which must be the
// package scope definition, if present.
for _, d := range dclstack {
diff --git a/src/cmd/compile/internal/types/sym.go b/src/cmd/compile/internal/types/sym.go
index 7272f1f786..490222d843 100644
--- a/src/cmd/compile/internal/types/sym.go
+++ b/src/cmd/compile/internal/types/sym.go
@@ -33,7 +33,7 @@ type Sym struct {
Name string // object name
// saved and restored by dcopy
- Def IRNode // definition: ONAME OTYPE OPACK or OLITERAL
+ Def Object // definition: ONAME OTYPE OPACK or OLITERAL
Block int32 // blocknumber to catch redeclaration
Lastlineno src.XPos // last declaration for diagnostic
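
The IRNode interface becomes Object: a tiny interface that lets the types package refer back to ir nodes without importing cmd/compile/internal/ir. A standalone sketch of that import-cycle-breaking pattern, with hypothetical names in a single package for brevity:

package main

import "fmt"

// Object plays the role of types.Object: the minimal view of an IR node
// that the types side needs, so it never has to import the ir side.
type Object interface {
	Name() string
}

// Sym is a symbol table entry; its definition sits behind the interface,
// just as Sym.Def now has type Object in the hunk above.
type Sym struct {
	Name string
	Def  Object
}

// node stands in for an ir.Name; it satisfies Object.
type node struct{ name string }

func (n *node) Name() string { return n.name }

func main() {
	s := &Sym{Name: "error"}
	s.Def = &node{name: "error"} // the ir side plugs its node in
	fmt.Println(s.Def.Name())    // the types side only sees the Object view
}
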
diff --git a/src/cmd/compile/internal/types/type.go b/src/cmd/compile/internal/types/type.go
index f0211a67fb..36aac53124 100644
--- a/src/cmd/compile/internal/types/type.go
+++ b/src/cmd/compile/internal/types/type.go
@@ -14,7 +14,7 @@ import (
// IRNode represents an ir.Node, but without needing to import cmd/compile/internal/ir,
// which would cause an import cycle. The uses in other packages must type assert
// values of type IRNode to ir.Node or a more specific type.
-type IRNode interface {
+type Object interface {
Pos() src.XPos
Sym() *Sym
Type() *Type
@@ -23,10 +23,10 @@ type IRNode interface {
//go:generate stringer -type EType -trimprefix T
// EType describes a kind of type.
-type EType uint8
+type Kind uint8
const (
- Txxx EType = iota
+ Txxx Kind = iota
TINT8
TUINT8
@@ -103,11 +103,11 @@ var Types [NTYPE]*Type
var (
// Predeclared alias types. Kept separate for better error messages.
- Bytetype *Type
- Runetype *Type
+ ByteType *Type
+ RuneType *Type
// Predeclared error interface type.
- Errortype *Type
+ ErrorType *Type
// Types to represent untyped string and boolean constants.
UntypedString = New(TSTRING)
@@ -146,19 +146,19 @@ type Type struct {
methods Fields
allMethods Fields
- nod IRNode // canonical OTYPE node
- Orig *Type // original type (type literal or predefined type)
+ nod Object // canonical OTYPE node
+ underlying *Type // original type (type literal or predefined type)
// Cache of composite types, with this type being the element type.
- Cache struct {
+ cache struct {
ptr *Type // *T, or nil
slice *Type // []T, or nil
}
- Sym *Sym // symbol containing name, for named types
+ sym *Sym // symbol containing name, for named types
Vargen int32 // unique name for OTYPE/ONAME
- Etype EType // kind of type
+ kind Kind // kind of type
Align uint8 // the required alignment of this type, in bytes (0 means Width and Align have not yet been computed)
flags bitset8
@@ -185,16 +185,16 @@ func (t *Type) SetDeferwidth(b bool) { t.flags.set(typeDeferwidth, b) }
func (t *Type) SetRecur(b bool) { t.flags.set(typeRecur, b) }
// Kind returns the kind of type t.
-func (t *Type) Kind() EType { return t.Etype }
+func (t *Type) Kind() Kind { return t.kind }
// Sym returns the name of type t.
-func (t *Type) GetSym() *Sym { return t.Sym }
+func (t *Type) Sym() *Sym { return t.sym }
// Underlying returns the underlying type of type t.
-func (t *Type) Underlying() *Type { return t.Orig }
+func (t *Type) Underlying() *Type { return t.underlying }
// SetNod associates t with syntax node n.
-func (t *Type) SetNod(n IRNode) {
+func (t *Type) SetNod(n Object) {
// t.nod can be non-nil already
// in the case of shared *Types, like []byte or interface{}.
if t.nod == nil {
@@ -218,7 +218,7 @@ func (t *Type) Pos() src.XPos {
// cmd/compile itself, but we need to track it because it's exposed by
// the go/types API.
func (t *Type) Pkg() *Pkg {
- switch t.Etype {
+ switch t.kind {
case TFUNC:
return t.Extra.(*Func).pkg
case TSTRUCT:
@@ -233,7 +233,7 @@ func (t *Type) Pkg() *Pkg {
// SetPkg sets the package that t appeared in.
func (t *Type) SetPkg(pkg *Pkg) {
- switch t.Etype {
+ switch t.kind {
case TFUNC:
t.Extra.(*Func).pkg = pkg
case TSTRUCT:
@@ -392,7 +392,7 @@ type Field struct {
// For fields that represent function parameters, Nname points
// to the associated ONAME Node.
- Nname IRNode
+ Nname Object
// Offset in bytes of this field or method within its enclosing struct
// or interface Type.
@@ -420,7 +420,7 @@ func (f *Field) End() int64 {
// IsMethod reports whether f represents a method rather than a struct field.
func (f *Field) IsMethod() bool {
- return f.Type.Etype == TFUNC && f.Type.Recv() != nil
+ return f.Type.kind == TFUNC && f.Type.Recv() != nil
}
// Fields is a pointer to a slice of *Field.
@@ -475,14 +475,14 @@ func (f *Fields) Append(s ...*Field) {
}
// New returns a new Type of the specified kind.
-func New(et EType) *Type {
+func New(et Kind) *Type {
t := &Type{
- Etype: et,
+ kind: et,
Width: BADWIDTH,
}
- t.Orig = t
+ t.underlying = t
// TODO(josharian): lazily initialize some of these?
- switch t.Etype {
+ switch t.kind {
case TMAP:
t.Extra = new(Map)
case TFORW:
@@ -522,7 +522,7 @@ func NewArray(elem *Type, bound int64) *Type {
// NewSlice returns the slice Type with element type elem.
func NewSlice(elem *Type) *Type {
- if t := elem.Cache.slice; t != nil {
+ if t := elem.cache.slice; t != nil {
if t.Elem() != elem {
Fatalf("elem mismatch")
}
@@ -531,7 +531,7 @@ func NewSlice(elem *Type) *Type {
t := New(TSLICE)
t.Extra = Slice{Elem: elem}
- elem.Cache.slice = t
+ elem.cache.slice = t
return t
}
@@ -583,7 +583,7 @@ func NewPtr(elem *Type) *Type {
Fatalf("NewPtr: pointer to elem Type is nil")
}
- if t := elem.Cache.ptr; t != nil {
+ if t := elem.cache.ptr; t != nil {
if t.Elem() != elem {
Fatalf("NewPtr: elem mismatch")
}
@@ -595,7 +595,7 @@ func NewPtr(elem *Type) *Type {
t.Width = int64(Widthptr)
t.Align = uint8(Widthptr)
if NewPtrCacheEnabled {
- elem.Cache.ptr = t
+ elem.cache.ptr = t
}
return t
}
@@ -634,7 +634,7 @@ func SubstAny(t *Type, types *[]*Type) *Type {
return nil
}
- switch t.Etype {
+ switch t.kind {
default:
// Leave the type unchanged.
@@ -718,7 +718,7 @@ func (t *Type) copy() *Type {
}
nt := *t
// copy any *T Extra fields, to avoid aliasing
- switch t.Etype {
+ switch t.kind {
case TMAP:
x := *t.Extra.(*Map)
nt.Extra = &x
@@ -744,8 +744,8 @@ func (t *Type) copy() *Type {
Fatalf("ssa types cannot be copied")
}
// TODO(mdempsky): Find out why this is necessary and explain.
- if t.Orig == t {
- nt.Orig = &nt
+ if t.underlying == t {
+ nt.underlying = &nt
}
return &nt
}
@@ -755,8 +755,8 @@ func (f *Field) Copy() *Field {
return &nf
}
-func (t *Type) wantEtype(et EType) {
- if t.Etype != et {
+func (t *Type) wantEtype(et Kind) {
+ if t.kind != et {
Fatalf("want %v, but have %v", et, t)
}
}
@@ -810,7 +810,7 @@ func (t *Type) Key() *Type {
// Elem returns the type of elements of t.
// Usable with pointers, channels, arrays, slices, and maps.
func (t *Type) Elem() *Type {
- switch t.Etype {
+ switch t.kind {
case TPTR:
return t.Extra.(Ptr).Elem
case TARRAY:
@@ -822,7 +822,7 @@ func (t *Type) Elem() *Type {
case TMAP:
return t.Extra.(*Map).Elem
}
- Fatalf("Type.Elem %s", t.Etype)
+ Fatalf("Type.Elem %s", t.kind)
return nil
}
@@ -840,7 +840,7 @@ func (t *Type) FuncArgs() *Type {
// IsFuncArgStruct reports whether t is a struct representing function parameters.
func (t *Type) IsFuncArgStruct() bool {
- return t.Etype == TSTRUCT && t.Extra.(*Struct).Funarg != FunargNone
+ return t.kind == TSTRUCT && t.Extra.(*Struct).Funarg != FunargNone
}
func (t *Type) Methods() *Fields {
@@ -854,7 +854,7 @@ func (t *Type) AllMethods() *Fields {
}
func (t *Type) Fields() *Fields {
- switch t.Etype {
+ switch t.kind {
case TSTRUCT:
return &t.Extra.(*Struct).fields
case TINTER:
@@ -919,7 +919,7 @@ func (t *Type) ArgWidth() int64 {
}
func (t *Type) Size() int64 {
- if t.Etype == TSSA {
+ if t.kind == TSSA {
if t == TypeInt128 {
return 16
}
@@ -935,7 +935,7 @@ func (t *Type) Alignment() int64 {
}
func (t *Type) SimpleString() string {
- return t.Etype.String()
+ return t.kind.String()
}
// Cmp is a comparison between values a and b.
@@ -1019,31 +1019,31 @@ func (t *Type) cmp(x *Type) Cmp {
return CMPgt
}
- if t.Etype != x.Etype {
- return cmpForNe(t.Etype < x.Etype)
+ if t.kind != x.kind {
+ return cmpForNe(t.kind < x.kind)
}
- if t.Sym != nil || x.Sym != nil {
+ if t.sym != nil || x.sym != nil {
// Special case: we keep byte and uint8 separate
// for error messages. Treat them as equal.
- switch t.Etype {
+ switch t.kind {
case TUINT8:
- if (t == Types[TUINT8] || t == Bytetype) && (x == Types[TUINT8] || x == Bytetype) {
+ if (t == Types[TUINT8] || t == ByteType) && (x == Types[TUINT8] || x == ByteType) {
return CMPeq
}
case TINT32:
- if (t == Types[Runetype.Etype] || t == Runetype) && (x == Types[Runetype.Etype] || x == Runetype) {
+ if (t == Types[RuneType.kind] || t == RuneType) && (x == Types[RuneType.kind] || x == RuneType) {
return CMPeq
}
}
}
- if c := t.Sym.cmpsym(x.Sym); c != CMPeq {
+ if c := t.sym.cmpsym(x.sym); c != CMPeq {
return c
}
- if x.Sym != nil {
+ if x.sym != nil {
// Syms non-nil, if vargens match then equal.
if t.Vargen != x.Vargen {
return cmpForNe(t.Vargen < x.Vargen)
@@ -1052,7 +1052,7 @@ func (t *Type) cmp(x *Type) Cmp {
}
// both syms nil, look at structure below.
- switch t.Etype {
+ switch t.kind {
case TBOOL, TFLOAT32, TFLOAT64, TCOMPLEX64, TCOMPLEX128, TUNSAFEPTR, TUINTPTR,
TINT8, TINT16, TINT32, TINT64, TINT, TUINT8, TUINT16, TUINT32, TUINT64, TUINT:
return CMPeq
@@ -1209,15 +1209,15 @@ func (t *Type) cmp(x *Type) Cmp {
}
// IsKind reports whether t is a Type of the specified kind.
-func (t *Type) IsKind(et EType) bool {
- return t != nil && t.Etype == et
+func (t *Type) IsKind(et Kind) bool {
+ return t != nil && t.kind == et
}
func (t *Type) IsBoolean() bool {
- return t.Etype == TBOOL
+ return t.kind == TBOOL
}
-var unsignedEType = [...]EType{
+var unsignedEType = [...]Kind{
TINT8: TUINT8,
TUINT8: TUINT8,
TINT16: TUINT16,
@@ -1236,11 +1236,11 @@ func (t *Type) ToUnsigned() *Type {
if !t.IsInteger() {
Fatalf("unsignedType(%v)", t)
}
- return Types[unsignedEType[t.Etype]]
+ return Types[unsignedEType[t.kind]]
}
func (t *Type) IsInteger() bool {
- switch t.Etype {
+ switch t.kind {
case TINT8, TUINT8, TINT16, TUINT16, TINT32, TUINT32, TINT64, TUINT64, TINT, TUINT, TUINTPTR:
return true
}
@@ -1248,7 +1248,7 @@ func (t *Type) IsInteger() bool {
}
func (t *Type) IsSigned() bool {
- switch t.Etype {
+ switch t.kind {
case TINT8, TINT16, TINT32, TINT64, TINT:
return true
}
@@ -1256,7 +1256,7 @@ func (t *Type) IsSigned() bool {
}
func (t *Type) IsUnsigned() bool {
- switch t.Etype {
+ switch t.kind {
case TUINT8, TUINT16, TUINT32, TUINT64, TUINT, TUINTPTR:
return true
}
@@ -1264,32 +1264,32 @@ func (t *Type) IsUnsigned() bool {
}
func (t *Type) IsFloat() bool {
- return t.Etype == TFLOAT32 || t.Etype == TFLOAT64 || t == UntypedFloat
+ return t.kind == TFLOAT32 || t.kind == TFLOAT64 || t == UntypedFloat
}
func (t *Type) IsComplex() bool {
- return t.Etype == TCOMPLEX64 || t.Etype == TCOMPLEX128 || t == UntypedComplex
+ return t.kind == TCOMPLEX64 || t.kind == TCOMPLEX128 || t == UntypedComplex
}
// IsPtr reports whether t is a regular Go pointer type.
// This does not include unsafe.Pointer.
func (t *Type) IsPtr() bool {
- return t.Etype == TPTR
+ return t.kind == TPTR
}
// IsPtrElem reports whether t is the element of a pointer (to t).
func (t *Type) IsPtrElem() bool {
- return t.Cache.ptr != nil
+ return t.cache.ptr != nil
}
// IsUnsafePtr reports whether t is an unsafe pointer.
func (t *Type) IsUnsafePtr() bool {
- return t.Etype == TUNSAFEPTR
+ return t.kind == TUNSAFEPTR
}
// IsUintptr reports whether t is an uintptr.
func (t *Type) IsUintptr() bool {
- return t.Etype == TUINTPTR
+ return t.kind == TUINTPTR
}
// IsPtrShaped reports whether t is represented by a single machine pointer.
@@ -1298,13 +1298,13 @@ func (t *Type) IsUintptr() bool {
// that consist of a single pointer shaped type.
// TODO(mdempsky): Should it? See golang.org/issue/15028.
func (t *Type) IsPtrShaped() bool {
- return t.Etype == TPTR || t.Etype == TUNSAFEPTR ||
- t.Etype == TMAP || t.Etype == TCHAN || t.Etype == TFUNC
+ return t.kind == TPTR || t.kind == TUNSAFEPTR ||
+ t.kind == TMAP || t.kind == TCHAN || t.kind == TFUNC
}
// HasNil reports whether the set of values determined by t includes nil.
func (t *Type) HasNil() bool {
- switch t.Etype {
+ switch t.kind {
case TCHAN, TFUNC, TINTER, TMAP, TNIL, TPTR, TSLICE, TUNSAFEPTR:
return true
}
@@ -1312,31 +1312,31 @@ func (t *Type) HasNil() bool {
}
func (t *Type) IsString() bool {
- return t.Etype == TSTRING
+ return t.kind == TSTRING
}
func (t *Type) IsMap() bool {
- return t.Etype == TMAP
+ return t.kind == TMAP
}
func (t *Type) IsChan() bool {
- return t.Etype == TCHAN
+ return t.kind == TCHAN
}
func (t *Type) IsSlice() bool {
- return t.Etype == TSLICE
+ return t.kind == TSLICE
}
func (t *Type) IsArray() bool {
- return t.Etype == TARRAY
+ return t.kind == TARRAY
}
func (t *Type) IsStruct() bool {
- return t.Etype == TSTRUCT
+ return t.kind == TSTRUCT
}
func (t *Type) IsInterface() bool {
- return t.Etype == TINTER
+ return t.kind == TINTER
}
// IsEmptyInterface reports whether t is an empty interface type.
@@ -1352,7 +1352,7 @@ func (t *Type) NumFields() int {
return t.Fields().Len()
}
func (t *Type) FieldType(i int) *Type {
- if t.Etype == TTUPLE {
+ if t.kind == TTUPLE {
switch i {
case 0:
return t.Extra.(*Tuple).first
@@ -1362,7 +1362,7 @@ func (t *Type) FieldType(i int) *Type {
panic("bad tuple index")
}
}
- if t.Etype == TRESULTS {
+ if t.kind == TRESULTS {
return t.Extra.(*Results).Types[i]
}
return t.Field(i).Type
@@ -1393,7 +1393,7 @@ const (
// (and their comprised elements) are excluded from the count.
// struct { x, y [3]int } has six components; [10]struct{ x, y string } has twenty.
func (t *Type) NumComponents(countBlank componentsIncludeBlankFields) int64 {
- switch t.Etype {
+ switch t.kind {
case TSTRUCT:
if t.IsFuncArgStruct() {
Fatalf("NumComponents func arg struct")
@@ -1416,7 +1416,7 @@ func (t *Type) NumComponents(countBlank componentsIncludeBlankFields) int64 {
// if there is exactly one. Otherwise, it returns nil.
// Components are counted as in NumComponents, including blank fields.
func (t *Type) SoleComponent() *Type {
- switch t.Etype {
+ switch t.kind {
case TSTRUCT:
if t.IsFuncArgStruct() {
Fatalf("SoleComponent func arg struct")
@@ -1442,10 +1442,10 @@ func (t *Type) ChanDir() ChanDir {
}
func (t *Type) IsMemory() bool {
- if t == TypeMem || t.Etype == TTUPLE && t.Extra.(*Tuple).second == TypeMem {
+ if t == TypeMem || t.kind == TTUPLE && t.Extra.(*Tuple).second == TypeMem {
return true
}
- if t.Etype == TRESULTS {
+ if t.kind == TRESULTS {
if types := t.Extra.(*Results).Types; len(types) > 0 && types[len(types)-1] == TypeMem {
return true
}
@@ -1454,8 +1454,8 @@ func (t *Type) IsMemory() bool {
}
func (t *Type) IsFlags() bool { return t == TypeFlags }
func (t *Type) IsVoid() bool { return t == TypeVoid }
-func (t *Type) IsTuple() bool { return t.Etype == TTUPLE }
-func (t *Type) IsResults() bool { return t.Etype == TRESULTS }
+func (t *Type) IsTuple() bool { return t.kind == TTUPLE }
+func (t *Type) IsResults() bool { return t.kind == TRESULTS }
// IsUntyped reports whether t is an untyped type.
func (t *Type) IsUntyped() bool {
@@ -1465,7 +1465,7 @@ func (t *Type) IsUntyped() bool {
if t == UntypedString || t == UntypedBool {
return true
}
- switch t.Etype {
+ switch t.kind {
case TNIL, TIDEAL:
return true
}
@@ -1475,7 +1475,7 @@ func (t *Type) IsUntyped() bool {
// HasPointers reports whether t contains a heap pointer.
// Note that this function ignores pointers to go:notinheap types.
func (t *Type) HasPointers() bool {
- switch t.Etype {
+ switch t.kind {
case TINT, TUINT, TINT8, TUINT8, TINT16, TUINT16, TINT32, TUINT32, TINT64,
TUINT64, TUINTPTR, TFLOAT32, TFLOAT64, TCOMPLEX64, TCOMPLEX128, TBOOL, TSSA:
return false
@@ -1551,16 +1551,16 @@ var (
)
// NewNamed returns a new named type for the given type name.
-func NewNamed(obj IRNode) *Type {
+func NewNamed(obj Object) *Type {
t := New(TFORW)
- t.Sym = obj.Sym()
+ t.sym = obj.Sym()
t.nod = obj
return t
}
// Obj returns the type name for the named type t.
-func (t *Type) Obj() IRNode {
- if t.Sym != nil {
+func (t *Type) Obj() Object {
+ if t.sym != nil {
return t.nod
}
return nil
@@ -1568,7 +1568,7 @@ func (t *Type) Obj() IRNode {
// SetUnderlying sets the underlying type.
func (t *Type) SetUnderlying(underlying *Type) {
- if underlying.Etype == TFORW {
+ if underlying.kind == TFORW {
// This type isn't computed yet; when it is, update n.
underlying.ForwardType().Copyto = append(underlying.ForwardType().Copyto, t)
return
@@ -1577,11 +1577,11 @@ func (t *Type) SetUnderlying(underlying *Type) {
ft := t.ForwardType()
// TODO(mdempsky): Fix Type rekinding.
- t.Etype = underlying.Etype
+ t.kind = underlying.kind
t.Extra = underlying.Extra
t.Width = underlying.Width
t.Align = underlying.Align
- t.Orig = underlying.Orig
+ t.underlying = underlying.underlying
if underlying.NotInHeap() {
t.SetNotInHeap(true)
@@ -1612,9 +1612,9 @@ func (t *Type) SetUnderlying(underlying *Type) {
}
// NewNamed returns a new basic type of the given kind.
-func NewBasic(kind EType, obj IRNode) *Type {
+func NewBasic(kind Kind, obj Object) *Type {
t := New(kind)
- t.Sym = obj.Sym()
+ t.sym = obj.Sym()
t.nod = obj
return t
}
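
NewNamed and SetUnderlying now populate the unexported sym, kind, and underlying fields, with readers going through the Kind(), Sym(), and Underlying() accessors. A freestanding sketch of that flow; the local Kind, Type, New, NewNamed, and SetUnderlying here are simplified stand-ins, not the real package:

package main

import "fmt"

type Kind uint8

const (
	TFORW Kind = iota // forward-declared; filled in by SetUnderlying
	TINT
	TSTRING
)

type Type struct {
	kind       Kind
	sym        string
	underlying *Type
}

func (t *Type) Kind() Kind        { return t.kind }
func (t *Type) Sym() string       { return t.sym }
func (t *Type) Underlying() *Type { return t.underlying }

// New mirrors types.New: a fresh type is its own underlying type.
func New(k Kind) *Type {
	t := &Type{kind: k}
	t.underlying = t
	return t
}

// NewNamed mirrors types.NewNamed: start as a forward type carrying
// only the name, with the structure supplied later.
func NewNamed(name string) *Type {
	t := New(TFORW)
	t.sym = name
	return t
}

// SetUnderlying mirrors the rekinding in the hunk above: copy the kind
// and underlying type from the supplied type literal.
func (t *Type) SetUnderlying(u *Type) {
	t.kind = u.kind
	t.underlying = u.underlying
}

func main() {
	myString := NewNamed("MyString")
	myString.SetUnderlying(New(TSTRING))
	fmt.Println(myString.Sym(), myString.Kind() == TSTRING, myString.Underlying().Kind() == TSTRING)
	// MyString true true
}
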