From 9ea272e5ec5dd5eadd59d54c08377d5d9527a51b Mon Sep 17 00:00:00 2001 From: Matthew Dempsky Date: Tue, 29 Dec 2020 03:08:23 -0800 Subject: [dev.regabi] cmd/compile: simplify ir.Func somewhat Two simplifications: 1. Statements (including ODCLFUNC) don't have types, and the Func.Nname already has a type. There's no need for a second one. However, there is a lot of code that expects to be able to call Func.Type, so leave a forwarding method, like with Sym and Linksym. 2. Inline and remove ir.NewFuncNameAt. It doesn't really save any code, and it's only used a handful of places. Passes toolstash -cmp. Change-Id: I51acaa341897dae0fcdf2fa576a10174a2ae4d1e Reviewed-on: https://go-review.googlesource.com/c/go/+/280648 Trust: Matthew Dempsky Run-TryBot: Matthew Dempsky TryBot-Result: Go Bot Reviewed-by: Cuong Manh Le --- src/cmd/compile/internal/noder/noder.go | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) (limited to 'src/cmd/compile/internal/noder/noder.go') diff --git a/src/cmd/compile/internal/noder/noder.go b/src/cmd/compile/internal/noder/noder.go index 920f4839ad..f4b5e0cf91 100644 --- a/src/cmd/compile/internal/noder/noder.go +++ b/src/cmd/compile/internal/noder/noder.go @@ -524,7 +524,8 @@ func (p *noder) funcDecl(fun *syntax.FuncDecl) ir.Node { name = ir.BlankNode.Sym() // filled in by typecheckfunc } - f.Nname = ir.NewFuncNameAt(p.pos(fun.Name), name, f) + f.Nname = ir.NewNameAt(p.pos(fun.Name), name) + f.Nname.Func = f f.Nname.Defn = f f.Nname.Ntype = t @@ -1742,7 +1743,9 @@ func (p *noder) funcLit(expr *syntax.FuncLit) ir.Node { fn := ir.NewFunc(p.pos(expr)) fn.SetIsHiddenClosure(ir.CurFunc != nil) - fn.Nname = ir.NewFuncNameAt(p.pos(expr), ir.BlankNode.Sym(), fn) // filled in by typecheckclosure + + fn.Nname = ir.NewNameAt(p.pos(expr), ir.BlankNode.Sym()) // filled in by typecheckclosure + fn.Nname.Func = fn fn.Nname.Ntype = xtype fn.Nname.Defn = fn -- cgit v1.2.3-54-g00ecf From fd22df990545bce77ff78b27c4f7220c7a666a84 Mon Sep 17 00:00:00 2001 From: Matthew Dempsky Date: Thu, 31 Dec 2020 18:25:35 -0800 Subject: [dev.regabi] cmd/compile: remove idempotent Name() calls [generated] [git-generate] cd src/cmd/compile/internal/ir pkgs=$(grep -l -w Name ../*/*.go | xargs dirname | sort -u | grep -v '/ir$') rf ' ex . 
'"$(echo $pkgs)"' { var n *Name n.Name() -> n } ' Change-Id: I6bfce6417a6dba833d2f652ae212a32c11bc5ef6 Reviewed-on: https://go-review.googlesource.com/c/go/+/280972 Trust: Matthew Dempsky Run-TryBot: Matthew Dempsky Reviewed-by: Cuong Manh Le TryBot-Result: Go Bot --- src/cmd/compile/internal/dwarfgen/dwarf.go | 22 +++++++++---------- src/cmd/compile/internal/escape/escape.go | 2 +- src/cmd/compile/internal/gc/obj.go | 4 ++-- src/cmd/compile/internal/ir/expr.go | 4 ++-- src/cmd/compile/internal/ir/name.go | 4 ++-- src/cmd/compile/internal/liveness/plive.go | 14 ++++++------- src/cmd/compile/internal/noder/noder.go | 6 +++--- src/cmd/compile/internal/pkginit/initorder.go | 2 +- src/cmd/compile/internal/ssagen/nowb.go | 4 ++-- src/cmd/compile/internal/ssagen/pgen.go | 6 +++--- src/cmd/compile/internal/ssagen/ssa.go | 4 ++-- src/cmd/compile/internal/typecheck/func.go | 6 +++--- src/cmd/compile/internal/typecheck/iexport.go | 4 ++-- src/cmd/compile/internal/typecheck/typecheck.go | 28 ++++++++++++------------- src/cmd/compile/internal/walk/expr.go | 2 +- src/cmd/compile/internal/walk/order.go | 4 ++-- src/cmd/compile/internal/walk/stmt.go | 2 +- 17 files changed, 59 insertions(+), 59 deletions(-) (limited to 'src/cmd/compile/internal/noder/noder.go') diff --git a/src/cmd/compile/internal/dwarfgen/dwarf.go b/src/cmd/compile/internal/dwarfgen/dwarf.go index d0bee58442..42c83b1f23 100644 --- a/src/cmd/compile/internal/dwarfgen/dwarf.go +++ b/src/cmd/compile/internal/dwarfgen/dwarf.go @@ -127,7 +127,7 @@ func Info(fnsym *obj.LSym, infosym *obj.LSym, curfn interface{}) ([]dwarf.Scope, } func declPos(decl *ir.Name) src.XPos { - if decl.Name().Defn != nil && (decl.Name().Captured() || decl.Name().Byval()) { + if decl.Defn != nil && (decl.Captured() || decl.Byval()) { // It's not clear which position is correct for captured variables here: // * decl.Pos is the wrong position for captured variables, in the inner // function, but it is the right position in the outer function. @@ -142,7 +142,7 @@ func declPos(decl *ir.Name) src.XPos { // case statement. // This code is probably wrong for type switch variables that are also // captured. - return decl.Name().Defn.Pos() + return decl.Defn.Pos() } return decl.Pos() } @@ -211,7 +211,7 @@ func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *ir.Func, apDecls []*ir // misleading location for the param (we want pointer-to-heap // and not stack). 
// TODO(thanm): generate a better location expression - stackcopy := n.Name().Stackcopy + stackcopy := n.Stackcopy if stackcopy != nil && (stackcopy.Class_ == ir.PPARAM || stackcopy.Class_ == ir.PPARAMOUT) { abbrev = dwarf.DW_ABRV_PARAM_LOCLIST isReturnValue = (stackcopy.Class_ == ir.PPARAMOUT) @@ -219,9 +219,9 @@ func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *ir.Func, apDecls []*ir } inlIndex := 0 if base.Flag.GenDwarfInl > 1 { - if n.Name().InlFormal() || n.Name().InlLocal() { + if n.InlFormal() || n.InlLocal() { inlIndex = posInlIndex(n.Pos()) + 1 - if n.Name().InlFormal() { + if n.InlFormal() { abbrev = dwarf.DW_ABRV_PARAM_LOCLIST } } @@ -312,9 +312,9 @@ func createSimpleVar(fnsym *obj.LSym, n *ir.Name) *dwarf.Var { delete(fnsym.Func().Autot, reflectdata.TypeLinksym(n.Type())) inlIndex := 0 if base.Flag.GenDwarfInl > 1 { - if n.Name().InlFormal() || n.Name().InlLocal() { + if n.InlFormal() || n.InlLocal() { inlIndex = posInlIndex(n.Pos()) + 1 - if n.Name().InlFormal() { + if n.InlFormal() { abbrev = dwarf.DW_ABRV_PARAM } } @@ -323,7 +323,7 @@ func createSimpleVar(fnsym *obj.LSym, n *ir.Name) *dwarf.Var { return &dwarf.Var{ Name: n.Sym().Name, IsReturnValue: n.Class_ == ir.PPARAMOUT, - IsInlFormal: n.Name().InlFormal(), + IsInlFormal: n.InlFormal(), Abbrev: abbrev, StackOffset: int32(offs), Type: base.Ctxt.Lookup(typename), @@ -381,9 +381,9 @@ func createComplexVar(fnsym *obj.LSym, fn *ir.Func, varID ssa.VarID) *dwarf.Var typename := dwarf.InfoPrefix + gotype.Name[len("type."):] inlIndex := 0 if base.Flag.GenDwarfInl > 1 { - if n.Name().InlFormal() || n.Name().InlLocal() { + if n.InlFormal() || n.InlLocal() { inlIndex = posInlIndex(n.Pos()) + 1 - if n.Name().InlFormal() { + if n.InlFormal() { abbrev = dwarf.DW_ABRV_PARAM_LOCLIST } } @@ -392,7 +392,7 @@ func createComplexVar(fnsym *obj.LSym, fn *ir.Func, varID ssa.VarID) *dwarf.Var dvar := &dwarf.Var{ Name: n.Sym().Name, IsReturnValue: n.Class_ == ir.PPARAMOUT, - IsInlFormal: n.Name().InlFormal(), + IsInlFormal: n.InlFormal(), Abbrev: abbrev, Type: base.Ctxt.Lookup(typename), // The stack offset is used as a sorting key, so for decomposed diff --git a/src/cmd/compile/internal/escape/escape.go b/src/cmd/compile/internal/escape/escape.go index b5b09beb5a..98dbf54b75 100644 --- a/src/cmd/compile/internal/escape/escape.go +++ b/src/cmd/compile/internal/escape/escape.go @@ -1158,7 +1158,7 @@ func (e *escape) newLoc(n ir.Node, transient bool) *location { if n.Op() == ir.ONAME { n := n.(*ir.Name) if n.Curfn != e.curfn { - base.Fatalf("curfn mismatch: %v != %v", n.Name().Curfn, e.curfn) + base.Fatalf("curfn mismatch: %v != %v", n.Curfn, e.curfn) } if n.Opt != nil { diff --git a/src/cmd/compile/internal/gc/obj.go b/src/cmd/compile/internal/gc/obj.go index 1e8ac8ebb2..30cfac1b71 100644 --- a/src/cmd/compile/internal/gc/obj.go +++ b/src/cmd/compile/internal/gc/obj.go @@ -264,14 +264,14 @@ func ggloblnod(nam *ir.Name) { s := nam.Linksym() s.Gotype = reflectdata.TypeLinksym(nam.Type()) flags := 0 - if nam.Name().Readonly() { + if nam.Readonly() { flags = obj.RODATA } if nam.Type() != nil && !nam.Type().HasPointers() { flags |= obj.NOPTR } base.Ctxt.Globl(s, nam.Type().Width, flags) - if nam.Name().LibfuzzerExtraCounter() { + if nam.LibfuzzerExtraCounter() { s.Type = objabi.SLIBFUZZER_EXTRA_COUNTER } if nam.Sym().Linkname != "" { diff --git a/src/cmd/compile/internal/ir/expr.go b/src/cmd/compile/internal/ir/expr.go index f435a5bb26..88fbdff1e0 100644 --- a/src/cmd/compile/internal/ir/expr.go +++ b/src/cmd/compile/internal/ir/expr.go @@ 
-771,11 +771,11 @@ func staticValue1(nn Node) Node { return nil } n := nn.(*Name) - if n.Class_ != PAUTO || n.Name().Addrtaken() { + if n.Class_ != PAUTO || n.Addrtaken() { return nil } - defn := n.Name().Defn + defn := n.Defn if defn == nil { return nil } diff --git a/src/cmd/compile/internal/ir/name.go b/src/cmd/compile/internal/ir/name.go index c79b7e52e5..5acb2d0762 100644 --- a/src/cmd/compile/internal/ir/name.go +++ b/src/cmd/compile/internal/ir/name.go @@ -312,7 +312,7 @@ func (n *Name) MarkReadonly() { if n.Op() != ONAME { base.Fatalf("Node.MarkReadonly %v", n.Op()) } - n.Name().setReadonly(true) + n.setReadonly(true) // Mark the linksym as readonly immediately // so that the SSA backend can use this information. // It will be overridden later during dumpglobls. @@ -433,7 +433,7 @@ func IsParamHeapCopy(n Node) bool { return false } name := n.(*Name) - return name.Class_ == PAUTOHEAP && name.Name().Stackcopy != nil + return name.Class_ == PAUTOHEAP && name.Stackcopy != nil } var RegFP *Name diff --git a/src/cmd/compile/internal/liveness/plive.go b/src/cmd/compile/internal/liveness/plive.go index 89c70df65a..91f10b0a9d 100644 --- a/src/cmd/compile/internal/liveness/plive.go +++ b/src/cmd/compile/internal/liveness/plive.go @@ -255,7 +255,7 @@ func (lv *liveness) valueEffects(v *ssa.Value) (int32, liveEffect) { // variable" ICEs (issue 19632). switch v.Op { case ssa.OpVarDef, ssa.OpVarKill, ssa.OpVarLive, ssa.OpKeepAlive: - if !n.Name().Used() { + if !n.Used() { return -1, 0 } } @@ -688,11 +688,11 @@ func (lv *liveness) epilogue() { if lv.fn.HasDefer() { for i, n := range lv.vars { if n.Class_ == ir.PPARAMOUT { - if n.Name().IsOutputParamHeapAddr() { + if n.IsOutputParamHeapAddr() { // Just to be paranoid. Heap addresses are PAUTOs. base.Fatalf("variable %v both output param and heap output param", n) } - if n.Name().Heapaddr != nil { + if n.Heapaddr != nil { // If this variable moved to the heap, then // its stack copy is not live. continue @@ -700,21 +700,21 @@ func (lv *liveness) epilogue() { // Note: zeroing is handled by zeroResults in walk.go. livedefer.Set(int32(i)) } - if n.Name().IsOutputParamHeapAddr() { + if n.IsOutputParamHeapAddr() { // This variable will be overwritten early in the function // prologue (from the result of a mallocgc) but we need to // zero it in case that malloc causes a stack scan. - n.Name().SetNeedzero(true) + n.SetNeedzero(true) livedefer.Set(int32(i)) } - if n.Name().OpenDeferSlot() { + if n.OpenDeferSlot() { // Open-coded defer args slots must be live // everywhere in a function, since a panic can // occur (almost) anywhere. Because it is live // everywhere, it must be zeroed on entry. livedefer.Set(int32(i)) // It was already marked as Needzero when created. - if !n.Name().Needzero() { + if !n.Needzero() { base.Fatalf("all pointer-containing defer arg slots should have Needzero set") } } diff --git a/src/cmd/compile/internal/noder/noder.go b/src/cmd/compile/internal/noder/noder.go index f4b5e0cf91..748fd96380 100644 --- a/src/cmd/compile/internal/noder/noder.go +++ b/src/cmd/compile/internal/noder/noder.go @@ -1835,7 +1835,7 @@ func oldname(s *types.Sym) ir.Node { // the := it looks like a reference to the outer x so we'll // make x a closure variable unnecessarily. n := n.(*ir.Name) - c := n.Name().Innermost + c := n.Innermost if c == nil || c.Curfn != ir.CurFunc { // Do not have a closure var for the active closure yet; make one. 
c = typecheck.NewName(s) @@ -1845,8 +1845,8 @@ func oldname(s *types.Sym) ir.Node { // Link into list of active closure variables. // Popped from list in func funcLit. - c.Outer = n.Name().Innermost - n.Name().Innermost = c + c.Outer = n.Innermost + n.Innermost = c ir.CurFunc.ClosureVars = append(ir.CurFunc.ClosureVars, c) } diff --git a/src/cmd/compile/internal/pkginit/initorder.go b/src/cmd/compile/internal/pkginit/initorder.go index c6e223954d..1c222c1de4 100644 --- a/src/cmd/compile/internal/pkginit/initorder.go +++ b/src/cmd/compile/internal/pkginit/initorder.go @@ -197,7 +197,7 @@ func (o *InitOrder) findInitLoopAndExit(n *ir.Name, path *[]*ir.Name) { // There might be multiple loops involving n; by sorting // references, we deterministically pick the one reported. - refers := collectDeps(n.Name().Defn, false).Sorted(func(ni, nj *ir.Name) bool { + refers := collectDeps(n.Defn, false).Sorted(func(ni, nj *ir.Name) bool { return ni.Pos().Before(nj.Pos()) }) diff --git a/src/cmd/compile/internal/ssagen/nowb.go b/src/cmd/compile/internal/ssagen/nowb.go index 7b2e68c8e7..26858fac87 100644 --- a/src/cmd/compile/internal/ssagen/nowb.go +++ b/src/cmd/compile/internal/ssagen/nowb.go @@ -76,7 +76,7 @@ func (c *nowritebarrierrecChecker) findExtraCalls(nn ir.Node) { return } fn := n.X.(*ir.Name) - if fn.Class_ != ir.PFUNC || fn.Name().Defn == nil { + if fn.Class_ != ir.PFUNC || fn.Defn == nil { return } if !types.IsRuntimePkg(fn.Sym().Pkg) || fn.Sym().Name != "systemstack" { @@ -88,7 +88,7 @@ func (c *nowritebarrierrecChecker) findExtraCalls(nn ir.Node) { switch arg.Op() { case ir.ONAME: arg := arg.(*ir.Name) - callee = arg.Name().Defn.(*ir.Func) + callee = arg.Defn.(*ir.Func) case ir.OCLOSURE: arg := arg.(*ir.ClosureExpr) callee = arg.Func diff --git a/src/cmd/compile/internal/ssagen/pgen.go b/src/cmd/compile/internal/ssagen/pgen.go index 72ce233fda..2be10ff7af 100644 --- a/src/cmd/compile/internal/ssagen/pgen.go +++ b/src/cmd/compile/internal/ssagen/pgen.go @@ -86,7 +86,7 @@ func (s *ssafn) AllocFrame(f *ssa.Func) { for _, l := range f.RegAlloc { if ls, ok := l.(ssa.LocalSlot); ok { - ls.N.Name().SetUsed(true) + ls.N.SetUsed(true) } } @@ -98,10 +98,10 @@ func (s *ssafn) AllocFrame(f *ssa.Func) { case ir.PPARAM, ir.PPARAMOUT: // Don't modify nodfp; it is a global. if n != ir.RegFP { - n.Name().SetUsed(true) + n.SetUsed(true) } case ir.PAUTO: - n.Name().SetUsed(true) + n.SetUsed(true) } } if !scratchUsed { diff --git a/src/cmd/compile/internal/ssagen/ssa.go b/src/cmd/compile/internal/ssagen/ssa.go index 022959a934..8e3b09aac3 100644 --- a/src/cmd/compile/internal/ssagen/ssa.go +++ b/src/cmd/compile/internal/ssagen/ssa.go @@ -6223,7 +6223,7 @@ func (s *state) addNamedValue(n *ir.Name, v *ssa.Value) { // from being assigned too early. See #14591 and #14762. TODO: allow this. 
return } - loc := ssa.LocalSlot{N: n.Name(), Type: n.Type(), Off: 0} + loc := ssa.LocalSlot{N: n, Type: n.Type(), Off: 0} values, ok := s.f.NamedValues[loc] if !ok { s.f.Names = append(s.f.Names, loc) @@ -7198,7 +7198,7 @@ func (e *ssafn) DerefItab(it *obj.LSym, offset int64) *obj.LSym { func (e *ssafn) SplitSlot(parent *ssa.LocalSlot, suffix string, offset int64, t *types.Type) ssa.LocalSlot { node := parent.N - if node.Class_ != ir.PAUTO || node.Name().Addrtaken() { + if node.Class_ != ir.PAUTO || node.Addrtaken() { // addressed things and non-autos retain their parents (i.e., cannot truly be split) return ssa.LocalSlot{N: node, Type: t, Off: parent.Off + offset} } diff --git a/src/cmd/compile/internal/typecheck/func.go b/src/cmd/compile/internal/typecheck/func.go index 75f38d588d..3552bcf924 100644 --- a/src/cmd/compile/internal/typecheck/func.go +++ b/src/cmd/compile/internal/typecheck/func.go @@ -131,10 +131,10 @@ func CaptureVars(fn *ir.Func) { outermost := v.Defn.(*ir.Name) // out parameters will be assigned to implicitly upon return. - if outermost.Class_ != ir.PPARAMOUT && !outermost.Name().Addrtaken() && !outermost.Name().Assigned() && v.Type().Width <= 128 { + if outermost.Class_ != ir.PPARAMOUT && !outermost.Addrtaken() && !outermost.Assigned() && v.Type().Width <= 128 { v.SetByval(true) } else { - outermost.Name().SetAddrtaken(true) + outermost.SetAddrtaken(true) outer = NodAddr(outer) } @@ -147,7 +147,7 @@ func CaptureVars(fn *ir.Func) { if v.Byval() { how = "value" } - base.WarnfAt(v.Pos(), "%v capturing by %s: %v (addr=%v assign=%v width=%d)", name, how, v.Sym(), outermost.Name().Addrtaken(), outermost.Name().Assigned(), int32(v.Type().Width)) + base.WarnfAt(v.Pos(), "%v capturing by %s: %v (addr=%v assign=%v width=%d)", name, how, v.Sym(), outermost.Addrtaken(), outermost.Assigned(), int32(v.Type().Width)) } outer = Expr(outer) diff --git a/src/cmd/compile/internal/typecheck/iexport.go b/src/cmd/compile/internal/typecheck/iexport.go index aa16a54bb8..50acb10a9a 100644 --- a/src/cmd/compile/internal/typecheck/iexport.go +++ b/src/cmd/compile/internal/typecheck/iexport.go @@ -1521,8 +1521,8 @@ func (w *exportWriter) localName(n *ir.Name) { // PPARAM/PPARAMOUT, because we only want to include vargen in // non-param names. 
var v int32 - if n.Class_ == ir.PAUTO || (n.Class_ == ir.PAUTOHEAP && n.Name().Stackcopy == nil) { - v = n.Name().Vargen + if n.Class_ == ir.PAUTO || (n.Class_ == ir.PAUTOHEAP && n.Stackcopy == nil) { + v = n.Vargen } w.localIdent(n.Sym(), v) diff --git a/src/cmd/compile/internal/typecheck/typecheck.go b/src/cmd/compile/internal/typecheck/typecheck.go index cf9b48f5a6..519d8ddfd9 100644 --- a/src/cmd/compile/internal/typecheck/typecheck.go +++ b/src/cmd/compile/internal/typecheck/typecheck.go @@ -57,7 +57,7 @@ func Package() { base.Timer.Start("fe", "typecheck", "top1") for i := 0; i < len(Target.Decls); i++ { n := Target.Decls[i] - if op := n.Op(); op != ir.ODCL && op != ir.OAS && op != ir.OAS2 && (op != ir.ODCLTYPE || !n.(*ir.Decl).X.Name().Alias()) { + if op := n.Op(); op != ir.ODCL && op != ir.OAS && op != ir.OAS2 && (op != ir.ODCLTYPE || !n.(*ir.Decl).X.Alias()) { Target.Decls[i] = Stmt(n) } } @@ -69,7 +69,7 @@ func Package() { base.Timer.Start("fe", "typecheck", "top2") for i := 0; i < len(Target.Decls); i++ { n := Target.Decls[i] - if op := n.Op(); op == ir.ODCL || op == ir.OAS || op == ir.OAS2 || op == ir.ODCLTYPE && n.(*ir.Decl).X.Name().Alias() { + if op := n.Op(); op == ir.ODCL || op == ir.OAS || op == ir.OAS2 || op == ir.ODCLTYPE && n.(*ir.Decl).X.Alias() { Target.Decls[i] = Stmt(n) } } @@ -636,7 +636,7 @@ func typecheck1(n ir.Node, top int) ir.Node { n.SetType(nil) return n } - n.Name().SetUsed(true) + n.SetUsed(true) } return n @@ -1729,9 +1729,9 @@ func checkassign(stmt ir.Node, n ir.Node) { r := ir.OuterValue(n) if r.Op() == ir.ONAME { r := r.(*ir.Name) - r.Name().SetAssigned(true) - if r.Name().IsClosureVar() { - r.Name().Defn.Name().SetAssigned(true) + r.SetAssigned(true) + if r.IsClosureVar() { + r.Defn.Name().SetAssigned(true) } } } @@ -1938,9 +1938,9 @@ func typecheckdef(n ir.Node) { case ir.ONAME: n := n.(*ir.Name) - if n.Name().Ntype != nil { - n.Name().Ntype = typecheckNtype(n.Name().Ntype) - n.SetType(n.Name().Ntype.Type()) + if n.Ntype != nil { + n.Ntype = typecheckNtype(n.Ntype) + n.SetType(n.Ntype.Type()) if n.Type() == nil { n.SetDiag(true) goto ret @@ -1950,7 +1950,7 @@ func typecheckdef(n ir.Node) { if n.Type() != nil { break } - if n.Name().Defn == nil { + if n.Defn == nil { if n.BuiltinOp != 0 { // like OPRINTN break } @@ -1965,13 +1965,13 @@ func typecheckdef(n ir.Node) { base.Fatalf("var without type, init: %v", n.Sym()) } - if n.Name().Defn.Op() == ir.ONAME { - n.Name().Defn = Expr(n.Name().Defn) - n.SetType(n.Name().Defn.Type()) + if n.Defn.Op() == ir.ONAME { + n.Defn = Expr(n.Defn) + n.SetType(n.Defn.Type()) break } - n.Name().Defn = Stmt(n.Name().Defn) // fills in n.Type + n.Defn = Stmt(n.Defn) // fills in n.Type case ir.OTYPE: n := n.(*ir.Name) diff --git a/src/cmd/compile/internal/walk/expr.go b/src/cmd/compile/internal/walk/expr.go index 0d7ffca15d..f06a87c37f 100644 --- a/src/cmd/compile/internal/walk/expr.go +++ b/src/cmd/compile/internal/walk/expr.go @@ -54,7 +54,7 @@ func walkExpr(n ir.Node, init *ir.Nodes) ir.Node { if n.Op() == ir.ONAME && n.(*ir.Name).Class_ == ir.PAUTOHEAP { n := n.(*ir.Name) - nn := ir.NewStarExpr(base.Pos, n.Name().Heapaddr) + nn := ir.NewStarExpr(base.Pos, n.Heapaddr) nn.X.MarkNonNil() return walkExpr(typecheck.Expr(nn), init) } diff --git a/src/cmd/compile/internal/walk/order.go b/src/cmd/compile/internal/walk/order.go index a2bd0cf10a..e40c877ea9 100644 --- a/src/cmd/compile/internal/walk/order.go +++ b/src/cmd/compile/internal/walk/order.go @@ -406,7 +406,7 @@ func (o *orderState) edge() { // Create a new uint8 
counter to be allocated in section // __libfuzzer_extra_counters. counter := staticinit.StaticName(types.Types[types.TUINT8]) - counter.Name().SetLibfuzzerExtraCounter(true) + counter.SetLibfuzzerExtraCounter(true) // counter += 1 incr := ir.NewAssignOpStmt(base.Pos, ir.OADD, counter, ir.NewInt(1)) @@ -517,7 +517,7 @@ func (o *orderState) call(nn ir.Node) { if arg.X.Type().IsUnsafePtr() { x := o.copyExpr(arg.X) arg.X = x - x.Name().SetAddrtaken(true) // ensure SSA keeps the x variable + x.SetAddrtaken(true) // ensure SSA keeps the x variable n.KeepAlive = append(n.KeepAlive, x) } } diff --git a/src/cmd/compile/internal/walk/stmt.go b/src/cmd/compile/internal/walk/stmt.go index cfd1da46d2..8641a58e2e 100644 --- a/src/cmd/compile/internal/walk/stmt.go +++ b/src/cmd/compile/internal/walk/stmt.go @@ -181,7 +181,7 @@ func walkDecl(n *ir.Decl) ir.Node { if base.Flag.CompilingRuntime { base.Errorf("%v escapes to heap, not allowed in runtime", v) } - nn := ir.NewAssignStmt(base.Pos, v.Name().Heapaddr, callnew(v.Type())) + nn := ir.NewAssignStmt(base.Pos, v.Heapaddr, callnew(v.Type())) nn.Def = true return walkStmt(typecheck.Stmt(nn)) } -- cgit v1.2.3-54-g00ecf From 0f1d2129c4c294a895480b79eeab8d22c07ac573 Mon Sep 17 00:00:00 2001 From: Matthew Dempsky Date: Thu, 31 Dec 2020 21:48:27 -0800 Subject: [dev.regabi] cmd/compile: reshuffle type-checking code [generated] This commit splits up typecheck.Package and moves the code elsewhere. The type-checking code is moved into noder, so that it can eventually be interleaved with the noding process. The non-type-checking code is moved back into package gc, so that it can be incorporated into appropriate compiler backend phases. While here, deadcode removal is moved into its own package. Passes toolstash -cmp. [git-generate] cd src/cmd/compile/internal/typecheck : Split into two functions. sed -i -e '/Phase 6/i}\n\nfunc postTypecheck() {' typecheck.go rf ' # Export needed identifiers. mv deadcode Deadcode mv loadsys InitRuntime mv declareUniverse DeclareUniverse mv dirtyAddrtaken DirtyAddrtaken mv computeAddrtaken ComputeAddrtaken mv incrementalAddrtaken IncrementalAddrtaken # Move into new package. mv Deadcode deadcodeslice deadcodeexpr deadcode.go mv deadcode.go cmd/compile/internal/deadcode # Move top-level type-checking code into noder. # Move DeclVars there too, now that nothing else uses it. mv DeclVars Package noder.go mv noder.go cmd/compile/internal/noder # Move non-type-checking code back into gc. mv postTypecheck main.go mv main.go cmd/compile/internal/gc ' cd ../deadcode rf ' # Destutter names. mv Deadcode Func mv deadcodeslice stmts mv deadcodeexpr expr ' cd ../noder rf ' # Move functions up, next to their related code. mv noder.go:/func Package/-1,$ \ noder.go:/makeSrcPosBase translates/-1 mv noder.go:/func DeclVars/-3,$ \ noder.go:/constState tracks/-1 ' cd ../gc rf ' # Inline postTypecheck code back into gc.Main. 
mv main.go:/func postTypecheck/+0,/AllImportedBodies/+1 \ main.go:/Build init task/-1 rm postTypecheck ' Change-Id: Ie5e992ece4a42204cce6aa98dd6eb52112d098c8 Reviewed-on: https://go-review.googlesource.com/c/go/+/280974 Trust: Matthew Dempsky Run-TryBot: Matthew Dempsky TryBot-Result: Go Bot Reviewed-by: Cuong Manh Le --- src/cmd/compile/internal/deadcode/deadcode.go | 150 +++++++++++++++ src/cmd/compile/internal/gc/main.go | 42 +++- src/cmd/compile/internal/noder/noder.go | 119 +++++++++++- src/cmd/compile/internal/typecheck/dcl.go | 54 ------ src/cmd/compile/internal/typecheck/func.go | 10 +- src/cmd/compile/internal/typecheck/subr.go | 16 +- src/cmd/compile/internal/typecheck/syms.go | 4 +- src/cmd/compile/internal/typecheck/typecheck.go | 243 +----------------------- src/cmd/compile/internal/typecheck/universe.go | 4 +- 9 files changed, 327 insertions(+), 315 deletions(-) create mode 100644 src/cmd/compile/internal/deadcode/deadcode.go (limited to 'src/cmd/compile/internal/noder/noder.go') diff --git a/src/cmd/compile/internal/deadcode/deadcode.go b/src/cmd/compile/internal/deadcode/deadcode.go new file mode 100644 index 0000000000..5453cfe396 --- /dev/null +++ b/src/cmd/compile/internal/deadcode/deadcode.go @@ -0,0 +1,150 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package deadcode + +import ( + "go/constant" + + "cmd/compile/internal/base" + "cmd/compile/internal/ir" +) + +func Func(fn *ir.Func) { + stmts(&fn.Body) + + if len(fn.Body) == 0 { + return + } + + for _, n := range fn.Body { + if len(n.Init()) > 0 { + return + } + switch n.Op() { + case ir.OIF: + n := n.(*ir.IfStmt) + if !ir.IsConst(n.Cond, constant.Bool) || len(n.Body) > 0 || len(n.Else) > 0 { + return + } + case ir.OFOR: + n := n.(*ir.ForStmt) + if !ir.IsConst(n.Cond, constant.Bool) || ir.BoolVal(n.Cond) { + return + } + default: + return + } + } + + fn.Body.Set([]ir.Node{ir.NewBlockStmt(base.Pos, nil)}) +} + +func stmts(nn *ir.Nodes) { + var lastLabel = -1 + for i, n := range *nn { + if n != nil && n.Op() == ir.OLABEL { + lastLabel = i + } + } + for i, n := range *nn { + // Cut is set to true when all nodes after i'th position + // should be removed. + // In other words, it marks whole slice "tail" as dead. + cut := false + if n == nil { + continue + } + if n.Op() == ir.OIF { + n := n.(*ir.IfStmt) + n.Cond = expr(n.Cond) + if ir.IsConst(n.Cond, constant.Bool) { + var body ir.Nodes + if ir.BoolVal(n.Cond) { + n.Else = ir.Nodes{} + body = n.Body + } else { + n.Body = ir.Nodes{} + body = n.Else + } + // If "then" or "else" branch ends with panic or return statement, + // it is safe to remove all statements after this node. + // isterminating is not used to avoid goto-related complications. + // We must be careful not to deadcode-remove labels, as they + // might be the target of a goto. See issue 28616. 
+ if body := body; len(body) != 0 { + switch body[(len(body) - 1)].Op() { + case ir.ORETURN, ir.ORETJMP, ir.OPANIC: + if i > lastLabel { + cut = true + } + } + } + } + } + + stmts(n.PtrInit()) + switch n.Op() { + case ir.OBLOCK: + n := n.(*ir.BlockStmt) + stmts(&n.List) + case ir.OFOR: + n := n.(*ir.ForStmt) + stmts(&n.Body) + case ir.OIF: + n := n.(*ir.IfStmt) + stmts(&n.Body) + stmts(&n.Else) + case ir.ORANGE: + n := n.(*ir.RangeStmt) + stmts(&n.Body) + case ir.OSELECT: + n := n.(*ir.SelectStmt) + for _, cas := range n.Cases { + stmts(&cas.Body) + } + case ir.OSWITCH: + n := n.(*ir.SwitchStmt) + for _, cas := range n.Cases { + stmts(&cas.Body) + } + } + + if cut { + nn.Set((*nn)[:i+1]) + break + } + } +} + +func expr(n ir.Node) ir.Node { + // Perform dead-code elimination on short-circuited boolean + // expressions involving constants with the intent of + // producing a constant 'if' condition. + switch n.Op() { + case ir.OANDAND: + n := n.(*ir.LogicalExpr) + n.X = expr(n.X) + n.Y = expr(n.Y) + if ir.IsConst(n.X, constant.Bool) { + if ir.BoolVal(n.X) { + return n.Y // true && x => x + } else { + return n.X // false && x => false + } + } + case ir.OOROR: + n := n.(*ir.LogicalExpr) + n.X = expr(n.X) + n.Y = expr(n.Y) + if ir.IsConst(n.X, constant.Bool) { + if ir.BoolVal(n.X) { + return n.X // true || x => true + } else { + return n.Y // false || x => x + } + } + } + return n +} diff --git a/src/cmd/compile/internal/gc/main.go b/src/cmd/compile/internal/gc/main.go index 45219801f0..603619eb5a 100644 --- a/src/cmd/compile/internal/gc/main.go +++ b/src/cmd/compile/internal/gc/main.go @@ -8,6 +8,7 @@ import ( "bufio" "bytes" "cmd/compile/internal/base" + "cmd/compile/internal/deadcode" "cmd/compile/internal/devirtualize" "cmd/compile/internal/dwarfgen" "cmd/compile/internal/escape" @@ -210,12 +211,51 @@ func Main(archInit func(*ssagen.ArchInfo)) { dwarfgen.RecordPackageName() // Typecheck. - typecheck.Package() + noder.Package() // With all user code typechecked, it's now safe to verify unused dot imports. noder.CheckDotImports() base.ExitIfErrors() + // Phase 6: Compute Addrtaken for names. + // We need to wait until typechecking is done so that when we see &x[i] + // we know that x has its address taken if x is an array, but not if x is a slice. + // We compute Addrtaken in bulk here. + // After this phase, we maintain Addrtaken incrementally. + if typecheck.DirtyAddrtaken { + typecheck.ComputeAddrtaken(typecheck.Target.Decls) + typecheck.DirtyAddrtaken = false + } + typecheck.IncrementalAddrtaken = true + // Phase 7: Eliminate some obviously dead code. + // Must happen after typechecking. + for _, n := range typecheck.Target.Decls { + if n.Op() == ir.ODCLFUNC { + deadcode.Func(n.(*ir.Func)) + } + } + + // Phase 8: Decide how to capture closed variables. + // This needs to run before escape analysis, + // because variables captured by value do not escape. + base.Timer.Start("fe", "capturevars") + for _, n := range typecheck.Target.Decls { + if n.Op() == ir.ODCLFUNC { + n := n.(*ir.Func) + if n.OClosure != nil { + ir.CurFunc = n + typecheck.CaptureVars(n) + } + } + } + typecheck.CaptureVarsComplete = true + ir.CurFunc = nil + + if base.Debug.TypecheckInl != 0 { + // Typecheck imported function bodies if Debug.l > 1, + // otherwise lazily when used or re-exported. + typecheck.AllImportedBodies() + } // Build init task. 
if initTask := pkginit.Task(); initTask != nil { typecheck.Export(initTask) diff --git a/src/cmd/compile/internal/noder/noder.go b/src/cmd/compile/internal/noder/noder.go index 748fd96380..40569af317 100644 --- a/src/cmd/compile/internal/noder/noder.go +++ b/src/cmd/compile/internal/noder/noder.go @@ -85,6 +85,69 @@ func ParseFiles(filenames []string) uint { return lines } +func Package() { + typecheck.DeclareUniverse() + + typecheck.TypecheckAllowed = true + + // Process top-level declarations in phases. + + // Phase 1: const, type, and names and types of funcs. + // This will gather all the information about types + // and methods but doesn't depend on any of it. + // + // We also defer type alias declarations until phase 2 + // to avoid cycles like #18640. + // TODO(gri) Remove this again once we have a fix for #25838. + + // Don't use range--typecheck can add closures to Target.Decls. + base.Timer.Start("fe", "typecheck", "top1") + for i := 0; i < len(typecheck.Target.Decls); i++ { + n := typecheck.Target.Decls[i] + if op := n.Op(); op != ir.ODCL && op != ir.OAS && op != ir.OAS2 && (op != ir.ODCLTYPE || !n.(*ir.Decl).X.Alias()) { + typecheck.Target.Decls[i] = typecheck.Stmt(n) + } + } + + // Phase 2: Variable assignments. + // To check interface assignments, depends on phase 1. + + // Don't use range--typecheck can add closures to Target.Decls. + base.Timer.Start("fe", "typecheck", "top2") + for i := 0; i < len(typecheck.Target.Decls); i++ { + n := typecheck.Target.Decls[i] + if op := n.Op(); op == ir.ODCL || op == ir.OAS || op == ir.OAS2 || op == ir.ODCLTYPE && n.(*ir.Decl).X.Alias() { + typecheck.Target.Decls[i] = typecheck.Stmt(n) + } + } + + // Phase 3: Type check function bodies. + // Don't use range--typecheck can add closures to Target.Decls. + base.Timer.Start("fe", "typecheck", "func") + var fcount int64 + for i := 0; i < len(typecheck.Target.Decls); i++ { + n := typecheck.Target.Decls[i] + if n.Op() == ir.ODCLFUNC { + typecheck.FuncBody(n.(*ir.Func)) + fcount++ + } + } + + // Phase 4: Check external declarations. + // TODO(mdempsky): This should be handled when type checking their + // corresponding ODCL nodes. + base.Timer.Start("fe", "typecheck", "externdcls") + for i, n := range typecheck.Target.Externs { + if n.Op() == ir.ONAME { + typecheck.Target.Externs[i] = typecheck.Expr(typecheck.Target.Externs[i]) + } + } + + // Phase 5: With all user code type-checked, it's now safe to verify map keys. + typecheck.CheckMapKeys() + +} + // makeSrcPosBase translates from a *syntax.PosBase to a *src.PosBase. 
func (p *noder) makeSrcPosBase(b0 *syntax.PosBase) *src.PosBase { // fast path: most likely PosBase hasn't changed @@ -398,7 +461,61 @@ func (p *noder) varDecl(decl *syntax.VarDecl) []ir.Node { } p.setlineno(decl) - return typecheck.DeclVars(names, typ, exprs) + return DeclVars(names, typ, exprs) +} + +// declare variables from grammar +// new_name_list (type | [type] = expr_list) +func DeclVars(vl []*ir.Name, t ir.Ntype, el []ir.Node) []ir.Node { + var init []ir.Node + doexpr := len(el) > 0 + + if len(el) == 1 && len(vl) > 1 { + e := el[0] + as2 := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil) + as2.Rhs = []ir.Node{e} + for _, v := range vl { + as2.Lhs.Append(v) + typecheck.Declare(v, typecheck.DeclContext) + v.Ntype = t + v.Defn = as2 + if ir.CurFunc != nil { + init = append(init, ir.NewDecl(base.Pos, ir.ODCL, v)) + } + } + + return append(init, as2) + } + + for i, v := range vl { + var e ir.Node + if doexpr { + if i >= len(el) { + base.Errorf("assignment mismatch: %d variables but %d values", len(vl), len(el)) + break + } + e = el[i] + } + + typecheck.Declare(v, typecheck.DeclContext) + v.Ntype = t + + if e != nil || ir.CurFunc != nil || ir.IsBlank(v) { + if ir.CurFunc != nil { + init = append(init, ir.NewDecl(base.Pos, ir.ODCL, v)) + } + as := ir.NewAssignStmt(base.Pos, v, e) + init = append(init, as) + if e != nil { + v.Defn = as + } + } + } + + if len(el) > len(vl) { + base.Errorf("assignment mismatch: %d variables but %d values", len(vl), len(el)) + } + return init } // constState tracks state between constant specifiers within a diff --git a/src/cmd/compile/internal/typecheck/dcl.go b/src/cmd/compile/internal/typecheck/dcl.go index c4f32ff59d..fd55f472ab 100644 --- a/src/cmd/compile/internal/typecheck/dcl.go +++ b/src/cmd/compile/internal/typecheck/dcl.go @@ -33,60 +33,6 @@ func DeclFunc(sym *types.Sym, tfn ir.Ntype) *ir.Func { return fn } -// declare variables from grammar -// new_name_list (type | [type] = expr_list) -func DeclVars(vl []*ir.Name, t ir.Ntype, el []ir.Node) []ir.Node { - var init []ir.Node - doexpr := len(el) > 0 - - if len(el) == 1 && len(vl) > 1 { - e := el[0] - as2 := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil) - as2.Rhs = []ir.Node{e} - for _, v := range vl { - as2.Lhs.Append(v) - Declare(v, DeclContext) - v.Ntype = t - v.Defn = as2 - if ir.CurFunc != nil { - init = append(init, ir.NewDecl(base.Pos, ir.ODCL, v)) - } - } - - return append(init, as2) - } - - for i, v := range vl { - var e ir.Node - if doexpr { - if i >= len(el) { - base.Errorf("assignment mismatch: %d variables but %d values", len(vl), len(el)) - break - } - e = el[i] - } - - Declare(v, DeclContext) - v.Ntype = t - - if e != nil || ir.CurFunc != nil || ir.IsBlank(v) { - if ir.CurFunc != nil { - init = append(init, ir.NewDecl(base.Pos, ir.ODCL, v)) - } - as := ir.NewAssignStmt(base.Pos, v, e) - init = append(init, as) - if e != nil { - v.Defn = as - } - } - } - - if len(el) > len(vl) { - base.Errorf("assignment mismatch: %d variables but %d values", len(vl), len(el)) - } - return init -} - // Declare records that Node n declares symbol n.Sym in the specified // declaration context. func Declare(n *ir.Name, ctxt ir.Class) { diff --git a/src/cmd/compile/internal/typecheck/func.go b/src/cmd/compile/internal/typecheck/func.go index 3552bcf924..d8c1748432 100644 --- a/src/cmd/compile/internal/typecheck/func.go +++ b/src/cmd/compile/internal/typecheck/func.go @@ -169,13 +169,13 @@ func ImportedBody(fn *ir.Func) { // computeAddrtaken call below (after we typecheck the body). 
// TODO: export/import types and addrtaken marks along with inlined bodies, // so this will be unnecessary. - incrementalAddrtaken = false + IncrementalAddrtaken = false defer func() { - if dirtyAddrtaken { - computeAddrtaken(fn.Inl.Body) // compute addrtaken marks once types are available - dirtyAddrtaken = false + if DirtyAddrtaken { + ComputeAddrtaken(fn.Inl.Body) // compute addrtaken marks once types are available + DirtyAddrtaken = false } - incrementalAddrtaken = true + IncrementalAddrtaken = true }() ImportBody(fn) diff --git a/src/cmd/compile/internal/typecheck/subr.go b/src/cmd/compile/internal/typecheck/subr.go index 9d414874a0..447e945d81 100644 --- a/src/cmd/compile/internal/typecheck/subr.go +++ b/src/cmd/compile/internal/typecheck/subr.go @@ -72,7 +72,7 @@ func NodAddrAt(pos src.XPos, n ir.Node) *ir.AddrExpr { } func markAddrOf(n ir.Node) ir.Node { - if incrementalAddrtaken { + if IncrementalAddrtaken { // We can only do incremental addrtaken computation when it is ok // to typecheck the argument of the OADDR. That's only safe after the // main typecheck has completed. @@ -86,22 +86,22 @@ func markAddrOf(n ir.Node) ir.Node { } else { // Remember that we built an OADDR without computing the Addrtaken bit for // its argument. We'll do that later in bulk using computeAddrtaken. - dirtyAddrtaken = true + DirtyAddrtaken = true } return n } -// If incrementalAddrtaken is false, we do not compute Addrtaken for an OADDR Node +// If IncrementalAddrtaken is false, we do not compute Addrtaken for an OADDR Node // when it is built. The Addrtaken bits are set in bulk by computeAddrtaken. -// If incrementalAddrtaken is true, then when an OADDR Node is built the Addrtaken +// If IncrementalAddrtaken is true, then when an OADDR Node is built the Addrtaken // field of its argument is updated immediately. -var incrementalAddrtaken = false +var IncrementalAddrtaken = false -// If dirtyAddrtaken is true, then there are OADDR whose corresponding arguments +// If DirtyAddrtaken is true, then there are OADDR whose corresponding arguments // have not yet been marked as Addrtaken. -var dirtyAddrtaken = false +var DirtyAddrtaken = false -func computeAddrtaken(top []ir.Node) { +func ComputeAddrtaken(top []ir.Node) { for _, n := range top { ir.Visit(n, func(n ir.Node) { if n.Op() == ir.OADDR { diff --git a/src/cmd/compile/internal/typecheck/syms.go b/src/cmd/compile/internal/typecheck/syms.go index ab3384bf90..f0e230432a 100644 --- a/src/cmd/compile/internal/typecheck/syms.go +++ b/src/cmd/compile/internal/typecheck/syms.go @@ -61,10 +61,10 @@ func Lookup(name string) *types.Sym { return types.LocalPkg.Lookup(name) } -// loadsys loads the definitions for the low-level runtime functions, +// InitRuntime loads the definitions for the low-level runtime functions, // so that the compiler can generate calls to them, // but does not make them visible to user code. -func loadsys() { +func InitRuntime() { types.Block = 1 inimport = true diff --git a/src/cmd/compile/internal/typecheck/typecheck.go b/src/cmd/compile/internal/typecheck/typecheck.go index 4b5c3198ca..4c6ac21fc6 100644 --- a/src/cmd/compile/internal/typecheck/typecheck.go +++ b/src/cmd/compile/internal/typecheck/typecheck.go @@ -35,110 +35,7 @@ func Init() { initUniverse() DeclContext = ir.PEXTERN base.Timer.Start("fe", "loadsys") - loadsys() -} - -func Package() { - declareUniverse() - - TypecheckAllowed = true - - // Process top-level declarations in phases. - - // Phase 1: const, type, and names and types of funcs. 
- // This will gather all the information about types - // and methods but doesn't depend on any of it. - // - // We also defer type alias declarations until phase 2 - // to avoid cycles like #18640. - // TODO(gri) Remove this again once we have a fix for #25838. - - // Don't use range--typecheck can add closures to Target.Decls. - base.Timer.Start("fe", "typecheck", "top1") - for i := 0; i < len(Target.Decls); i++ { - n := Target.Decls[i] - if op := n.Op(); op != ir.ODCL && op != ir.OAS && op != ir.OAS2 && (op != ir.ODCLTYPE || !n.(*ir.Decl).X.Alias()) { - Target.Decls[i] = Stmt(n) - } - } - - // Phase 2: Variable assignments. - // To check interface assignments, depends on phase 1. - - // Don't use range--typecheck can add closures to Target.Decls. - base.Timer.Start("fe", "typecheck", "top2") - for i := 0; i < len(Target.Decls); i++ { - n := Target.Decls[i] - if op := n.Op(); op == ir.ODCL || op == ir.OAS || op == ir.OAS2 || op == ir.ODCLTYPE && n.(*ir.Decl).X.Alias() { - Target.Decls[i] = Stmt(n) - } - } - - // Phase 3: Type check function bodies. - // Don't use range--typecheck can add closures to Target.Decls. - base.Timer.Start("fe", "typecheck", "func") - var fcount int64 - for i := 0; i < len(Target.Decls); i++ { - n := Target.Decls[i] - if n.Op() == ir.ODCLFUNC { - FuncBody(n.(*ir.Func)) - fcount++ - } - } - - // Phase 4: Check external declarations. - // TODO(mdempsky): This should be handled when type checking their - // corresponding ODCL nodes. - base.Timer.Start("fe", "typecheck", "externdcls") - for i, n := range Target.Externs { - if n.Op() == ir.ONAME { - Target.Externs[i] = Expr(Target.Externs[i]) - } - } - - // Phase 5: With all user code type-checked, it's now safe to verify map keys. - CheckMapKeys() - - // Phase 6: Compute Addrtaken for names. - // We need to wait until typechecking is done so that when we see &x[i] - // we know that x has its address taken if x is an array, but not if x is a slice. - // We compute Addrtaken in bulk here. - // After this phase, we maintain Addrtaken incrementally. - if dirtyAddrtaken { - computeAddrtaken(Target.Decls) - dirtyAddrtaken = false - } - incrementalAddrtaken = true - - // Phase 7: Eliminate some obviously dead code. - // Must happen after typechecking. - for _, n := range Target.Decls { - if n.Op() == ir.ODCLFUNC { - deadcode(n.(*ir.Func)) - } - } - - // Phase 8: Decide how to capture closed variables. - // This needs to run before escape analysis, - // because variables captured by value do not escape. - base.Timer.Start("fe", "capturevars") - for _, n := range Target.Decls { - if n.Op() == ir.ODCLFUNC { - n := n.(*ir.Func) - if n.OClosure != nil { - ir.CurFunc = n - CaptureVars(n) - } - } - } - CaptureVarsComplete = true - ir.CurFunc = nil - - if base.Debug.TypecheckInl != 0 { - // Typecheck imported function bodies if Debug.l > 1, - // otherwise lazily when used or re-exported. 
- AllImportedBodies() - } + InitRuntime() } func AssignExpr(n ir.Node) ir.Node { return typecheck(n, ctxExpr|ctxAssign) } @@ -2247,144 +2144,6 @@ func CheckReturn(fn *ir.Func) { } } -func deadcode(fn *ir.Func) { - deadcodeslice(&fn.Body) - - if len(fn.Body) == 0 { - return - } - - for _, n := range fn.Body { - if len(n.Init()) > 0 { - return - } - switch n.Op() { - case ir.OIF: - n := n.(*ir.IfStmt) - if !ir.IsConst(n.Cond, constant.Bool) || len(n.Body) > 0 || len(n.Else) > 0 { - return - } - case ir.OFOR: - n := n.(*ir.ForStmt) - if !ir.IsConst(n.Cond, constant.Bool) || ir.BoolVal(n.Cond) { - return - } - default: - return - } - } - - fn.Body.Set([]ir.Node{ir.NewBlockStmt(base.Pos, nil)}) -} - -func deadcodeslice(nn *ir.Nodes) { - var lastLabel = -1 - for i, n := range *nn { - if n != nil && n.Op() == ir.OLABEL { - lastLabel = i - } - } - for i, n := range *nn { - // Cut is set to true when all nodes after i'th position - // should be removed. - // In other words, it marks whole slice "tail" as dead. - cut := false - if n == nil { - continue - } - if n.Op() == ir.OIF { - n := n.(*ir.IfStmt) - n.Cond = deadcodeexpr(n.Cond) - if ir.IsConst(n.Cond, constant.Bool) { - var body ir.Nodes - if ir.BoolVal(n.Cond) { - n.Else = ir.Nodes{} - body = n.Body - } else { - n.Body = ir.Nodes{} - body = n.Else - } - // If "then" or "else" branch ends with panic or return statement, - // it is safe to remove all statements after this node. - // isterminating is not used to avoid goto-related complications. - // We must be careful not to deadcode-remove labels, as they - // might be the target of a goto. See issue 28616. - if body := body; len(body) != 0 { - switch body[(len(body) - 1)].Op() { - case ir.ORETURN, ir.ORETJMP, ir.OPANIC: - if i > lastLabel { - cut = true - } - } - } - } - } - - deadcodeslice(n.PtrInit()) - switch n.Op() { - case ir.OBLOCK: - n := n.(*ir.BlockStmt) - deadcodeslice(&n.List) - case ir.OFOR: - n := n.(*ir.ForStmt) - deadcodeslice(&n.Body) - case ir.OIF: - n := n.(*ir.IfStmt) - deadcodeslice(&n.Body) - deadcodeslice(&n.Else) - case ir.ORANGE: - n := n.(*ir.RangeStmt) - deadcodeslice(&n.Body) - case ir.OSELECT: - n := n.(*ir.SelectStmt) - for _, cas := range n.Cases { - deadcodeslice(&cas.Body) - } - case ir.OSWITCH: - n := n.(*ir.SwitchStmt) - for _, cas := range n.Cases { - deadcodeslice(&cas.Body) - } - } - - if cut { - nn.Set((*nn)[:i+1]) - break - } - } -} - -func deadcodeexpr(n ir.Node) ir.Node { - // Perform dead-code elimination on short-circuited boolean - // expressions involving constants with the intent of - // producing a constant 'if' condition. - switch n.Op() { - case ir.OANDAND: - n := n.(*ir.LogicalExpr) - n.X = deadcodeexpr(n.X) - n.Y = deadcodeexpr(n.Y) - if ir.IsConst(n.X, constant.Bool) { - if ir.BoolVal(n.X) { - return n.Y // true && x => x - } else { - return n.X // false && x => false - } - } - case ir.OOROR: - n := n.(*ir.LogicalExpr) - n.X = deadcodeexpr(n.X) - n.Y = deadcodeexpr(n.Y) - if ir.IsConst(n.X, constant.Bool) { - if ir.BoolVal(n.X) { - return n.X // true || x => true - } else { - return n.Y // false || x => x - } - } - } - return n -} - // getIotaValue returns the current value for "iota", // or -1 if not within a ConstSpec. 
func getIotaValue() int64 { diff --git a/src/cmd/compile/internal/typecheck/universe.go b/src/cmd/compile/internal/typecheck/universe.go index fc8e962e28..054f094cd3 100644 --- a/src/cmd/compile/internal/typecheck/universe.go +++ b/src/cmd/compile/internal/typecheck/universe.go @@ -336,8 +336,8 @@ func makeErrorInterface() *types.Type { return types.NewInterface(types.NoPkg, []*types.Field{method}) } -// declareUniverse makes the universe block visible within the current package. -func declareUniverse() { +// DeclareUniverse makes the universe block visible within the current package. +func DeclareUniverse() { // Operationally, this is similar to a dot import of builtinpkg, except // that we silently skip symbols that are already declared in the // package block rather than emitting a redeclared symbol error. -- cgit v1.2.3-54-g00ecf From 3a4474cdfda0096b5d88c769f81ad81d6f0168c7 Mon Sep 17 00:00:00 2001 From: Matthew Dempsky Date: Thu, 31 Dec 2020 23:39:15 -0800 Subject: [dev.regabi] cmd/compile: some more manual shuffling More minor reshuffling of passes. Passes toolstash -cmp. Change-Id: I22633b3741f668fc5ee8579d7d610035ed57df1f Reviewed-on: https://go-review.googlesource.com/c/go/+/280975 Trust: Matthew Dempsky Run-TryBot: Matthew Dempsky TryBot-Result: Go Bot Reviewed-by: Cuong Manh Le --- src/cmd/compile/internal/gc/abiutils_test.go | 2 +- src/cmd/compile/internal/gc/main.go | 26 ++++++++++--------------- src/cmd/compile/internal/noder/noder.go | 14 +++++++++++++ src/cmd/compile/internal/typecheck/dcl.go | 2 +- src/cmd/compile/internal/typecheck/syms.go | 7 +------ src/cmd/compile/internal/typecheck/typecheck.go | 7 ------- src/cmd/compile/internal/typecheck/universe.go | 4 ++-- 7 files changed, 29 insertions(+), 33 deletions(-) (limited to 'src/cmd/compile/internal/noder/noder.go') diff --git a/src/cmd/compile/internal/gc/abiutils_test.go b/src/cmd/compile/internal/gc/abiutils_test.go index d535a6a34b..6fd0af1b1f 100644 --- a/src/cmd/compile/internal/gc/abiutils_test.go +++ b/src/cmd/compile/internal/gc/abiutils_test.go @@ -38,7 +38,7 @@ func TestMain(m *testing.M) { base.Ctxt.Bso = bufio.NewWriter(os.Stdout) types.PtrSize = ssagen.Arch.LinkArch.PtrSize types.RegSize = ssagen.Arch.LinkArch.RegSize - typecheck.Init() + typecheck.InitUniverse() os.Exit(m.Run()) } diff --git a/src/cmd/compile/internal/gc/main.go b/src/cmd/compile/internal/gc/main.go index 603619eb5a..df6a9d8e45 100644 --- a/src/cmd/compile/internal/gc/main.go +++ b/src/cmd/compile/internal/gc/main.go @@ -200,23 +200,15 @@ func Main(archInit func(*ssagen.ArchInfo)) { base.AutogeneratedPos = makePos(src.NewFileBase("", ""), 1, 0) - typecheck.Init() + typecheck.InitUniverse() - // Parse input. - base.Timer.Start("fe", "parse") - lines := noder.ParseFiles(flag.Args()) - ssagen.CgoSymABIs() - base.Timer.Stop() - base.Timer.AddEvent(int64(lines), "lines") - dwarfgen.RecordPackageName() + // Parse and typecheck input. + noder.LoadPackage(flag.Args()) - // Typecheck. - noder.Package() + dwarfgen.RecordPackageName() + ssagen.CgoSymABIs() - // With all user code typechecked, it's now safe to verify unused dot imports. - noder.CheckDotImports() - base.ExitIfErrors() - // Phase 6: Compute Addrtaken for names. + // Compute Addrtaken for names. // We need to wait until typechecking is done so that when we see &x[i] // we know that x has its address taken if x is an array, but not if x is a slice. // We compute Addrtaken in bulk here. 
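To illustrate the &x[i] point in the comment above, here is a minimal standalone sketch (illustrative only, not part of the CL; the variable names a and s are made up):

package main

func main() {
	var a [4]int
	pa := &a[0] // array: the element's storage is part of a, so a itself must be marked address-taken

	s := make([]int, 4)
	ps := &s[0] // slice: the pointer goes into the backing array, so the slice header s is not address-taken

	_, _ = pa, ps
}

Until typechecking has resolved the type of x, the compiler cannot tell which of the two cases applies, which is why the Addrtaken bits are computed in bulk after typechecking and only maintained incrementally from then on.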
@@ -227,7 +219,7 @@ func Main(archInit func(*ssagen.ArchInfo)) { } typecheck.IncrementalAddrtaken = true - // Phase 7: Eliminate some obviously dead code. + // Eliminate some obviously dead code. // Must happen after typechecking. for _, n := range typecheck.Target.Decls { if n.Op() == ir.ODCLFUNC { @@ -235,7 +227,7 @@ func Main(archInit func(*ssagen.ArchInfo)) { } } - // Phase 8: Decide how to capture closed variables. + // Decide how to capture closed variables. // This needs to run before escape analysis, // because variables captured by value do not escape. base.Timer.Start("fe", "capturevars") @@ -256,6 +248,7 @@ func Main(archInit func(*ssagen.ArchInfo)) { // otherwise lazily when used or re-exported. typecheck.AllImportedBodies() } + // Build init task. if initTask := pkginit.Task(); initTask != nil { typecheck.Export(initTask) @@ -311,6 +304,7 @@ func Main(archInit func(*ssagen.ArchInfo)) { // Prepare for SSA compilation. // This must be before peekitabs, because peekitabs // can trigger function compilation. + typecheck.InitRuntime() ssagen.InitConfig() // Just before compilation, compile itabs found on diff --git a/src/cmd/compile/internal/noder/noder.go b/src/cmd/compile/internal/noder/noder.go index 40569af317..29bfde3ff2 100644 --- a/src/cmd/compile/internal/noder/noder.go +++ b/src/cmd/compile/internal/noder/noder.go @@ -25,6 +25,20 @@ import ( "cmd/internal/src" ) +func LoadPackage(filenames []string) { + base.Timer.Start("fe", "parse") + lines := ParseFiles(filenames) + base.Timer.Stop() + base.Timer.AddEvent(int64(lines), "lines") + + // Typecheck. + Package() + + // With all user code typechecked, it's now safe to verify unused dot imports. + CheckDotImports() + base.ExitIfErrors() +} + // ParseFiles concurrently parses files into *syntax.File structures. // Each declaration in every *syntax.File is converted to a syntax tree // and its root represented by *Node is appended to Target.Decls. diff --git a/src/cmd/compile/internal/typecheck/dcl.go b/src/cmd/compile/internal/typecheck/dcl.go index fd55f472ab..daec9848d0 100644 --- a/src/cmd/compile/internal/typecheck/dcl.go +++ b/src/cmd/compile/internal/typecheck/dcl.go @@ -15,7 +15,7 @@ import ( "cmd/internal/src" ) -var DeclContext ir.Class // PEXTERN/PAUTO +var DeclContext ir.Class = ir.PEXTERN // PEXTERN/PAUTO func DeclFunc(sym *types.Sym, tfn ir.Ntype) *ir.Func { if tfn.Op() != ir.OTFUNC { diff --git a/src/cmd/compile/internal/typecheck/syms.go b/src/cmd/compile/internal/typecheck/syms.go index f0e230432a..2251062e16 100644 --- a/src/cmd/compile/internal/typecheck/syms.go +++ b/src/cmd/compile/internal/typecheck/syms.go @@ -65,11 +65,9 @@ func Lookup(name string) *types.Sym { // so that the compiler can generate calls to them, // but does not make them visible to user code. func InitRuntime() { + base.Timer.Start("fe", "loadsys") types.Block = 1 - inimport = true - TypecheckAllowed = true - typs := runtimeTypes() for _, d := range &runtimeDecls { sym := ir.Pkgs.Runtime.Lookup(d.name) @@ -83,9 +81,6 @@ func InitRuntime() { base.Fatalf("unhandled declaration tag %v", d.tag) } } - - TypecheckAllowed = false - inimport = false } // LookupRuntimeFunc looks up Go function name in package runtime. 
This function diff --git a/src/cmd/compile/internal/typecheck/typecheck.go b/src/cmd/compile/internal/typecheck/typecheck.go index 4c6ac21fc6..c8d82443a1 100644 --- a/src/cmd/compile/internal/typecheck/typecheck.go +++ b/src/cmd/compile/internal/typecheck/typecheck.go @@ -31,13 +31,6 @@ var ( NeedRuntimeType = func(*types.Type) {} ) -func Init() { - initUniverse() - DeclContext = ir.PEXTERN - base.Timer.Start("fe", "loadsys") - InitRuntime() -} - func AssignExpr(n ir.Node) ir.Node { return typecheck(n, ctxExpr|ctxAssign) } func Expr(n ir.Node) ir.Node { return typecheck(n, ctxExpr) } func Stmt(n ir.Node) ir.Node { return typecheck(n, ctxStmt) } diff --git a/src/cmd/compile/internal/typecheck/universe.go b/src/cmd/compile/internal/typecheck/universe.go index 054f094cd3..f1e7ed4273 100644 --- a/src/cmd/compile/internal/typecheck/universe.go +++ b/src/cmd/compile/internal/typecheck/universe.go @@ -90,8 +90,8 @@ var unsafeFuncs = [...]struct { {"Sizeof", ir.OSIZEOF}, } -// initUniverse initializes the universe block. -func initUniverse() { +// InitUniverse initializes the universe block. +func InitUniverse() { if types.PtrSize == 0 { base.Fatalf("typeinit before betypeinit") } -- cgit v1.2.3-54-g00ecf From 68e6fa4f6852b4ef0fe61789618c093f4e2185c9 Mon Sep 17 00:00:00 2001 From: Matthew Dempsky Date: Thu, 31 Dec 2020 23:45:36 -0800 Subject: [dev.regabi] cmd/compile: fix package-initialization order This CL fixes package initialization order by creating the init task before the general deadcode-removal pass. It also changes noder to emit zero-initialization assignments (i.e., OAS with nil RHS) for package-block variables, so that initOrder can tell the variables still need initialization. To allow this, we need to also extend the static-init code to recognize zero-initialization assignments. This doesn't pass toolstash -cmp, because it reorders some package initialization routines. Fixes #43444. Change-Id: I0da7996a62c85e15e97ce965298127e075390a7e Reviewed-on: https://go-review.googlesource.com/c/go/+/280976 Trust: Matthew Dempsky Run-TryBot: Matthew Dempsky TryBot-Result: Go Bot Reviewed-by: Cuong Manh Le --- src/cmd/compile/internal/gc/main.go | 10 +++--- src/cmd/compile/internal/noder/noder.go | 52 ++++++++++------------------ src/cmd/compile/internal/pkginit/init.go | 4 +++ src/cmd/compile/internal/staticinit/sched.go | 16 +++++++-- test/fixedbugs/issue43444.go | 28 +++++++++++++++ test/fixedbugs/issue43444.out | 1 + 6 files changed, 70 insertions(+), 41 deletions(-) create mode 100644 test/fixedbugs/issue43444.go create mode 100644 test/fixedbugs/issue43444.out (limited to 'src/cmd/compile/internal/noder/noder.go') diff --git a/src/cmd/compile/internal/gc/main.go b/src/cmd/compile/internal/gc/main.go index df6a9d8e45..c1f51e4f1d 100644 --- a/src/cmd/compile/internal/gc/main.go +++ b/src/cmd/compile/internal/gc/main.go @@ -208,6 +208,11 @@ func Main(archInit func(*ssagen.ArchInfo)) { dwarfgen.RecordPackageName() ssagen.CgoSymABIs() + // Build init task. + if initTask := pkginit.Task(); initTask != nil { + typecheck.Export(initTask) + } + // Compute Addrtaken for names. // We need to wait until typechecking is done so that when we see &x[i] // we know that x has its address taken if x is an array, but not if x is a slice. @@ -249,11 +254,6 @@ func Main(archInit func(*ssagen.ArchInfo)) { typecheck.AllImportedBodies() } - // Build init task. 
- if initTask := pkginit.Task(); initTask != nil { - typecheck.Export(initTask) - } - // Inlining base.Timer.Start("fe", "inlining") if base.Flag.LowerL != 0 { diff --git a/src/cmd/compile/internal/noder/noder.go b/src/cmd/compile/internal/noder/noder.go index 29bfde3ff2..cc8a1c7c89 100644 --- a/src/cmd/compile/internal/noder/noder.go +++ b/src/cmd/compile/internal/noder/noder.go @@ -474,24 +474,15 @@ func (p *noder) varDecl(decl *syntax.VarDecl) []ir.Node { p.checkUnused(pragma) } - p.setlineno(decl) - return DeclVars(names, typ, exprs) -} - -// declare variables from grammar -// new_name_list (type | [type] = expr_list) -func DeclVars(vl []*ir.Name, t ir.Ntype, el []ir.Node) []ir.Node { var init []ir.Node - doexpr := len(el) > 0 + p.setlineno(decl) - if len(el) == 1 && len(vl) > 1 { - e := el[0] - as2 := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil) - as2.Rhs = []ir.Node{e} - for _, v := range vl { + if len(names) > 1 && len(exprs) == 1 { + as2 := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, exprs) + for _, v := range names { as2.Lhs.Append(v) typecheck.Declare(v, typecheck.DeclContext) - v.Ntype = t + v.Ntype = typ v.Defn = as2 if ir.CurFunc != nil { init = append(init, ir.NewDecl(base.Pos, ir.ODCL, v)) @@ -501,34 +492,29 @@ func DeclVars(vl []*ir.Name, t ir.Ntype, el []ir.Node) []ir.Node { return append(init, as2) } - for i, v := range vl { + for i, v := range names { var e ir.Node - if doexpr { - if i >= len(el) { - base.Errorf("assignment mismatch: %d variables but %d values", len(vl), len(el)) - break - } - e = el[i] + if i < len(exprs) { + e = exprs[i] } typecheck.Declare(v, typecheck.DeclContext) - v.Ntype = t + v.Ntype = typ - if e != nil || ir.CurFunc != nil || ir.IsBlank(v) { - if ir.CurFunc != nil { - init = append(init, ir.NewDecl(base.Pos, ir.ODCL, v)) - } - as := ir.NewAssignStmt(base.Pos, v, e) - init = append(init, as) - if e != nil { - v.Defn = as - } + if ir.CurFunc != nil { + init = append(init, ir.NewDecl(base.Pos, ir.ODCL, v)) + } + as := ir.NewAssignStmt(base.Pos, v, e) + init = append(init, as) + if e != nil || ir.CurFunc == nil { + v.Defn = as } } - if len(el) > len(vl) { - base.Errorf("assignment mismatch: %d variables but %d values", len(vl), len(el)) + if len(exprs) != 0 && len(names) != len(exprs) { + base.Errorf("assignment mismatch: %d variables but %d values", len(names), len(exprs)) } + return init } diff --git a/src/cmd/compile/internal/pkginit/init.go b/src/cmd/compile/internal/pkginit/init.go index f1ffbb5933..24fe1a7628 100644 --- a/src/cmd/compile/internal/pkginit/init.go +++ b/src/cmd/compile/internal/pkginit/init.go @@ -6,6 +6,7 @@ package pkginit import ( "cmd/compile/internal/base" + "cmd/compile/internal/deadcode" "cmd/compile/internal/ir" "cmd/compile/internal/objw" "cmd/compile/internal/typecheck" @@ -68,6 +69,9 @@ func Task() *ir.Name { // Record user init functions. for _, fn := range typecheck.Target.Inits { + // Must happen after initOrder; see #43444. + deadcode.Func(fn) + // Skip init functions with empty bodies. 
if len(fn.Body) == 1 { if stmt := fn.Body[0]; stmt.Op() == ir.OBLOCK && len(stmt.(*ir.BlockStmt).List) == 0 { diff --git a/src/cmd/compile/internal/staticinit/sched.go b/src/cmd/compile/internal/staticinit/sched.go index 1b0af1b05d..8e4ce55954 100644 --- a/src/cmd/compile/internal/staticinit/sched.go +++ b/src/cmd/compile/internal/staticinit/sched.go @@ -86,17 +86,22 @@ func (s *Schedule) staticcopy(l *ir.Name, loff int64, rn *ir.Name, typ *types.Ty if rn.Class_ != ir.PEXTERN || rn.Sym().Pkg != types.LocalPkg { return false } - if rn.Defn == nil { // probably zeroed but perhaps supplied externally and of unknown value - return false - } if rn.Defn.Op() != ir.OAS { return false } if rn.Type().IsString() { // perhaps overwritten by cmd/link -X (#34675) return false } + if rn.Embed != nil { + return false + } orig := rn r := rn.Defn.(*ir.AssignStmt).Y + if r == nil { + // No explicit initialization value. Probably zeroed but perhaps + // supplied externally and of unknown value. + return false + } for r.Op() == ir.OCONVNOP && !types.Identical(r.Type(), typ) { r = r.(*ir.ConvExpr).X @@ -185,6 +190,11 @@ func (s *Schedule) staticcopy(l *ir.Name, loff int64, rn *ir.Name, typ *types.Ty } func (s *Schedule) StaticAssign(l *ir.Name, loff int64, r ir.Node, typ *types.Type) bool { + if r == nil { + // No explicit initialization value. Either zero or supplied + // externally. + return true + } for r.Op() == ir.OCONVNOP { r = r.(*ir.ConvExpr).X } diff --git a/test/fixedbugs/issue43444.go b/test/fixedbugs/issue43444.go new file mode 100644 index 0000000000..c430e1baf7 --- /dev/null +++ b/test/fixedbugs/issue43444.go @@ -0,0 +1,28 @@ +// run + +package main + +var sp = "" + +func f(name string, _ ...interface{}) int { + print(sp, name) + sp = " " + return 0 +} + +var a = f("a", x) +var b = f("b", y) +var c = f("c", z) +var d = func() int { + if false { + _ = z + } + return f("d") +}() +var e = f("e") + +var x int +var y int = 42 +var z int = func() int { return 42 }() + +func main() { println() } diff --git a/test/fixedbugs/issue43444.out b/test/fixedbugs/issue43444.out new file mode 100644 index 0000000000..22d6a0dc69 --- /dev/null +++ b/test/fixedbugs/issue43444.out @@ -0,0 +1 @@ +e a b c d -- cgit v1.2.3-54-g00ecf From 2f2d4b4e68ab2fc448a1c2daf793b11ccde2fb16 Mon Sep 17 00:00:00 2001 From: Matthew Dempsky Date: Sat, 2 Jan 2021 01:04:19 -0800 Subject: [dev.regabi] cmd/compile: remove {Ptr,Set}Init from Node interface This CL separates out PtrInit and SetInit into a new InitNode extension interface, and adds a new TakeInit helper function for taking and clearing the Init list (if any) from a Node. This allows removing miniNode.SetInit and miniNode.PtrInit, which in turn allow getting rid of immutableEmptyNodes, and will allow simplification of the Nodes API. It would be nice to get rid of the default Init method too, but there's way more code that expects to be able to call that at the moment, so that'll have to wait. Passes toolstash -cmp. 
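A minimal sketch of the intended call-site pattern (illustrative only; the
walk and inline diffs below show the actual conversions):

    // Old pattern: read the init list, then clear it by hand:
    //     init := n.Init()
    //     n.PtrInit().Set(nil)
    // New pattern: TakeInit returns n's init list and, when it is
    // non-empty, clears it through the InitNode interface:
    init := ir.TakeInit(n)
    walkStmtList(init)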
Change-Id: Ia8c18fab9555b774376f7f43eeecfde4f07b5946 Reviewed-on: https://go-review.googlesource.com/c/go/+/281001 Trust: Matthew Dempsky Run-TryBot: Matthew Dempsky TryBot-Result: Go Bot Reviewed-by: Cuong Manh Le --- src/cmd/compile/internal/deadcode/deadcode.go | 4 +- src/cmd/compile/internal/inline/inl.go | 4 +- src/cmd/compile/internal/ir/mini.go | 8 +--- src/cmd/compile/internal/ir/node.go | 52 +++++++++++-------------- src/cmd/compile/internal/noder/noder.go | 2 +- src/cmd/compile/internal/typecheck/typecheck.go | 2 +- src/cmd/compile/internal/walk/assign.go | 10 ++--- src/cmd/compile/internal/walk/builtin.go | 2 +- src/cmd/compile/internal/walk/expr.go | 6 +-- src/cmd/compile/internal/walk/order.go | 9 ++--- src/cmd/compile/internal/walk/range.go | 2 +- src/cmd/compile/internal/walk/select.go | 9 ++--- src/cmd/compile/internal/walk/stmt.go | 12 +++--- src/cmd/compile/internal/walk/walk.go | 3 +- 14 files changed, 52 insertions(+), 73 deletions(-) (limited to 'src/cmd/compile/internal/noder/noder.go') diff --git a/src/cmd/compile/internal/deadcode/deadcode.go b/src/cmd/compile/internal/deadcode/deadcode.go index 5453cfe396..474532bc17 100644 --- a/src/cmd/compile/internal/deadcode/deadcode.go +++ b/src/cmd/compile/internal/deadcode/deadcode.go @@ -84,7 +84,9 @@ func stmts(nn *ir.Nodes) { } } - stmts(n.PtrInit()) + if len(n.Init()) != 0 { + stmts(n.(ir.InitNode).PtrInit()) + } switch n.Op() { case ir.OBLOCK: n := n.(*ir.BlockStmt) diff --git a/src/cmd/compile/internal/inline/inl.go b/src/cmd/compile/internal/inline/inl.go index 31b97a3787..24fbe3dac0 100644 --- a/src/cmd/compile/internal/inline/inl.go +++ b/src/cmd/compile/internal/inline/inl.go @@ -639,7 +639,7 @@ func inlCallee(fn ir.Node) *ir.Func { return nil } -func inlParam(t *types.Field, as ir.Node, inlvars map[*ir.Name]*ir.Name) ir.Node { +func inlParam(t *types.Field, as ir.InitNode, inlvars map[*ir.Name]*ir.Name) ir.Node { if t.Nname == nil { return ir.BlankNode } @@ -741,7 +741,7 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b callee := n.X for callee.Op() == ir.OCONVNOP { conv := callee.(*ir.ConvExpr) - ninit.Append(conv.PtrInit().Take()...) + ninit.Append(ir.TakeInit(conv)...) callee = conv.X } if callee.Op() != ir.ONAME && callee.Op() != ir.OCLOSURE && callee.Op() != ir.OMETHEXPR { diff --git a/src/cmd/compile/internal/ir/mini.go b/src/cmd/compile/internal/ir/mini.go index 9270132621..93aa15abec 100644 --- a/src/cmd/compile/internal/ir/mini.go +++ b/src/cmd/compile/internal/ir/mini.go @@ -80,13 +80,7 @@ func (n *miniNode) SetDiag(x bool) { n.bits.set(miniDiag, x) } // Empty, immutable graph structure. -func (n *miniNode) Init() Nodes { return Nodes{} } -func (n *miniNode) PtrInit() *Nodes { return &immutableEmptyNodes } -func (n *miniNode) SetInit(x Nodes) { - if x != nil { - panic(n.no("SetInit")) - } -} +func (n *miniNode) Init() Nodes { return Nodes{} } // Additional functionality unavailable. diff --git a/src/cmd/compile/internal/ir/node.go b/src/cmd/compile/internal/ir/node.go index 9536503085..9945cc987a 100644 --- a/src/cmd/compile/internal/ir/node.go +++ b/src/cmd/compile/internal/ir/node.go @@ -34,8 +34,6 @@ type Node interface { // Abstract graph structure, for generic traversals. Op() Op Init() Nodes - PtrInit() *Nodes - SetInit(x Nodes) // Fields specific to certain Ops only. 
Type() *types.Type @@ -90,6 +88,20 @@ func MayBeShared(n Node) bool { return false } +type InitNode interface { + Node + PtrInit() *Nodes + SetInit(x Nodes) +} + +func TakeInit(n Node) Nodes { + init := n.Init() + if len(init) != 0 { + n.(InitNode).SetInit(nil) + } + return init +} + //go:generate stringer -type=Op -trimprefix=O node.go type Op uint8 @@ -311,35 +323,15 @@ const ( // a slice to save space. type Nodes []Node -// immutableEmptyNodes is an immutable, empty Nodes list. -// The methods that would modify it panic instead. -var immutableEmptyNodes = Nodes{} - -func (n *Nodes) mutate() { - if n == &immutableEmptyNodes { - panic("immutable Nodes.Set") - } -} - // Set sets n to a slice. // This takes ownership of the slice. -func (n *Nodes) Set(s []Node) { - if n == &immutableEmptyNodes { - if len(s) == 0 { - // Allow immutableEmptyNodes.Set(nil) (a no-op). - return - } - n.mutate() - } - *n = s -} +func (n *Nodes) Set(s []Node) { *n = s } // Append appends entries to Nodes. func (n *Nodes) Append(a ...Node) { if len(a) == 0 { return } - n.mutate() *n = append(*n, a...) } @@ -349,7 +341,6 @@ func (n *Nodes) Prepend(a ...Node) { if len(a) == 0 { return } - n.mutate() *n = append(a, *n...) } @@ -544,15 +535,16 @@ func SetPos(n Node) src.XPos { // The result of InitExpr MUST be assigned back to n, e.g. // n.Left = InitExpr(init, n.Left) -func InitExpr(init []Node, n Node) Node { +func InitExpr(init []Node, expr Node) Node { if len(init) == 0 { - return n + return expr } - if MayBeShared(n) { + + n, ok := expr.(InitNode) + if !ok || MayBeShared(n) { // Introduce OCONVNOP to hold init list. - old := n - n = NewConvExpr(base.Pos, OCONVNOP, nil, old) - n.SetType(old.Type()) + n = NewConvExpr(base.Pos, OCONVNOP, nil, expr) + n.SetType(expr.Type()) n.SetTypecheck(1) } diff --git a/src/cmd/compile/internal/noder/noder.go b/src/cmd/compile/internal/noder/noder.go index cc8a1c7c89..948833f46e 100644 --- a/src/cmd/compile/internal/noder/noder.go +++ b/src/cmd/compile/internal/noder/noder.go @@ -1200,7 +1200,7 @@ func (p *noder) stmtFall(stmt syntax.Stmt, fallOK bool) ir.Node { panic("unhandled Stmt") } -func (p *noder) assignList(expr syntax.Expr, defn ir.Node, colas bool) []ir.Node { +func (p *noder) assignList(expr syntax.Expr, defn ir.InitNode, colas bool) []ir.Node { if !colas { return p.exprList(expr) } diff --git a/src/cmd/compile/internal/typecheck/typecheck.go b/src/cmd/compile/internal/typecheck/typecheck.go index 0822a4624c..0ee66df2cf 100644 --- a/src/cmd/compile/internal/typecheck/typecheck.go +++ b/src/cmd/compile/internal/typecheck/typecheck.go @@ -914,7 +914,7 @@ func typecheck1(n ir.Node, top int) ir.Node { // Each must execute its own return n. } -func typecheckargs(n ir.Node) { +func typecheckargs(n ir.InitNode) { var list []ir.Node switch n := n.(type) { default: diff --git a/src/cmd/compile/internal/walk/assign.go b/src/cmd/compile/internal/walk/assign.go index c01079d236..762baa0dd9 100644 --- a/src/cmd/compile/internal/walk/assign.go +++ b/src/cmd/compile/internal/walk/assign.go @@ -17,7 +17,7 @@ import ( // walkAssign walks an OAS (AssignExpr) or OASOP (AssignOpExpr) node. func walkAssign(init *ir.Nodes, n ir.Node) ir.Node { - init.Append(n.PtrInit().Take()...) + init.Append(ir.TakeInit(n)...) var left, right ir.Node switch n.Op() { @@ -124,7 +124,7 @@ func walkAssignDotType(n *ir.AssignListStmt, init *ir.Nodes) ir.Node { // walkAssignFunc walks an OAS2FUNC node. func walkAssignFunc(init *ir.Nodes, n *ir.AssignListStmt) ir.Node { - init.Append(n.PtrInit().Take()...) 
+ init.Append(ir.TakeInit(n)...) r := n.Rhs[0] walkExprListSafe(n.Lhs, init) @@ -142,7 +142,7 @@ func walkAssignFunc(init *ir.Nodes, n *ir.AssignListStmt) ir.Node { // walkAssignList walks an OAS2 node. func walkAssignList(init *ir.Nodes, n *ir.AssignListStmt) ir.Node { - init.Append(n.PtrInit().Take()...) + init.Append(ir.TakeInit(n)...) walkExprListSafe(n.Lhs, init) walkExprListSafe(n.Rhs, init) return ir.NewBlockStmt(src.NoXPos, ascompatee(ir.OAS, n.Lhs, n.Rhs, init)) @@ -150,7 +150,7 @@ func walkAssignList(init *ir.Nodes, n *ir.AssignListStmt) ir.Node { // walkAssignMapRead walks an OAS2MAPR node. func walkAssignMapRead(init *ir.Nodes, n *ir.AssignListStmt) ir.Node { - init.Append(n.PtrInit().Take()...) + init.Append(ir.TakeInit(n)...) r := n.Rhs[0].(*ir.IndexExpr) walkExprListSafe(n.Lhs, init) @@ -213,7 +213,7 @@ func walkAssignMapRead(init *ir.Nodes, n *ir.AssignListStmt) ir.Node { // walkAssignRecv walks an OAS2RECV node. func walkAssignRecv(init *ir.Nodes, n *ir.AssignListStmt) ir.Node { - init.Append(n.PtrInit().Take()...) + init.Append(ir.TakeInit(n)...) r := n.Rhs[0].(*ir.UnaryExpr) // recv walkExprListSafe(n.Lhs, init) diff --git a/src/cmd/compile/internal/walk/builtin.go b/src/cmd/compile/internal/walk/builtin.go index fe6045cbbd..13837eeffc 100644 --- a/src/cmd/compile/internal/walk/builtin.go +++ b/src/cmd/compile/internal/walk/builtin.go @@ -206,7 +206,7 @@ func walkCopy(n *ir.BinaryExpr, init *ir.Nodes, runtimecall bool) ir.Node { // walkDelete walks an ODELETE node. func walkDelete(init *ir.Nodes, n *ir.CallExpr) ir.Node { - init.Append(n.PtrInit().Take()...) + init.Append(ir.TakeInit(n)...) map_ := n.Args[0] key := n.Args[1] map_ = walkExpr(map_, init) diff --git a/src/cmd/compile/internal/walk/expr.go b/src/cmd/compile/internal/walk/expr.go index 1fd09b42af..7dfac30094 100644 --- a/src/cmd/compile/internal/walk/expr.go +++ b/src/cmd/compile/internal/walk/expr.go @@ -26,7 +26,7 @@ func walkExpr(n ir.Node, init *ir.Nodes) ir.Node { return n } - if init == n.PtrInit() { + if n, ok := n.(ir.InitNode); ok && init == n.PtrInit() { // not okay to use n->ninit when walking n, // because we might replace n with some other node // and would lose the init list. @@ -35,7 +35,7 @@ func walkExpr(n ir.Node, init *ir.Nodes) ir.Node { if len(n.Init()) != 0 { walkStmtList(n.Init()) - init.Append(n.PtrInit().Take()...) + init.Append(ir.TakeInit(n)...) } lno := ir.SetPos(n) @@ -359,7 +359,7 @@ func safeExpr(n ir.Node, init *ir.Nodes) ir.Node { if len(n.Init()) != 0 { walkStmtList(n.Init()) - init.Append(n.PtrInit().Take()...) + init.Append(ir.TakeInit(n)...) } switch n.Op() { diff --git a/src/cmd/compile/internal/walk/order.go b/src/cmd/compile/internal/walk/order.go index e40c877ea9..679b795270 100644 --- a/src/cmd/compile/internal/walk/order.go +++ b/src/cmd/compile/internal/walk/order.go @@ -466,8 +466,7 @@ func (o *orderState) init(n ir.Node) { } return } - o.stmtList(n.Init()) - n.PtrInit().Set(nil) + o.stmtList(ir.TakeInit(n)) } // call orders the call expression n. @@ -938,8 +937,7 @@ func (o *orderState) stmt(n ir.Node) { if !ir.IsAutoTmp(recv.X) { recv.X = o.copyExpr(recv.X) } - init := *r.PtrInit() - r.PtrInit().Set(nil) + init := ir.TakeInit(r) colas := r.Def do := func(i int, t *types.Type) { @@ -1000,8 +998,7 @@ func (o *orderState) stmt(n ir.Node) { // TODO(mdempsky): Is this actually necessary? // walkselect appears to walk Ninit. - cas.Body.Prepend(cas.Init()...) - cas.PtrInit().Set(nil) + cas.Body.Prepend(ir.TakeInit(cas)...) 
} o.out = append(o.out, n) diff --git a/src/cmd/compile/internal/walk/range.go b/src/cmd/compile/internal/walk/range.go index 49a69e9751..3092b71d72 100644 --- a/src/cmd/compile/internal/walk/range.go +++ b/src/cmd/compile/internal/walk/range.go @@ -210,7 +210,7 @@ func walkRange(nrange *ir.RangeStmt) ir.Node { a.SetTypecheck(1) a.Lhs = []ir.Node{hv1, hb} a.Rhs = []ir.Node{ir.NewUnaryExpr(base.Pos, ir.ORECV, ha)} - *nfor.Cond.PtrInit() = []ir.Node{a} + nfor.Cond = ir.InitExpr([]ir.Node{a}, nfor.Cond) if v1 == nil { body = nil } else { diff --git a/src/cmd/compile/internal/walk/select.go b/src/cmd/compile/internal/walk/select.go index 1c5e1d7e64..c6e9b71384 100644 --- a/src/cmd/compile/internal/walk/select.go +++ b/src/cmd/compile/internal/walk/select.go @@ -17,8 +17,7 @@ func walkSelect(sel *ir.SelectStmt) { base.Fatalf("double walkselect") } - init := sel.Init() - sel.PtrInit().Set(nil) + init := ir.TakeInit(sel) init = append(init, walkSelectCases(sel.Cases)...) sel.Cases = nil @@ -45,8 +44,7 @@ func walkSelectCases(cases []*ir.CommClause) []ir.Node { l := cas.Init() if cas.Comm != nil { // not default: n := cas.Comm - l = append(l, n.Init()...) - n.PtrInit().Set(nil) + l = append(l, ir.TakeInit(n)...) switch n.Op() { default: base.Fatalf("select %v", n.Op()) @@ -171,8 +169,7 @@ func walkSelectCases(cases []*ir.CommClause) []ir.Node { for _, cas := range cases { ir.SetPos(cas) - init = append(init, cas.Init()...) - cas.PtrInit().Set(nil) + init = append(init, ir.TakeInit(cas)...) n := cas.Comm if n == nil { // default: diff --git a/src/cmd/compile/internal/walk/stmt.go b/src/cmd/compile/internal/walk/stmt.go index 8641a58e2e..3440c66506 100644 --- a/src/cmd/compile/internal/walk/stmt.go +++ b/src/cmd/compile/internal/walk/stmt.go @@ -55,8 +55,7 @@ func walkStmt(n ir.Node) ir.Node { if n.Typecheck() == 0 { base.Fatalf("missing typecheck: %+v", n) } - init := n.Init() - n.PtrInit().Set(nil) + init := ir.TakeInit(n) n = walkExpr(n, &init) if n.Op() == ir.ONAME { // copy rewrote to a statement list and a temp for the length. @@ -67,7 +66,7 @@ func walkStmt(n ir.Node) ir.Node { if len(init) > 0 { switch n.Op() { case ir.OAS, ir.OAS2, ir.OBLOCK: - n.PtrInit().Prepend(init...) + n.(ir.InitNode).PtrInit().Prepend(init...) default: init.Append(n) @@ -191,9 +190,8 @@ func walkDecl(n *ir.Decl) ir.Node { // walkFor walks an OFOR or OFORUNTIL node. func walkFor(n *ir.ForStmt) ir.Node { if n.Cond != nil { - walkStmtList(n.Cond.Init()) - init := n.Cond.Init() - n.Cond.PtrInit().Set(nil) + init := ir.TakeInit(n.Cond) + walkStmtList(init) n.Cond = walkExpr(n.Cond, &init) n.Cond = ir.InitExpr(init, n.Cond) } @@ -257,7 +255,7 @@ func walkIf(n *ir.IfStmt) ir.Node { func wrapCall(n *ir.CallExpr, init *ir.Nodes) ir.Node { if len(n.Init()) != 0 { walkStmtList(n.Init()) - init.Append(n.PtrInit().Take()...) + init.Append(ir.TakeInit(n)...) 
} isBuiltinCall := n.Op() != ir.OCALLFUNC && n.Op() != ir.OCALLMETH && n.Op() != ir.OCALLINTER diff --git a/src/cmd/compile/internal/walk/walk.go b/src/cmd/compile/internal/walk/walk.go index 25f53a8e7c..57c2d43753 100644 --- a/src/cmd/compile/internal/walk/walk.go +++ b/src/cmd/compile/internal/walk/walk.go @@ -81,8 +81,7 @@ func walkRecv(n *ir.UnaryExpr) ir.Node { if n.Typecheck() == 0 { base.Fatalf("missing typecheck: %+v", n) } - init := n.Init() - n.PtrInit().Set(nil) + init := ir.TakeInit(n) n.X = walkExpr(n.X, &init) call := walkExpr(mkcall1(chanfn("chanrecv1", 2, n.X.Type()), nil, &init, n.X, typecheck.NodNil()), &init) -- cgit v1.2.3-54-g00ecf From f2538033c08a8c215a19610680d66f5909c5bcdd Mon Sep 17 00:00:00 2001 From: Matthew Dempsky Date: Sat, 2 Jan 2021 01:27:29 -0800 Subject: [dev.regabi] cmd/compile: remove Nodes.Set [generated] Just "=". It's cleaner. Passes toolstash -cmp. [git-generate] cd src/cmd/compile/internal/ir pkgs=$(go list . ../...) rf ' ex '"$(echo $pkgs)"' { var l Nodes var p *Nodes p.Set(l) -> *p = l } ex '"$(echo $pkgs)"' { var n InitNode var l Nodes *n.PtrInit() = l -> n.SetInit(l) } rm Nodes.Set ' Change-Id: Ic97219792243667146a02776553942ae1189ff7d Reviewed-on: https://go-review.googlesource.com/c/go/+/281002 Trust: Matthew Dempsky Run-TryBot: Matthew Dempsky TryBot-Result: Go Bot Reviewed-by: Cuong Manh Le --- src/cmd/compile/internal/deadcode/deadcode.go | 4 ++-- src/cmd/compile/internal/inline/inl.go | 16 ++++++++-------- src/cmd/compile/internal/ir/expr.go | 10 +++++----- src/cmd/compile/internal/ir/node.go | 4 ---- src/cmd/compile/internal/ir/stmt.go | 16 ++++++++-------- src/cmd/compile/internal/noder/noder.go | 14 +++++++------- src/cmd/compile/internal/pkginit/init.go | 2 +- src/cmd/compile/internal/reflectdata/reflect.go | 2 +- src/cmd/compile/internal/ssagen/abi.go | 2 +- src/cmd/compile/internal/typecheck/const.go | 4 ++-- src/cmd/compile/internal/typecheck/func.go | 8 ++++---- src/cmd/compile/internal/typecheck/iimport.go | 16 ++++++++-------- src/cmd/compile/internal/typecheck/typecheck.go | 8 ++++---- src/cmd/compile/internal/walk/assign.go | 4 ++-- src/cmd/compile/internal/walk/builtin.go | 6 +++--- src/cmd/compile/internal/walk/closure.go | 4 ++-- src/cmd/compile/internal/walk/expr.go | 6 +++--- src/cmd/compile/internal/walk/order.go | 12 ++++++------ src/cmd/compile/internal/walk/range.go | 2 +- src/cmd/compile/internal/walk/select.go | 8 ++++---- src/cmd/compile/internal/walk/stmt.go | 4 ++-- 21 files changed, 74 insertions(+), 78 deletions(-) (limited to 'src/cmd/compile/internal/noder/noder.go') diff --git a/src/cmd/compile/internal/deadcode/deadcode.go b/src/cmd/compile/internal/deadcode/deadcode.go index 474532bc17..c409320fc4 100644 --- a/src/cmd/compile/internal/deadcode/deadcode.go +++ b/src/cmd/compile/internal/deadcode/deadcode.go @@ -38,7 +38,7 @@ func Func(fn *ir.Func) { } } - fn.Body.Set([]ir.Node{ir.NewBlockStmt(base.Pos, nil)}) + fn.Body = []ir.Node{ir.NewBlockStmt(base.Pos, nil)} } func stmts(nn *ir.Nodes) { @@ -114,7 +114,7 @@ func stmts(nn *ir.Nodes) { } if cut { - nn.Set((*nn)[:i+1]) + *nn = (*nn)[:i+1] break } } diff --git a/src/cmd/compile/internal/inline/inl.go b/src/cmd/compile/internal/inline/inl.go index 24fbe3dac0..2887abb061 100644 --- a/src/cmd/compile/internal/inline/inl.go +++ b/src/cmd/compile/internal/inline/inl.go @@ -544,7 +544,7 @@ func inlnode(n ir.Node, maxCost int32, inlMap map[*ir.Func]bool, edit func(ir.No if as := n; as.Op() == ir.OAS2FUNC { as := as.(*ir.AssignListStmt) if as.Rhs[0].Op() == 
ir.OINLCALL { - as.Rhs.Set(inlconv2list(as.Rhs[0].(*ir.InlinedCallExpr))) + as.Rhs = inlconv2list(as.Rhs[0].(*ir.InlinedCallExpr)) as.SetOp(ir.OAS2) as.SetTypecheck(0) n = typecheck.Stmt(as) @@ -867,7 +867,7 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b vas.Y.SetType(param.Type) } else { lit := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(param.Type), nil) - lit.List.Set(varargs) + lit.List = varargs vas.Y = lit } } @@ -944,9 +944,9 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b //dumplist("ninit post", ninit); call := ir.NewInlinedCallExpr(base.Pos, nil, nil) - call.PtrInit().Set(ninit) - call.Body.Set(body) - call.ReturnVars.Set(retvars) + *call.PtrInit() = ninit + call.Body = body + call.ReturnVars = retvars call.SetType(n.Type()) call.SetTypecheck(1) @@ -1120,7 +1120,7 @@ func (subst *inlsubst) node(n ir.Node) ir.Node { for _, n := range subst.retvars { as.Lhs.Append(n) } - as.Rhs.Set(subst.list(n.Results)) + as.Rhs = subst.list(n.Results) if subst.delayretvars { for _, n := range as.Lhs { @@ -1139,7 +1139,7 @@ func (subst *inlsubst) node(n ir.Node) ir.Node { n := n.(*ir.BranchStmt) m := ir.Copy(n).(*ir.BranchStmt) m.SetPos(subst.updatedPos(m.Pos())) - m.PtrInit().Set(nil) + *m.PtrInit() = nil p := fmt.Sprintf("%s·%d", n.Label.Name, inlgen) m.Label = typecheck.Lookup(p) return m @@ -1148,7 +1148,7 @@ func (subst *inlsubst) node(n ir.Node) ir.Node { n := n.(*ir.LabelStmt) m := ir.Copy(n).(*ir.LabelStmt) m.SetPos(subst.updatedPos(m.Pos())) - m.PtrInit().Set(nil) + *m.PtrInit() = nil p := fmt.Sprintf("%s·%d", n.Label.Name, inlgen) m.Label = typecheck.Lookup(p) return m diff --git a/src/cmd/compile/internal/ir/expr.go b/src/cmd/compile/internal/ir/expr.go index 88fbdff1e0..1b88427146 100644 --- a/src/cmd/compile/internal/ir/expr.go +++ b/src/cmd/compile/internal/ir/expr.go @@ -67,7 +67,7 @@ func NewAddStringExpr(pos src.XPos, list []Node) *AddStringExpr { n := &AddStringExpr{} n.pos = pos n.op = OADDSTR - n.List.Set(list) + n.List = list return n } @@ -173,7 +173,7 @@ func NewCallExpr(pos src.XPos, op Op, fun Node, args []Node) *CallExpr { n.pos = pos n.orig = n n.SetOp(op) - n.Args.Set(args) + n.Args = args return n } @@ -231,7 +231,7 @@ func NewCompLitExpr(pos src.XPos, op Op, typ Ntype, list []Node) *CompLitExpr { n := &CompLitExpr{Ntype: typ} n.pos = pos n.SetOp(op) - n.List.Set(list) + n.List = list n.orig = n return n } @@ -364,8 +364,8 @@ func NewInlinedCallExpr(pos src.XPos, body, retvars []Node) *InlinedCallExpr { n := &InlinedCallExpr{} n.pos = pos n.op = OINLCALL - n.Body.Set(body) - n.ReturnVars.Set(retvars) + n.Body = body + n.ReturnVars = retvars return n } diff --git a/src/cmd/compile/internal/ir/node.go b/src/cmd/compile/internal/ir/node.go index 9945cc987a..9d1ee17aa8 100644 --- a/src/cmd/compile/internal/ir/node.go +++ b/src/cmd/compile/internal/ir/node.go @@ -323,10 +323,6 @@ const ( // a slice to save space. type Nodes []Node -// Set sets n to a slice. -// This takes ownership of the slice. -func (n *Nodes) Set(s []Node) { *n = s } - // Append appends entries to Nodes. 
func (n *Nodes) Append(a ...Node) { if len(a) == 0 { diff --git a/src/cmd/compile/internal/ir/stmt.go b/src/cmd/compile/internal/ir/stmt.go index 9c2cba9a08..b13c6b7795 100644 --- a/src/cmd/compile/internal/ir/stmt.go +++ b/src/cmd/compile/internal/ir/stmt.go @@ -70,8 +70,8 @@ func NewAssignListStmt(pos src.XPos, op Op, lhs, rhs []Node) *AssignListStmt { n := &AssignListStmt{} n.pos = pos n.SetOp(op) - n.Lhs.Set(lhs) - n.Rhs.Set(rhs) + n.Lhs = lhs + n.Rhs = rhs return n } @@ -141,7 +141,7 @@ func NewBlockStmt(pos src.XPos, list []Node) *BlockStmt { } } n.op = OBLOCK - n.List.Set(list) + n.List = list return n } @@ -216,7 +216,7 @@ func NewForStmt(pos src.XPos, init Node, cond, post Node, body []Node) *ForStmt if init != nil { n.init = []Node{init} } - n.Body.Set(body) + n.Body = body return n } @@ -262,8 +262,8 @@ func NewIfStmt(pos src.XPos, cond Node, body, els []Node) *IfStmt { n := &IfStmt{Cond: cond} n.pos = pos n.op = OIF - n.Body.Set(body) - n.Else.Set(els) + n.Body = body + n.Else = els return n } @@ -315,7 +315,7 @@ func NewRangeStmt(pos src.XPos, key, value, x Node, body []Node) *RangeStmt { n := &RangeStmt{X: x, Key: key, Value: value} n.pos = pos n.op = ORANGE - n.Body.Set(body) + n.Body = body return n } @@ -331,7 +331,7 @@ func NewReturnStmt(pos src.XPos, results []Node) *ReturnStmt { n.pos = pos n.op = ORETURN n.orig = n - n.Results.Set(results) + n.Results = results return n } diff --git a/src/cmd/compile/internal/noder/noder.go b/src/cmd/compile/internal/noder/noder.go index 948833f46e..678e378291 100644 --- a/src/cmd/compile/internal/noder/noder.go +++ b/src/cmd/compile/internal/noder/noder.go @@ -245,7 +245,7 @@ func (p *noder) funcBody(fn *ir.Func, block *syntax.BlockStmt) { if body == nil { body = []ir.Node{ir.NewBlockStmt(base.Pos, nil)} } - fn.Body.Set(body) + fn.Body = body base.Pos = p.makeXPos(block.Rbrace) fn.Endlineno = base.Pos @@ -772,7 +772,7 @@ func (p *noder) expr(expr syntax.Expr) ir.Node { for i, e := range l { l[i] = p.wrapname(expr.ElemList[i], e) } - n.List.Set(l) + n.List = l base.Pos = p.makeXPos(expr.Rbrace) return n case *syntax.KeyValueExpr: @@ -1128,8 +1128,8 @@ func (p *noder) stmtFall(stmt syntax.Stmt, fallOK bool) ir.Node { if list, ok := stmt.Lhs.(*syntax.ListExpr); ok && len(list.ElemList) != 1 || len(rhs) != 1 { n := ir.NewAssignListStmt(p.pos(stmt), ir.OAS2, nil, nil) n.Def = stmt.Op == syntax.Def - n.Lhs.Set(p.assignList(stmt.Lhs, n, n.Def)) - n.Rhs.Set(rhs) + n.Lhs = p.assignList(stmt.Lhs, n, n.Def) + n.Rhs = rhs return n } @@ -1276,7 +1276,7 @@ func (p *noder) ifStmt(stmt *syntax.IfStmt) ir.Node { e := p.stmt(stmt.Else) if e.Op() == ir.OBLOCK { e := e.(*ir.BlockStmt) - n.Else.Set(e.List) + n.Else = e.List } else { n.Else = []ir.Node{e} } @@ -1301,7 +1301,7 @@ func (p *noder) forStmt(stmt *syntax.ForStmt) ir.Node { n.Value = lhs[1] } } - n.Body.Set(p.blockStmt(stmt.Body)) + n.Body = p.blockStmt(stmt.Body) p.closeAnotherScope() return n } @@ -1359,7 +1359,7 @@ func (p *noder) caseClauses(clauses []*syntax.CaseClause, tswitch *ir.TypeSwitch body = body[:len(body)-1] } - n.Body.Set(p.stmtsFall(body, true)) + n.Body = p.stmtsFall(body, true) if l := len(n.Body); l > 0 && n.Body[l-1].Op() == ir.OFALL { if tswitch != nil { base.Errorf("cannot fallthrough in type switch") diff --git a/src/cmd/compile/internal/pkginit/init.go b/src/cmd/compile/internal/pkginit/init.go index 24fe1a7628..a32e09879c 100644 --- a/src/cmd/compile/internal/pkginit/init.go +++ b/src/cmd/compile/internal/pkginit/init.go @@ -49,7 +49,7 @@ func Task() *ir.Name { fn.Dcl = 
append(fn.Dcl, typecheck.InitTodoFunc.Dcl...) typecheck.InitTodoFunc.Dcl = nil - fn.Body.Set(nf) + fn.Body = nf typecheck.FinishFuncBody() typecheck.Func(fn) diff --git a/src/cmd/compile/internal/reflectdata/reflect.go b/src/cmd/compile/internal/reflectdata/reflect.go index 5f88262ddf..f926765326 100644 --- a/src/cmd/compile/internal/reflectdata/reflect.go +++ b/src/cmd/compile/internal/reflectdata/reflect.go @@ -1798,7 +1798,7 @@ func methodWrapper(rcvr *types.Type, method *types.Field) *obj.LSym { } else { fn.SetWrapper(true) // ignore frame for panic+recover matching call := ir.NewCallExpr(base.Pos, ir.OCALL, dot, nil) - call.Args.Set(ir.ParamNames(tfn.Type())) + call.Args = ir.ParamNames(tfn.Type()) call.IsDDD = tfn.Type().IsVariadic() if method.Type.NumResults() > 0 { ret := ir.NewReturnStmt(base.Pos, nil) diff --git a/src/cmd/compile/internal/ssagen/abi.go b/src/cmd/compile/internal/ssagen/abi.go index cd5d962b91..1c013dd2d8 100644 --- a/src/cmd/compile/internal/ssagen/abi.go +++ b/src/cmd/compile/internal/ssagen/abi.go @@ -303,7 +303,7 @@ func makeABIWrapper(f *ir.Func, wrapperABI obj.ABI) { tail = ir.NewBranchStmt(base.Pos, ir.ORETJMP, f.Nname.Sym()) } else { call := ir.NewCallExpr(base.Pos, ir.OCALL, f.Nname, nil) - call.Args.Set(ir.ParamNames(tfn.Type())) + call.Args = ir.ParamNames(tfn.Type()) call.IsDDD = tfn.Type().IsVariadic() tail = call if tfn.Type().NumResults() > 0 { diff --git a/src/cmd/compile/internal/typecheck/const.go b/src/cmd/compile/internal/typecheck/const.go index 5259218ef9..d6bf101974 100644 --- a/src/cmd/compile/internal/typecheck/const.go +++ b/src/cmd/compile/internal/typecheck/const.go @@ -509,7 +509,7 @@ func EvalConst(n ir.Node) ir.Node { } nl := ir.Copy(n).(*ir.AddStringExpr) - nl.List.Set(s[i:i2]) + nl.List = s[i:i2] newList = append(newList, OrigConst(nl, constant.MakeString(strings.Join(strs, "")))) i = i2 - 1 } else { @@ -518,7 +518,7 @@ func EvalConst(n ir.Node) ir.Node { } nn := ir.Copy(n).(*ir.AddStringExpr) - nn.List.Set(newList) + nn.List = newList return nn case ir.OCAP, ir.OLEN: diff --git a/src/cmd/compile/internal/typecheck/func.go b/src/cmd/compile/internal/typecheck/func.go index 296755028d..8592397004 100644 --- a/src/cmd/compile/internal/typecheck/func.go +++ b/src/cmd/compile/internal/typecheck/func.go @@ -52,7 +52,7 @@ func FixVariadicCall(call *ir.CallExpr) { extra[i] = nil // allow GC } - call.Args.Set(append(args[:vi], slice)) + call.Args = append(args[:vi], slice) call.IsDDD = true } @@ -313,7 +313,7 @@ func MethodValueWrapper(dot *ir.SelectorExpr) *ir.Func { } call := ir.NewCallExpr(base.Pos, ir.OCALL, ir.NewSelectorExpr(base.Pos, ir.OXDOT, ptr, meth), nil) - call.Args.Set(ir.ParamNames(tfn.Type())) + call.Args = ir.ParamNames(tfn.Type()) call.IsDDD = tfn.Type().IsVariadic() if t0.NumResults() != 0 { ret := ir.NewReturnStmt(base.Pos, nil) @@ -323,7 +323,7 @@ func MethodValueWrapper(dot *ir.SelectorExpr) *ir.Func { body = append(body, call) } - fn.Body.Set(body) + fn.Body = body FinishFuncBody() Func(fn) @@ -798,7 +798,7 @@ func tcMake(n *ir.CallExpr) ir.Node { return n } - n.Args.Set(nil) + n.Args = nil l := args[0] l = typecheck(l, ctxType) t := l.Type() diff --git a/src/cmd/compile/internal/typecheck/iimport.go b/src/cmd/compile/internal/typecheck/iimport.go index 00ecd9b819..0caac362e3 100644 --- a/src/cmd/compile/internal/typecheck/iimport.go +++ b/src/cmd/compile/internal/typecheck/iimport.go @@ -779,7 +779,7 @@ func (r *importReader) caseList(switchExpr ir.Node) []*ir.CaseClause { cases := make([]*ir.CaseClause, r.uint64()) 
for i := range cases { cas := ir.NewCaseStmt(r.pos(), nil, nil) - cas.List.Set(r.stmtList()) + cas.List = r.stmtList() if namedTypeSwitch { // Note: per-case variables will have distinct, dotted // names after import. That's okay: swt.go only needs @@ -789,7 +789,7 @@ func (r *importReader) caseList(switchExpr ir.Node) []*ir.CaseClause { cas.Var = caseVar caseVar.Defn = switchExpr } - cas.Body.Set(r.stmtList()) + cas.Body = r.stmtList() cases[i] = cas } return cases @@ -932,7 +932,7 @@ func (r *importReader) node() ir.Node { case ir.OCOPY, ir.OCOMPLEX, ir.OREAL, ir.OIMAG, ir.OAPPEND, ir.OCAP, ir.OCLOSE, ir.ODELETE, ir.OLEN, ir.OMAKE, ir.ONEW, ir.OPANIC, ir.ORECOVER, ir.OPRINT, ir.OPRINTN: n := builtinCall(r.pos(), op) - n.Args.Set(r.exprList()) + n.Args = r.exprList() if op == ir.OAPPEND { n.IsDDD = r.bool() } @@ -945,7 +945,7 @@ func (r *importReader) node() ir.Node { pos := r.pos() init := r.stmtList() n := ir.NewCallExpr(pos, ir.OCALL, r.expr(), r.exprList()) - n.PtrInit().Set(init) + *n.PtrInit() = init n.IsDDD = r.bool() return n @@ -1033,14 +1033,14 @@ func (r *importReader) node() ir.Node { case ir.OIF: pos, init := r.pos(), r.stmtList() n := ir.NewIfStmt(pos, r.expr(), r.stmtList(), r.stmtList()) - n.PtrInit().Set(init) + *n.PtrInit() = init return n case ir.OFOR: pos, init := r.pos(), r.stmtList() cond, post := r.exprsOrNil() n := ir.NewForStmt(pos, nil, cond, post, r.stmtList()) - n.PtrInit().Set(init) + *n.PtrInit() = init return n case ir.ORANGE: @@ -1052,7 +1052,7 @@ func (r *importReader) node() ir.Node { pos := r.pos() init := r.stmtList() n := ir.NewSelectStmt(pos, r.commList()) - n.PtrInit().Set(init) + *n.PtrInit() = init return n case ir.OSWITCH: @@ -1060,7 +1060,7 @@ func (r *importReader) node() ir.Node { init := r.stmtList() x, _ := r.exprsOrNil() n := ir.NewSwitchStmt(pos, x, r.caseList(x)) - n.PtrInit().Set(init) + *n.PtrInit() = init return n // case OCASE: diff --git a/src/cmd/compile/internal/typecheck/typecheck.go b/src/cmd/compile/internal/typecheck/typecheck.go index 0ee66df2cf..d0922e8508 100644 --- a/src/cmd/compile/internal/typecheck/typecheck.go +++ b/src/cmd/compile/internal/typecheck/typecheck.go @@ -64,7 +64,7 @@ func FuncBody(n *ir.Func) { CheckUnused(n) CheckReturn(n) if base.Errors() > errorsBefore { - n.Body.Set(nil) // type errors; do not compile + n.Body = nil // type errors; do not compile } } @@ -971,9 +971,9 @@ func typecheckargs(n ir.InitNode) { switch n := n.(type) { case *ir.CallExpr: - n.Args.Set(list) + n.Args = list case *ir.ReturnStmt: - n.Results.Set(list) + n.Results = list } n.PtrInit().Append(Stmt(as)) @@ -1687,7 +1687,7 @@ func stringtoruneslit(n *ir.ConvExpr) ir.Node { } nn := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(n.Type()), nil) - nn.List.Set(l) + nn.List = l return Expr(nn) } diff --git a/src/cmd/compile/internal/walk/assign.go b/src/cmd/compile/internal/walk/assign.go index 762baa0dd9..7f3e4cc995 100644 --- a/src/cmd/compile/internal/walk/assign.go +++ b/src/cmd/compile/internal/walk/assign.go @@ -264,7 +264,7 @@ func walkReturn(n *ir.ReturnStmt) ir.Node { // move function calls out, to make ascompatee's job easier. 
walkExprListSafe(n.Results, n.PtrInit()) - n.Results.Set(ascompatee(n.Op(), rl, n.Results, n.PtrInit())) + n.Results = ascompatee(n.Op(), rl, n.Results, n.PtrInit()) return n } walkExprList(n.Results, n.PtrInit()) @@ -281,7 +281,7 @@ func walkReturn(n *ir.ReturnStmt) ir.Node { a := ir.NewAssignStmt(base.Pos, nname, rhs[i]) res[i] = convas(a, n.PtrInit()) } - n.Results.Set(res) + n.Results = res return n } diff --git a/src/cmd/compile/internal/walk/builtin.go b/src/cmd/compile/internal/walk/builtin.go index 13837eeffc..a061181e2f 100644 --- a/src/cmd/compile/internal/walk/builtin.go +++ b/src/cmd/compile/internal/walk/builtin.go @@ -531,7 +531,7 @@ func walkPrint(nn *ir.CallExpr, init *ir.Nodes) ir.Node { t = append(t, n) } t = append(t, ir.NewString("\n")) - nn.Args.Set(t) + nn.Args = t } // Collapse runs of constant strings. @@ -551,7 +551,7 @@ func walkPrint(nn *ir.CallExpr, init *ir.Nodes) ir.Node { i++ } } - nn.Args.Set(t) + nn.Args = t calls := []ir.Node{mkcall("printlock", nil, init)} for i, n := range nn.Args { @@ -653,7 +653,7 @@ func walkPrint(nn *ir.CallExpr, init *ir.Nodes) ir.Node { walkExprList(calls, init) r := ir.NewBlockStmt(base.Pos, nil) - r.List.Set(calls) + r.List = calls return walkStmt(typecheck.Stmt(r)) } diff --git a/src/cmd/compile/internal/walk/closure.go b/src/cmd/compile/internal/walk/closure.go index 62d2a362b1..fcdb43f113 100644 --- a/src/cmd/compile/internal/walk/closure.go +++ b/src/cmd/compile/internal/walk/closure.go @@ -107,7 +107,7 @@ func Closure(fn *ir.Func) { if len(body) > 0 { typecheck.Stmts(body) - fn.Enter.Set(body) + fn.Enter = body fn.SetNeedctxt(true) } } @@ -131,7 +131,7 @@ func walkClosure(clo *ir.ClosureExpr, init *ir.Nodes) ir.Node { clos := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(typ), nil) clos.SetEsc(clo.Esc()) - clos.List.Set(append([]ir.Node{ir.NewUnaryExpr(base.Pos, ir.OCFUNC, fn.Nname)}, closureArgs(clo)...)) + clos.List = append([]ir.Node{ir.NewUnaryExpr(base.Pos, ir.OCFUNC, fn.Nname)}, closureArgs(clo)...) addr := typecheck.NodAddr(clos) addr.SetEsc(clo.Esc()) diff --git a/src/cmd/compile/internal/walk/expr.go b/src/cmd/compile/internal/walk/expr.go index 7dfac30094..8a56526a36 100644 --- a/src/cmd/compile/internal/walk/expr.go +++ b/src/cmd/compile/internal/walk/expr.go @@ -477,7 +477,7 @@ func walkAddString(n *ir.AddStringExpr, init *ir.Nodes) ir.Node { cat := typecheck.LookupRuntime(fn) r := ir.NewCallExpr(base.Pos, ir.OCALL, cat, nil) - r.Args.Set(args) + r.Args = args r1 := typecheck.Expr(r) r1 = walkExpr(r1, init) r1.SetType(n.Type()) @@ -562,8 +562,8 @@ func walkCall1(n *ir.CallExpr, init *ir.Nodes) { } } - n.Args.Set(tempAssigns) - n.Rargs.Set(args) + n.Args = tempAssigns + n.Rargs = args } // walkDivMod walks an ODIV or OMOD node. diff --git a/src/cmd/compile/internal/walk/order.go b/src/cmd/compile/internal/walk/order.go index 679b795270..767af07414 100644 --- a/src/cmd/compile/internal/walk/order.go +++ b/src/cmd/compile/internal/walk/order.go @@ -423,7 +423,7 @@ func orderBlock(n *ir.Nodes, free map[string][]*ir.Name) { order.edge() order.stmtList(*n) order.cleanTemp(mark) - n.Set(order.out) + *n = order.out } // exprInPlace orders the side effects in *np and @@ -1233,9 +1233,9 @@ func (o *orderState) expr1(n, lhs ir.Node) ir.Node { // If left-hand side doesn't cause a short-circuit, issue right-hand side. 
nif := ir.NewIfStmt(base.Pos, r, nil, nil) if n.Op() == ir.OANDAND { - nif.Body.Set(gen) + nif.Body = gen } else { - nif.Else.Set(gen) + nif.Else = gen } o.out = append(o.out, nif) return r @@ -1401,7 +1401,7 @@ func (o *orderState) expr1(n, lhs ir.Node) ir.Node { statics = append(statics, r) } - n.List.Set(statics) + n.List = statics if len(dynamics) == 0 { return n @@ -1448,8 +1448,8 @@ func (o *orderState) as2(n *ir.AssignListStmt) { o.out = append(o.out, n) as := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil) - as.Lhs.Set(left) - as.Rhs.Set(tmplist) + as.Lhs = left + as.Rhs = tmplist o.stmt(typecheck.Stmt(as)) } diff --git a/src/cmd/compile/internal/walk/range.go b/src/cmd/compile/internal/walk/range.go index 3092b71d72..9225c429f0 100644 --- a/src/cmd/compile/internal/walk/range.go +++ b/src/cmd/compile/internal/walk/range.go @@ -429,7 +429,7 @@ func arrayClear(loop *ir.RangeStmt, v1, v2, a ir.Node) ir.Node { // i = len(a) - 1 // } n := ir.NewIfStmt(base.Pos, nil, nil, nil) - n.Body.Set(nil) + n.Body = nil n.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, ir.NewUnaryExpr(base.Pos, ir.OLEN, a), ir.NewInt(0)) // hp = &a[0] diff --git a/src/cmd/compile/internal/walk/select.go b/src/cmd/compile/internal/walk/select.go index c6e9b71384..776b020155 100644 --- a/src/cmd/compile/internal/walk/select.go +++ b/src/cmd/compile/internal/walk/select.go @@ -22,7 +22,7 @@ func walkSelect(sel *ir.SelectStmt) { init = append(init, walkSelectCases(sel.Cases)...) sel.Cases = nil - sel.Compiled.Set(init) + sel.Compiled = init walkStmtList(sel.Compiled) base.Pos = lno @@ -104,7 +104,7 @@ func walkSelectCases(cases []*ir.CommClause) []ir.Node { n := cas.Comm ir.SetPos(n) r := ir.NewIfStmt(base.Pos, nil, nil, nil) - r.PtrInit().Set(cas.Init()) + *r.PtrInit() = cas.Init() var call ir.Node switch n.Op() { default: @@ -136,8 +136,8 @@ func walkSelectCases(cases []*ir.CommClause) []ir.Node { } r.Cond = typecheck.Expr(call) - r.Body.Set(cas.Body) - r.Else.Set(append(dflt.Init(), dflt.Body...)) + r.Body = cas.Body + r.Else = append(dflt.Init(), dflt.Body...) return []ir.Node{r, ir.NewBranchStmt(base.Pos, ir.OBREAK, nil)} } diff --git a/src/cmd/compile/internal/walk/stmt.go b/src/cmd/compile/internal/walk/stmt.go index 3440c66506..460c0a7c10 100644 --- a/src/cmd/compile/internal/walk/stmt.go +++ b/src/cmd/compile/internal/walk/stmt.go @@ -61,7 +61,7 @@ func walkStmt(n ir.Node) ir.Node { // copy rewrote to a statement list and a temp for the length. // Throw away the temp to avoid plain values as statements. n = ir.NewBlockStmt(n.Pos(), init) - init.Set(nil) + init = nil } if len(init) > 0 { switch n.Op() { @@ -265,7 +265,7 @@ func wrapCall(n *ir.CallExpr, init *ir.Nodes) ir.Node { last := len(n.Args) - 1 if va := n.Args[last]; va.Op() == ir.OSLICELIT { va := va.(*ir.CompLitExpr) - n.Args.Set(append(n.Args[:last], va.List...)) + n.Args = append(n.Args[:last], va.List...) n.IsDDD = false } } -- cgit v1.2.3-54-g00ecf From f24e40c14a0a767b6663c85dc900bb9e6b7c2d8e Mon Sep 17 00:00:00 2001 From: Matthew Dempsky Date: Sun, 3 Jan 2021 20:14:00 -0800 Subject: [dev.regabi] cmd/compile: remove Name.Class_ accessors These aren't part of the Node interface anymore, so no need to keep them around. Passes toolstash -cmp. [git-generate] cd src/cmd/compile/internal/ir : Fix one off case that causes trouble for rf. sed -i -e 's/n.SetClass(ir.PAUTO)/n.Class_ = ir.PAUTO/' ../ssa/export_test.go pkgs=$(go list . ../...) 
rf ' ex '"$(echo $pkgs)"' { var n *Name var c Class n.Class() -> n.Class_ n.SetClass(c) -> n.Class_ = c } rm Name.Class rm Name.SetClass mv Name.Class_ Name.Class ' Change-Id: Ifb304bf4691a8c455456aabd8aa77178d4a49500 Reviewed-on: https://go-review.googlesource.com/c/go/+/281294 Trust: Matthew Dempsky Run-TryBot: Matthew Dempsky TryBot-Result: Go Bot Reviewed-by: Cuong Manh Le --- src/cmd/compile/internal/dwarfgen/dwarf.go | 24 ++++++------- src/cmd/compile/internal/escape/escape.go | 26 +++++++------- src/cmd/compile/internal/gc/abiutilsaux_test.go | 2 +- src/cmd/compile/internal/gc/compile.go | 2 +- src/cmd/compile/internal/gc/export.go | 2 +- src/cmd/compile/internal/gc/obj.go | 2 +- src/cmd/compile/internal/inline/inl.go | 24 ++++++------- src/cmd/compile/internal/ir/expr.go | 6 ++-- src/cmd/compile/internal/ir/func.go | 4 +-- src/cmd/compile/internal/ir/name.go | 8 ++--- src/cmd/compile/internal/ir/scc.go | 2 +- src/cmd/compile/internal/liveness/plive.go | 14 ++++---- src/cmd/compile/internal/noder/noder.go | 6 ++-- src/cmd/compile/internal/pkginit/init.go | 4 +-- src/cmd/compile/internal/pkginit/initorder.go | 10 +++--- src/cmd/compile/internal/reflectdata/reflect.go | 8 ++--- src/cmd/compile/internal/ssa/deadstore.go | 8 ++--- src/cmd/compile/internal/ssa/export_test.go | 2 +- src/cmd/compile/internal/ssagen/nowb.go | 2 +- src/cmd/compile/internal/ssagen/pgen.go | 14 ++++---- src/cmd/compile/internal/ssagen/pgen_test.go | 4 +-- src/cmd/compile/internal/ssagen/ssa.go | 48 ++++++++++++------------- src/cmd/compile/internal/staticdata/data.go | 6 ++-- src/cmd/compile/internal/staticinit/sched.go | 6 ++-- src/cmd/compile/internal/typecheck/dcl.go | 4 +-- src/cmd/compile/internal/typecheck/export.go | 2 +- src/cmd/compile/internal/typecheck/func.go | 4 +-- src/cmd/compile/internal/typecheck/iexport.go | 8 ++--- src/cmd/compile/internal/typecheck/iimport.go | 2 +- src/cmd/compile/internal/typecheck/syms.go | 2 +- src/cmd/compile/internal/typecheck/typecheck.go | 4 +-- src/cmd/compile/internal/typecheck/universe.go | 2 +- src/cmd/compile/internal/walk/assign.go | 4 +-- src/cmd/compile/internal/walk/closure.go | 6 ++-- src/cmd/compile/internal/walk/complit.go | 8 ++--- src/cmd/compile/internal/walk/convert.go | 6 ++-- src/cmd/compile/internal/walk/expr.go | 2 +- src/cmd/compile/internal/walk/order.go | 2 +- src/cmd/compile/internal/walk/race.go | 2 +- src/cmd/compile/internal/walk/stmt.go | 2 +- src/cmd/compile/internal/walk/walk.go | 6 ++-- 41 files changed, 149 insertions(+), 151 deletions(-) (limited to 'src/cmd/compile/internal/noder/noder.go') diff --git a/src/cmd/compile/internal/dwarfgen/dwarf.go b/src/cmd/compile/internal/dwarfgen/dwarf.go index 6eac9d547e..1534adaac8 100644 --- a/src/cmd/compile/internal/dwarfgen/dwarf.go +++ b/src/cmd/compile/internal/dwarfgen/dwarf.go @@ -76,7 +76,7 @@ func Info(fnsym *obj.LSym, infosym *obj.LSym, curfn interface{}) ([]dwarf.Scope, if n.Op() != ir.ONAME { // might be OTYPE or OLITERAL continue } - switch n.Class_ { + switch n.Class { case ir.PAUTO: if !n.Used() { // Text == nil -> generating abstract function @@ -171,7 +171,7 @@ func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *ir.Func, apDecls []*ir if c == '.' || n.Type().IsUntyped() { continue } - if n.Class_ == ir.PPARAM && !ssagen.TypeOK(n.Type()) { + if n.Class == ir.PPARAM && !ssagen.TypeOK(n.Type()) { // SSA-able args get location lists, and may move in and // out of registers, so those are handled elsewhere. 
// Autos and named output params seem to get handled @@ -186,10 +186,10 @@ func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *ir.Func, apDecls []*ir typename := dwarf.InfoPrefix + types.TypeSymName(n.Type()) decls = append(decls, n) abbrev := dwarf.DW_ABRV_AUTO_LOCLIST - isReturnValue := (n.Class_ == ir.PPARAMOUT) - if n.Class_ == ir.PPARAM || n.Class_ == ir.PPARAMOUT { + isReturnValue := (n.Class == ir.PPARAMOUT) + if n.Class == ir.PPARAM || n.Class == ir.PPARAMOUT { abbrev = dwarf.DW_ABRV_PARAM_LOCLIST - } else if n.Class_ == ir.PAUTOHEAP { + } else if n.Class == ir.PAUTOHEAP { // If dcl in question has been promoted to heap, do a bit // of extra work to recover original class (auto or param); // see issue 30908. This insures that we get the proper @@ -198,9 +198,9 @@ func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *ir.Func, apDecls []*ir // and not stack). // TODO(thanm): generate a better location expression stackcopy := n.Stackcopy - if stackcopy != nil && (stackcopy.Class_ == ir.PPARAM || stackcopy.Class_ == ir.PPARAMOUT) { + if stackcopy != nil && (stackcopy.Class == ir.PPARAM || stackcopy.Class == ir.PPARAMOUT) { abbrev = dwarf.DW_ABRV_PARAM_LOCLIST - isReturnValue = (stackcopy.Class_ == ir.PPARAMOUT) + isReturnValue = (stackcopy.Class == ir.PPARAMOUT) } } inlIndex := 0 @@ -275,7 +275,7 @@ func createSimpleVar(fnsym *obj.LSym, n *ir.Name) *dwarf.Var { var abbrev int var offs int64 - switch n.Class_ { + switch n.Class { case ir.PAUTO: offs = n.FrameOffset() abbrev = dwarf.DW_ABRV_AUTO @@ -291,7 +291,7 @@ func createSimpleVar(fnsym *obj.LSym, n *ir.Name) *dwarf.Var { abbrev = dwarf.DW_ABRV_PARAM offs = n.FrameOffset() + base.Ctxt.FixedFrameSize() default: - base.Fatalf("createSimpleVar unexpected class %v for node %v", n.Class_, n) + base.Fatalf("createSimpleVar unexpected class %v for node %v", n.Class, n) } typename := dwarf.InfoPrefix + types.TypeSymName(n.Type()) @@ -308,7 +308,7 @@ func createSimpleVar(fnsym *obj.LSym, n *ir.Name) *dwarf.Var { declpos := base.Ctxt.InnermostPos(declPos(n)) return &dwarf.Var{ Name: n.Sym().Name, - IsReturnValue: n.Class_ == ir.PPARAMOUT, + IsReturnValue: n.Class == ir.PPARAMOUT, IsInlFormal: n.InlFormal(), Abbrev: abbrev, StackOffset: int32(offs), @@ -353,7 +353,7 @@ func createComplexVar(fnsym *obj.LSym, fn *ir.Func, varID ssa.VarID) *dwarf.Var n := debug.Vars[varID] var abbrev int - switch n.Class_ { + switch n.Class { case ir.PAUTO: abbrev = dwarf.DW_ABRV_AUTO_LOCLIST case ir.PPARAM, ir.PPARAMOUT: @@ -377,7 +377,7 @@ func createComplexVar(fnsym *obj.LSym, fn *ir.Func, varID ssa.VarID) *dwarf.Var declpos := base.Ctxt.InnermostPos(n.Pos()) dvar := &dwarf.Var{ Name: n.Sym().Name, - IsReturnValue: n.Class_ == ir.PPARAMOUT, + IsReturnValue: n.Class == ir.PPARAMOUT, IsInlFormal: n.InlFormal(), Abbrev: abbrev, Type: base.Ctxt.Lookup(typename), diff --git a/src/cmd/compile/internal/escape/escape.go b/src/cmd/compile/internal/escape/escape.go index 1aba0a3fd2..6a2e685fe8 100644 --- a/src/cmd/compile/internal/escape/escape.go +++ b/src/cmd/compile/internal/escape/escape.go @@ -519,7 +519,7 @@ func (e *escape) exprSkipInit(k hole, n ir.Node) { case ir.ONAME: n := n.(*ir.Name) - if n.Class_ == ir.PFUNC || n.Class_ == ir.PEXTERN { + if n.Class == ir.PFUNC || n.Class == ir.PEXTERN { return } e.flow(k, e.oldLoc(n)) @@ -791,7 +791,7 @@ func (e *escape) addr(n ir.Node) hole { base.Fatalf("unexpected addr: %v", n) case ir.ONAME: n := n.(*ir.Name) - if n.Class_ == ir.PEXTERN { + if n.Class == ir.PEXTERN { break } k = e.oldLoc(n).asHole() @@ -899,7 
+899,7 @@ func (e *escape) call(ks []hole, call, where ir.Node) { switch call.Op() { case ir.OCALLFUNC: switch v := ir.StaticValue(call.X); { - case v.Op() == ir.ONAME && v.(*ir.Name).Class_ == ir.PFUNC: + case v.Op() == ir.ONAME && v.(*ir.Name).Class == ir.PFUNC: fn = v.(*ir.Name) case v.Op() == ir.OCLOSURE: fn = v.(*ir.ClosureExpr).Func.Nname @@ -1589,7 +1589,7 @@ func (b *batch) finish(fns []*ir.Func) { } func (l *location) isName(c ir.Class) bool { - return l.n != nil && l.n.Op() == ir.ONAME && l.n.(*ir.Name).Class_ == c + return l.n != nil && l.n.Op() == ir.ONAME && l.n.(*ir.Name).Class == c } const numEscResults = 7 @@ -1882,7 +1882,7 @@ func HeapAllocReason(n ir.Node) string { // Parameters are always passed via the stack. if n.Op() == ir.ONAME { n := n.(*ir.Name) - if n.Class_ == ir.PPARAM || n.Class_ == ir.PPARAMOUT { + if n.Class == ir.PPARAM || n.Class == ir.PPARAMOUT { return "" } } @@ -1939,7 +1939,7 @@ func addrescapes(n ir.Node) { // if this is a tmpname (PAUTO), it was tagged by tmpname as not escaping. // on PPARAM it means something different. - if n.Class_ == ir.PAUTO && n.Esc() == ir.EscNever { + if n.Class == ir.PAUTO && n.Esc() == ir.EscNever { break } @@ -1949,7 +1949,7 @@ func addrescapes(n ir.Node) { break } - if n.Class_ != ir.PPARAM && n.Class_ != ir.PPARAMOUT && n.Class_ != ir.PAUTO { + if n.Class != ir.PPARAM && n.Class != ir.PPARAMOUT && n.Class != ir.PAUTO { break } @@ -2003,7 +2003,7 @@ func moveToHeap(n *ir.Name) { if base.Flag.CompilingRuntime { base.Errorf("%v escapes to heap, not allowed in runtime", n) } - if n.Class_ == ir.PAUTOHEAP { + if n.Class == ir.PAUTOHEAP { ir.Dump("n", n) base.Fatalf("double move to heap") } @@ -2022,7 +2022,7 @@ func moveToHeap(n *ir.Name) { // Parameters have a local stack copy used at function start/end // in addition to the copy in the heap that may live longer than // the function. - if n.Class_ == ir.PPARAM || n.Class_ == ir.PPARAMOUT { + if n.Class == ir.PPARAM || n.Class == ir.PPARAMOUT { if n.FrameOffset() == types.BADWIDTH { base.Fatalf("addrescapes before param assignment") } @@ -2034,9 +2034,9 @@ func moveToHeap(n *ir.Name) { stackcopy := typecheck.NewName(n.Sym()) stackcopy.SetType(n.Type()) stackcopy.SetFrameOffset(n.FrameOffset()) - stackcopy.Class_ = n.Class_ + stackcopy.Class = n.Class stackcopy.Heapaddr = heapaddr - if n.Class_ == ir.PPARAMOUT { + if n.Class == ir.PPARAMOUT { // Make sure the pointer to the heap copy is kept live throughout the function. // The function could panic at any point, and then a defer could recover. // Thus, we need the pointer to the heap copy always available so the @@ -2058,7 +2058,7 @@ func moveToHeap(n *ir.Name) { } // Parameters are before locals, so can stop early. // This limits the search even in functions with many local variables. - if d.Class_ == ir.PAUTO { + if d.Class == ir.PAUTO { break } } @@ -2069,7 +2069,7 @@ func moveToHeap(n *ir.Name) { } // Modify n in place so that uses of n now mean indirection of the heapaddr. 
- n.Class_ = ir.PAUTOHEAP + n.Class = ir.PAUTOHEAP n.SetFrameOffset(0) n.Heapaddr = heapaddr n.SetEsc(ir.EscHeap) diff --git a/src/cmd/compile/internal/gc/abiutilsaux_test.go b/src/cmd/compile/internal/gc/abiutilsaux_test.go index e6590beac0..9386b554b0 100644 --- a/src/cmd/compile/internal/gc/abiutilsaux_test.go +++ b/src/cmd/compile/internal/gc/abiutilsaux_test.go @@ -21,7 +21,7 @@ import ( func mkParamResultField(t *types.Type, s *types.Sym, which ir.Class) *types.Field { field := types.NewField(src.NoXPos, s, t) n := typecheck.NewName(s) - n.Class_ = which + n.Class = which field.Nname = n n.SetType(t) return field diff --git a/src/cmd/compile/internal/gc/compile.go b/src/cmd/compile/internal/gc/compile.go index 1b3dd672f3..25b1c76737 100644 --- a/src/cmd/compile/internal/gc/compile.go +++ b/src/cmd/compile/internal/gc/compile.go @@ -83,7 +83,7 @@ func compile(fn *ir.Func) { // because symbols must be allocated before the parallel // phase of the compiler. for _, n := range fn.Dcl { - switch n.Class_ { + switch n.Class { case ir.PPARAM, ir.PPARAMOUT, ir.PAUTO: if liveness.ShouldTrack(n) && n.Addrtaken() { reflectdata.WriteType(n.Type()) diff --git a/src/cmd/compile/internal/gc/export.go b/src/cmd/compile/internal/gc/export.go index c65c6c8335..356fcfa671 100644 --- a/src/cmd/compile/internal/gc/export.go +++ b/src/cmd/compile/internal/gc/export.go @@ -83,7 +83,7 @@ type exporter struct { func (p *exporter) markObject(n ir.Node) { if n.Op() == ir.ONAME { n := n.(*ir.Name) - if n.Class_ == ir.PFUNC { + if n.Class == ir.PFUNC { inline.Inline_Flood(n, typecheck.Export) } } diff --git a/src/cmd/compile/internal/gc/obj.go b/src/cmd/compile/internal/gc/obj.go index 30cfac1b71..fbb2145e1b 100644 --- a/src/cmd/compile/internal/gc/obj.go +++ b/src/cmd/compile/internal/gc/obj.go @@ -188,7 +188,7 @@ func dumpGlobal(n *ir.Name) { if n.Type() == nil { base.Fatalf("external %v nil type\n", n) } - if n.Class_ == ir.PFUNC { + if n.Class == ir.PFUNC { return } if n.Sym().Pkg != types.LocalPkg { diff --git a/src/cmd/compile/internal/inline/inl.go b/src/cmd/compile/internal/inline/inl.go index b9b424b74d..6f5f6499ce 100644 --- a/src/cmd/compile/internal/inline/inl.go +++ b/src/cmd/compile/internal/inline/inl.go @@ -199,8 +199,8 @@ func Inline_Flood(n *ir.Name, exportsym func(*ir.Name)) { if n == nil { return } - if n.Op() != ir.ONAME || n.Class_ != ir.PFUNC { - base.Fatalf("inlFlood: unexpected %v, %v, %v", n, n.Op(), n.Class_) + if n.Op() != ir.ONAME || n.Class != ir.PFUNC { + base.Fatalf("inlFlood: unexpected %v, %v, %v", n, n.Op(), n.Class) } fn := n.Func if fn == nil { @@ -227,7 +227,7 @@ func Inline_Flood(n *ir.Name, exportsym func(*ir.Name)) { case ir.ONAME: n := n.(*ir.Name) - switch n.Class_ { + switch n.Class { case ir.PFUNC: Inline_Flood(n, exportsym) exportsym(n) @@ -292,7 +292,7 @@ func (v *hairyVisitor) doNode(n ir.Node) error { // runtime.throw is a "cheap call" like panic in normal code. 
if n.X.Op() == ir.ONAME { name := n.X.(*ir.Name) - if name.Class_ == ir.PFUNC && types.IsRuntimePkg(name.Sym().Pkg) { + if name.Class == ir.PFUNC && types.IsRuntimePkg(name.Sym().Pkg) { fn := name.Sym().Name if fn == "getcallerpc" || fn == "getcallersp" { return errors.New("call to " + fn) @@ -407,7 +407,7 @@ func (v *hairyVisitor) doNode(n ir.Node) error { case ir.ONAME: n := n.(*ir.Name) - if n.Class_ == ir.PAUTO { + if n.Class == ir.PAUTO { v.usedLocals[n] = true } @@ -627,7 +627,7 @@ func inlCallee(fn ir.Node) *ir.Func { return n.Func case ir.ONAME: fn := fn.(*ir.Name) - if fn.Class_ == ir.PFUNC { + if fn.Class == ir.PFUNC { return fn.Func } case ir.OCLOSURE: @@ -759,7 +759,7 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b if ln.Op() != ir.ONAME { continue } - if ln.Class_ == ir.PPARAMOUT { // return values handled below. + if ln.Class == ir.PPARAMOUT { // return values handled below. continue } if ir.IsParamStackCopy(ln) { // ignore the on-stack copy of a parameter that moved to the heap @@ -772,7 +772,7 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b inlf := typecheck.Expr(inlvar(ln)).(*ir.Name) inlvars[ln] = inlf if base.Flag.GenDwarfInl > 0 { - if ln.Class_ == ir.PPARAM { + if ln.Class == ir.PPARAM { inlf.Name().SetInlFormal(true) } else { inlf.Name().SetInlLocal(true) @@ -975,7 +975,7 @@ func inlvar(var_ *ir.Name) *ir.Name { n := typecheck.NewName(var_.Sym()) n.SetType(var_.Type()) - n.Class_ = ir.PAUTO + n.Class = ir.PAUTO n.SetUsed(true) n.Curfn = ir.CurFunc // the calling function, not the called one n.SetAddrtaken(var_.Addrtaken()) @@ -988,7 +988,7 @@ func inlvar(var_ *ir.Name) *ir.Name { func retvar(t *types.Field, i int) *ir.Name { n := typecheck.NewName(typecheck.LookupNum("~R", i)) n.SetType(t.Type) - n.Class_ = ir.PAUTO + n.Class = ir.PAUTO n.SetUsed(true) n.Curfn = ir.CurFunc // the calling function, not the called one ir.CurFunc.Dcl = append(ir.CurFunc.Dcl, n) @@ -1000,7 +1000,7 @@ func retvar(t *types.Field, i int) *ir.Name { func argvar(t *types.Type, i int) ir.Node { n := typecheck.NewName(typecheck.LookupNum("~arg", i)) n.SetType(t.Elem()) - n.Class_ = ir.PAUTO + n.Class = ir.PAUTO n.SetUsed(true) n.Curfn = ir.CurFunc // the calling function, not the called one ir.CurFunc.Dcl = append(ir.CurFunc.Dcl, n) @@ -1170,7 +1170,7 @@ func (subst *inlsubst) updatedPos(xpos src.XPos) src.XPos { func pruneUnusedAutos(ll []*ir.Name, vis *hairyVisitor) []*ir.Name { s := make([]*ir.Name, 0, len(ll)) for _, n := range ll { - if n.Class_ == ir.PAUTO { + if n.Class == ir.PAUTO { if _, found := vis.usedLocals[n]; !found { continue } diff --git a/src/cmd/compile/internal/ir/expr.go b/src/cmd/compile/internal/ir/expr.go index 1b88427146..6d81bf8781 100644 --- a/src/cmd/compile/internal/ir/expr.go +++ b/src/cmd/compile/internal/ir/expr.go @@ -527,7 +527,7 @@ func (n *SelectorExpr) FuncName() *Name { panic(n.no("FuncName")) } fn := NewNameAt(n.Selection.Pos, MethodSym(n.X.Type(), n.Sel)) - fn.Class_ = PFUNC + fn.Class = PFUNC fn.SetType(n.Type()) return fn } @@ -736,7 +736,7 @@ func IsAddressable(n Node) bool { case ONAME: n := n.(*Name) - if n.Class_ == PFUNC { + if n.Class == PFUNC { return false } return true @@ -771,7 +771,7 @@ func staticValue1(nn Node) Node { return nil } n := nn.(*Name) - if n.Class_ != PAUTO || n.Addrtaken() { + if n.Class != PAUTO || n.Addrtaken() { return nil } diff --git a/src/cmd/compile/internal/ir/func.go b/src/cmd/compile/internal/ir/func.go index 1eaca9c6f3..12ef083c19 100644 --- 
a/src/cmd/compile/internal/ir/func.go +++ b/src/cmd/compile/internal/ir/func.go @@ -245,11 +245,11 @@ func FuncSymName(s *types.Sym) string { // MarkFunc marks a node as a function. func MarkFunc(n *Name) { - if n.Op() != ONAME || n.Class_ != Pxxx { + if n.Op() != ONAME || n.Class != Pxxx { base.Fatalf("expected ONAME/Pxxx node, got %v", n) } - n.Class_ = PFUNC + n.Class = PFUNC n.Sym().SetFunc(true) } diff --git a/src/cmd/compile/internal/ir/name.go b/src/cmd/compile/internal/ir/name.go index 689ef983f6..58b4ababff 100644 --- a/src/cmd/compile/internal/ir/name.go +++ b/src/cmd/compile/internal/ir/name.go @@ -37,7 +37,7 @@ func (*Ident) CanBeNtype() {} type Name struct { miniExpr BuiltinOp Op // uint8 - Class_ Class // uint8 + Class Class // uint8 pragma PragmaFlag // int16 flags bitset16 sym *types.Sym @@ -222,8 +222,6 @@ func (n *Name) Sym() *types.Sym { return n.sym } func (n *Name) SetSym(x *types.Sym) { n.sym = x } func (n *Name) SubOp() Op { return n.BuiltinOp } func (n *Name) SetSubOp(x Op) { n.BuiltinOp = x } -func (n *Name) Class() Class { return n.Class_ } -func (n *Name) SetClass(x Class) { n.Class_ = x } func (n *Name) SetFunc(x *Func) { n.Func = x } func (n *Name) Offset() int64 { panic("Name.Offset") } func (n *Name) SetOffset(x int64) { @@ -425,7 +423,7 @@ func IsParamStackCopy(n Node) bool { return false } name := n.(*Name) - return (name.Class_ == PPARAM || name.Class_ == PPARAMOUT) && name.Heapaddr != nil + return (name.Class == PPARAM || name.Class == PPARAMOUT) && name.Heapaddr != nil } // IsParamHeapCopy reports whether this is the on-heap copy of @@ -435,7 +433,7 @@ func IsParamHeapCopy(n Node) bool { return false } name := n.(*Name) - return name.Class_ == PAUTOHEAP && name.Stackcopy != nil + return name.Class == PAUTOHEAP && name.Stackcopy != nil } var RegFP *Name diff --git a/src/cmd/compile/internal/ir/scc.go b/src/cmd/compile/internal/ir/scc.go index f35c4d44e9..83c6074170 100644 --- a/src/cmd/compile/internal/ir/scc.go +++ b/src/cmd/compile/internal/ir/scc.go @@ -87,7 +87,7 @@ func (v *bottomUpVisitor) visit(n *Func) uint32 { Visit(n, func(n Node) { switch n.Op() { case ONAME: - if n := n.(*Name); n.Class_ == PFUNC { + if n := n.(*Name); n.Class == PFUNC { do(n.Defn) } case ODOTMETH, OCALLPART, OMETHEXPR: diff --git a/src/cmd/compile/internal/liveness/plive.go b/src/cmd/compile/internal/liveness/plive.go index 91f10b0a9d..26d90824b2 100644 --- a/src/cmd/compile/internal/liveness/plive.go +++ b/src/cmd/compile/internal/liveness/plive.go @@ -181,7 +181,7 @@ type progeffectscache struct { // nor do we care about empty structs (handled by the pointer check), // nor do we care about the fake PAUTOHEAP variables. 
func ShouldTrack(n *ir.Name) bool { - return (n.Class_ == ir.PAUTO || n.Class_ == ir.PPARAM || n.Class_ == ir.PPARAMOUT) && n.Type().HasPointers() + return (n.Class == ir.PAUTO || n.Class == ir.PPARAM || n.Class == ir.PPARAMOUT) && n.Type().HasPointers() } // getvariables returns the list of on-stack variables that we need to track @@ -208,7 +208,7 @@ func (lv *liveness) initcache() { lv.cache.initialized = true for i, node := range lv.vars { - switch node.Class_ { + switch node.Class { case ir.PPARAM: // A return instruction with a p.to is a tail return, which brings // the stack pointer back up (if it ever went down) and then jumps @@ -386,7 +386,7 @@ func (lv *liveness) pointerMap(liveout bitvec.BitVec, vars []*ir.Name, args, loc break } node := vars[i] - switch node.Class_ { + switch node.Class { case ir.PAUTO: typebits.Set(node.Type(), node.FrameOffset()+lv.stkptrsize, locals) @@ -687,7 +687,7 @@ func (lv *liveness) epilogue() { // don't need to keep the stack copy live? if lv.fn.HasDefer() { for i, n := range lv.vars { - if n.Class_ == ir.PPARAMOUT { + if n.Class == ir.PPARAMOUT { if n.IsOutputParamHeapAddr() { // Just to be paranoid. Heap addresses are PAUTOs. base.Fatalf("variable %v both output param and heap output param", n) @@ -785,7 +785,7 @@ func (lv *liveness) epilogue() { if !liveout.Get(int32(i)) { continue } - if n.Class_ == ir.PPARAM { + if n.Class == ir.PPARAM { continue // ok } base.Fatalf("bad live variable at entry of %v: %L", lv.fn.Nname, n) @@ -818,7 +818,7 @@ func (lv *liveness) epilogue() { // the only things that can possibly be live are the // input parameters. for j, n := range lv.vars { - if n.Class_ != ir.PPARAM && lv.stackMaps[0].Get(int32(j)) { + if n.Class != ir.PPARAM && lv.stackMaps[0].Get(int32(j)) { lv.f.Fatalf("%v %L recorded as live on entry", lv.fn.Nname, n) } } @@ -1063,7 +1063,7 @@ func (lv *liveness) emit() (argsSym, liveSym *obj.LSym) { // (Nodes without pointers aren't in lv.vars; see livenessShouldTrack.) var maxArgNode *ir.Name for _, n := range lv.vars { - switch n.Class_ { + switch n.Class { case ir.PPARAM, ir.PPARAMOUT: if maxArgNode == nil || n.FrameOffset() > maxArgNode.FrameOffset() { maxArgNode = n diff --git a/src/cmd/compile/internal/noder/noder.go b/src/cmd/compile/internal/noder/noder.go index 678e378291..76913c62a6 100644 --- a/src/cmd/compile/internal/noder/noder.go +++ b/src/cmd/compile/internal/noder/noder.go @@ -1176,10 +1176,10 @@ func (p *noder) stmtFall(stmt syntax.Stmt, fallOK bool) ir.Node { n := ir.NewReturnStmt(p.pos(stmt), p.exprList(stmt.Results)) if len(n.Results) == 0 && ir.CurFunc != nil { for _, ln := range ir.CurFunc.Dcl { - if ln.Class_ == ir.PPARAM { + if ln.Class == ir.PPARAM { continue } - if ln.Class_ != ir.PPARAMOUT { + if ln.Class != ir.PPARAMOUT { break } if ln.Sym().Def != ln { @@ -1956,7 +1956,7 @@ func oldname(s *types.Sym) ir.Node { if c == nil || c.Curfn != ir.CurFunc { // Do not have a closure var for the active closure yet; make one. 
c = typecheck.NewName(s) - c.Class_ = ir.PAUTOHEAP + c.Class = ir.PAUTOHEAP c.SetIsClosureVar(true) c.Defn = n diff --git a/src/cmd/compile/internal/pkginit/init.go b/src/cmd/compile/internal/pkginit/init.go index a32e09879c..5bc66c7e1b 100644 --- a/src/cmd/compile/internal/pkginit/init.go +++ b/src/cmd/compile/internal/pkginit/init.go @@ -32,7 +32,7 @@ func Task() *ir.Name { if n.Op() == ir.ONONAME { continue } - if n.Op() != ir.ONAME || n.(*ir.Name).Class_ != ir.PEXTERN { + if n.Op() != ir.ONAME || n.(*ir.Name).Class != ir.PEXTERN { base.Fatalf("bad inittask: %v", n) } deps = append(deps, n.(*ir.Name).Linksym()) @@ -89,7 +89,7 @@ func Task() *ir.Name { sym := typecheck.Lookup(".inittask") task := typecheck.NewName(sym) task.SetType(types.Types[types.TUINT8]) // fake type - task.Class_ = ir.PEXTERN + task.Class = ir.PEXTERN sym.Def = task lsym := task.Linksym() ot := 0 diff --git a/src/cmd/compile/internal/pkginit/initorder.go b/src/cmd/compile/internal/pkginit/initorder.go index 1c222c1de4..bdefd594ff 100644 --- a/src/cmd/compile/internal/pkginit/initorder.go +++ b/src/cmd/compile/internal/pkginit/initorder.go @@ -140,7 +140,7 @@ func (o *InitOrder) processAssign(n ir.Node) { defn := dep.Defn // Skip dependencies on functions (PFUNC) and // variables already initialized (InitDone). - if dep.Class_ != ir.PEXTERN || o.order[defn] == orderDone { + if dep.Class != ir.PEXTERN || o.order[defn] == orderDone { continue } o.order[n]++ @@ -204,7 +204,7 @@ func (o *InitOrder) findInitLoopAndExit(n *ir.Name, path *[]*ir.Name) { *path = append(*path, n) for _, ref := range refers { // Short-circuit variables that were initialized. - if ref.Class_ == ir.PEXTERN && o.order[ref.Defn] == orderDone { + if ref.Class == ir.PEXTERN && o.order[ref.Defn] == orderDone { continue } @@ -221,7 +221,7 @@ func reportInitLoopAndExit(l []*ir.Name) { // the start. 
i := -1 for j, n := range l { - if n.Class_ == ir.PEXTERN && (i == -1 || n.Pos().Before(l[i].Pos())) { + if n.Class == ir.PEXTERN && (i == -1 || n.Pos().Before(l[i].Pos())) { i = j } } @@ -291,7 +291,7 @@ func (d *initDeps) visit(n ir.Node) { switch n.Op() { case ir.ONAME: n := n.(*ir.Name) - switch n.Class_ { + switch n.Class { case ir.PEXTERN, ir.PFUNC: d.foundDep(n) } @@ -324,7 +324,7 @@ func (d *initDeps) foundDep(n *ir.Name) { return } d.seen.Add(n) - if d.transitive && n.Class_ == ir.PFUNC { + if d.transitive && n.Class == ir.PFUNC { d.inspectList(n.Defn.(*ir.Func).Body) } } diff --git a/src/cmd/compile/internal/reflectdata/reflect.go b/src/cmd/compile/internal/reflectdata/reflect.go index f926765326..30857fff6d 100644 --- a/src/cmd/compile/internal/reflectdata/reflect.go +++ b/src/cmd/compile/internal/reflectdata/reflect.go @@ -840,7 +840,7 @@ func TypePtr(t *types.Type) *ir.AddrExpr { if s.Def == nil { n := ir.NewNameAt(src.NoXPos, s) n.SetType(types.Types[types.TUINT8]) - n.Class_ = ir.PEXTERN + n.Class = ir.PEXTERN n.SetTypecheck(1) s.Def = n } @@ -859,7 +859,7 @@ func ITabAddr(t, itype *types.Type) *ir.AddrExpr { if s.Def == nil { n := typecheck.NewName(s) n.SetType(types.Types[types.TUINT8]) - n.Class_ = ir.PEXTERN + n.Class = ir.PEXTERN n.SetTypecheck(1) s.Def = n itabs = append(itabs, itabEntry{t: t, itype: itype, lsym: n.Linksym()}) @@ -1370,7 +1370,7 @@ func WriteTabs() { // } nsym := dname(p.Sym().Name, "", nil, true) t := p.Type() - if p.Class_ != ir.PFUNC { + if p.Class != ir.PFUNC { t = types.NewPtr(t) } tsym := WriteType(t) @@ -1674,7 +1674,7 @@ func ZeroAddr(size int64) ir.Node { if s.Def == nil { x := typecheck.NewName(s) x.SetType(types.Types[types.TUINT8]) - x.Class_ = ir.PEXTERN + x.Class = ir.PEXTERN x.SetTypecheck(1) s.Def = x } diff --git a/src/cmd/compile/internal/ssa/deadstore.go b/src/cmd/compile/internal/ssa/deadstore.go index a68c82ba97..530918da4d 100644 --- a/src/cmd/compile/internal/ssa/deadstore.go +++ b/src/cmd/compile/internal/ssa/deadstore.go @@ -148,7 +148,7 @@ func elimDeadAutosGeneric(f *Func) { case OpAddr, OpLocalAddr: // Propagate the address if it points to an auto. n, ok := v.Aux.(*ir.Name) - if !ok || n.Class() != ir.PAUTO { + if !ok || n.Class != ir.PAUTO { return } if addr[v] == nil { @@ -159,7 +159,7 @@ func elimDeadAutosGeneric(f *Func) { case OpVarDef, OpVarKill: // v should be eliminated if we eliminate the auto. n, ok := v.Aux.(*ir.Name) - if !ok || n.Class() != ir.PAUTO { + if !ok || n.Class != ir.PAUTO { return } if elim[v] == nil { @@ -175,7 +175,7 @@ func elimDeadAutosGeneric(f *Func) { // may not be used by the inline code, but will be used by // panic processing). 
n, ok := v.Aux.(*ir.Name) - if !ok || n.Class() != ir.PAUTO { + if !ok || n.Class != ir.PAUTO { return } if !used[n] { @@ -307,7 +307,7 @@ func elimUnreadAutos(f *Func) { if !ok { continue } - if n.Class() != ir.PAUTO { + if n.Class != ir.PAUTO { continue } diff --git a/src/cmd/compile/internal/ssa/export_test.go b/src/cmd/compile/internal/ssa/export_test.go index 8712ff78c1..32e6d09d1b 100644 --- a/src/cmd/compile/internal/ssa/export_test.go +++ b/src/cmd/compile/internal/ssa/export_test.go @@ -70,7 +70,7 @@ func (TestFrontend) StringData(s string) *obj.LSym { } func (TestFrontend) Auto(pos src.XPos, t *types.Type) *ir.Name { n := ir.NewNameAt(pos, &types.Sym{Name: "aFakeAuto"}) - n.SetClass(ir.PAUTO) + n.Class = ir.PAUTO return n } func (d TestFrontend) SplitString(s LocalSlot) (LocalSlot, LocalSlot) { diff --git a/src/cmd/compile/internal/ssagen/nowb.go b/src/cmd/compile/internal/ssagen/nowb.go index 26858fac87..60cfb2f698 100644 --- a/src/cmd/compile/internal/ssagen/nowb.go +++ b/src/cmd/compile/internal/ssagen/nowb.go @@ -76,7 +76,7 @@ func (c *nowritebarrierrecChecker) findExtraCalls(nn ir.Node) { return } fn := n.X.(*ir.Name) - if fn.Class_ != ir.PFUNC || fn.Defn == nil { + if fn.Class != ir.PFUNC || fn.Defn == nil { return } if !types.IsRuntimePkg(fn.Sym().Pkg) || fn.Sym().Name != "systemstack" { diff --git a/src/cmd/compile/internal/ssagen/pgen.go b/src/cmd/compile/internal/ssagen/pgen.go index 2be10ff7af..bbd319d735 100644 --- a/src/cmd/compile/internal/ssagen/pgen.go +++ b/src/cmd/compile/internal/ssagen/pgen.go @@ -34,11 +34,11 @@ import ( // the top of the stack and increasing in size. // Non-autos sort on offset. func cmpstackvarlt(a, b *ir.Name) bool { - if (a.Class_ == ir.PAUTO) != (b.Class_ == ir.PAUTO) { - return b.Class_ == ir.PAUTO + if (a.Class == ir.PAUTO) != (b.Class == ir.PAUTO) { + return b.Class == ir.PAUTO } - if a.Class_ != ir.PAUTO { + if a.Class != ir.PAUTO { return a.FrameOffset() < b.FrameOffset() } @@ -79,7 +79,7 @@ func (s *ssafn) AllocFrame(f *ssa.Func) { // Mark the PAUTO's unused. for _, ln := range fn.Dcl { - if ln.Class_ == ir.PAUTO { + if ln.Class == ir.PAUTO { ln.SetUsed(false) } } @@ -94,7 +94,7 @@ func (s *ssafn) AllocFrame(f *ssa.Func) { for _, b := range f.Blocks { for _, v := range b.Values { if n, ok := v.Aux.(*ir.Name); ok { - switch n.Class_ { + switch n.Class { case ir.PPARAM, ir.PPARAMOUT: // Don't modify nodfp; it is a global. if n != ir.RegFP { @@ -120,7 +120,7 @@ func (s *ssafn) AllocFrame(f *ssa.Func) { // Reassign stack offsets of the locals that are used. 
lastHasPtr := false for i, n := range fn.Dcl { - if n.Op() != ir.ONAME || n.Class_ != ir.PAUTO { + if n.Op() != ir.ONAME || n.Class != ir.PAUTO { continue } if !n.Used() { @@ -207,7 +207,7 @@ func init() { func StackOffset(slot ssa.LocalSlot) int32 { n := slot.N var off int64 - switch n.Class_ { + switch n.Class { case ir.PAUTO: off = n.FrameOffset() if base.Ctxt.FixedFrameSize() == 0 { diff --git a/src/cmd/compile/internal/ssagen/pgen_test.go b/src/cmd/compile/internal/ssagen/pgen_test.go index 82d8447e9f..69ed8ad74e 100644 --- a/src/cmd/compile/internal/ssagen/pgen_test.go +++ b/src/cmd/compile/internal/ssagen/pgen_test.go @@ -46,7 +46,7 @@ func TestCmpstackvar(t *testing.T) { n := typecheck.NewName(s) n.SetType(t) n.SetFrameOffset(xoffset) - n.Class_ = cl + n.Class = cl return n } testdata := []struct { @@ -161,7 +161,7 @@ func TestStackvarSort(t *testing.T) { n := typecheck.NewName(s) n.SetType(t) n.SetFrameOffset(xoffset) - n.Class_ = cl + n.Class = cl return n } inp := []*ir.Name{ diff --git a/src/cmd/compile/internal/ssagen/ssa.go b/src/cmd/compile/internal/ssagen/ssa.go index 8e3b09aac3..5998c42012 100644 --- a/src/cmd/compile/internal/ssagen/ssa.go +++ b/src/cmd/compile/internal/ssagen/ssa.go @@ -436,7 +436,7 @@ func buildssa(fn *ir.Func, worker int) *ssa.Func { var args []ssa.Param var results []ssa.Param for _, n := range fn.Dcl { - switch n.Class_ { + switch n.Class { case ir.PPARAM: s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem) args = append(args, ssa.Param{Type: n.Type(), Offset: int32(n.FrameOffset())}) @@ -457,13 +457,13 @@ func buildssa(fn *ir.Func, worker int) *ssa.Func { case ir.PFUNC: // local function - already handled by frontend default: - s.Fatalf("local variable with class %v unimplemented", n.Class_) + s.Fatalf("local variable with class %v unimplemented", n.Class) } } // Populate SSAable arguments. for _, n := range fn.Dcl { - if n.Class_ == ir.PPARAM && s.canSSA(n) { + if n.Class == ir.PPARAM && s.canSSA(n) { v := s.newValue0A(ssa.OpArg, n.Type(), n) s.vars[n] = v s.addNamedValue(n, v) // This helps with debugging information, not needed for compilation itself. 
@@ -1166,7 +1166,7 @@ func (s *state) stmt(n ir.Node) { case ir.OCALLINTER: n := n.(*ir.CallExpr) s.callResult(n, callNormal) - if n.Op() == ir.OCALLFUNC && n.X.Op() == ir.ONAME && n.X.(*ir.Name).Class_ == ir.PFUNC { + if n.Op() == ir.OCALLFUNC && n.X.Op() == ir.ONAME && n.X.(*ir.Name).Class == ir.PFUNC { if fn := n.X.Sym().Name; base.Flag.CompilingRuntime && fn == "throw" || n.X.Sym().Pkg == ir.Pkgs.Runtime && (fn == "throwinit" || fn == "gopanic" || fn == "panicwrap" || fn == "block" || fn == "panicmakeslicelen" || fn == "panicmakeslicecap") { m := s.mem() @@ -1242,7 +1242,7 @@ func (s *state) stmt(n ir.Node) { case ir.ODCL: n := n.(*ir.Decl) - if n.X.Class_ == ir.PAUTOHEAP { + if n.X.Class == ir.PAUTOHEAP { s.Fatalf("DCL %v", n) } @@ -1634,7 +1634,7 @@ func (s *state) stmt(n ir.Node) { if !v.Addrtaken() { s.Fatalf("VARLIVE variable %v must have Addrtaken set", v) } - switch v.Class_ { + switch v.Class { case ir.PAUTO, ir.PPARAM, ir.PPARAMOUT: default: s.Fatalf("VARLIVE variable %v must be Auto or Arg", v) @@ -2110,7 +2110,7 @@ func (s *state) expr(n ir.Node) *ssa.Value { return s.entryNewValue1A(ssa.OpAddr, n.Type(), aux, s.sb) case ir.ONAME: n := n.(*ir.Name) - if n.Class_ == ir.PFUNC { + if n.Class == ir.PFUNC { // "value" of a function is the address of the function's closure sym := staticdata.FuncLinksym(n) return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(n.Type()), sym, s.sb) @@ -3003,7 +3003,7 @@ func (s *state) append(n *ir.CallExpr, inplace bool) *ssa.Value { if inplace { if sn.Op() == ir.ONAME { sn := sn.(*ir.Name) - if sn.Class_ != ir.PEXTERN { + if sn.Class != ir.PEXTERN { // Tell liveness we're about to build a new slice s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, sn, s.mem()) } @@ -3222,7 +3222,7 @@ func (s *state) assign(left ir.Node, right *ssa.Value, deref bool, skip skipMask // If this assignment clobbers an entire local variable, then emit // OpVarDef so liveness analysis knows the variable is redefined. 
- if base := clobberBase(left); base.Op() == ir.ONAME && base.(*ir.Name).Class_ != ir.PEXTERN && skip == 0 { + if base := clobberBase(left); base.Op() == ir.ONAME && base.(*ir.Name).Class != ir.PEXTERN && skip == 0 { s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, base.(*ir.Name), s.mem(), !ir.IsAutoTmp(base)) } @@ -4385,7 +4385,7 @@ func (s *state) openDeferRecord(n *ir.CallExpr) { closureVal := s.expr(fn) closure := s.openDeferSave(nil, fn.Type(), closureVal) opendefer.closureNode = closure.Aux.(*ir.Name) - if !(fn.Op() == ir.ONAME && fn.(*ir.Name).Class_ == ir.PFUNC) { + if !(fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC) { opendefer.closure = closure } } else if n.Op() == ir.OCALLMETH { @@ -4651,7 +4651,7 @@ func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool) *ssa.Val switch n.Op() { case ir.OCALLFUNC: testLateExpansion = k != callDeferStack && ssa.LateCallExpansionEnabledWithin(s.f) - if k == callNormal && fn.Op() == ir.ONAME && fn.(*ir.Name).Class_ == ir.PFUNC { + if k == callNormal && fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC { fn := fn.(*ir.Name) sym = fn.Sym() break @@ -4958,7 +4958,7 @@ func (s *state) addr(n ir.Node) *ssa.Value { fallthrough case ir.ONAME: n := n.(*ir.Name) - switch n.Class_ { + switch n.Class { case ir.PEXTERN: // global variable v := s.entryNewValue1A(ssa.OpAddr, t, n.Linksym(), s.sb) @@ -4987,7 +4987,7 @@ func (s *state) addr(n ir.Node) *ssa.Value { // that cse works on their addresses return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), true) default: - s.Fatalf("variable address class %v not implemented", n.Class_) + s.Fatalf("variable address class %v not implemented", n.Class) return nil } case ir.ORESULT: @@ -5096,10 +5096,10 @@ func (s *state) canSSAName(name *ir.Name) bool { if ir.IsParamHeapCopy(name) { return false } - if name.Class_ == ir.PAUTOHEAP { + if name.Class == ir.PAUTOHEAP { s.Fatalf("canSSA of PAUTOHEAP %v", name) } - switch name.Class_ { + switch name.Class { case ir.PEXTERN: return false case ir.PPARAMOUT: @@ -5117,7 +5117,7 @@ func (s *state) canSSAName(name *ir.Name) bool { return false } } - if name.Class_ == ir.PPARAM && name.Sym() != nil && name.Sym().Name == ".this" { + if name.Class == ir.PPARAM && name.Sym() != nil && name.Sym().Name == ".this" { // wrappers generated by genwrapper need to update // the .this pointer in place. // TODO: treat as a PPARAMOUT? @@ -6210,7 +6210,7 @@ func (s *state) mem() *ssa.Value { } func (s *state) addNamedValue(n *ir.Name, v *ssa.Value) { - if n.Class_ == ir.Pxxx { + if n.Class == ir.Pxxx { // Don't track our marker nodes (memVar etc.). return } @@ -6218,7 +6218,7 @@ func (s *state) addNamedValue(n *ir.Name, v *ssa.Value) { // Don't track temporary variables. return } - if n.Class_ == ir.PPARAMOUT { + if n.Class == ir.PPARAMOUT { // Don't track named output values. This prevents return values // from being assigned too early. See #14591 and #14762. TODO: allow this. 
return @@ -6741,8 +6741,8 @@ func defframe(s *State, e *ssafn) { if !n.Needzero() { continue } - if n.Class_ != ir.PAUTO { - e.Fatalf(n.Pos(), "needzero class %d", n.Class_) + if n.Class != ir.PAUTO { + e.Fatalf(n.Pos(), "needzero class %d", n.Class) } if n.Type().Size()%int64(types.PtrSize) != 0 || n.FrameOffset()%int64(types.PtrSize) != 0 || n.Type().Size() == 0 { e.Fatalf(n.Pos(), "var %L has size %d offset %d", n, n.Type().Size(), n.Offset_) @@ -6826,7 +6826,7 @@ func AddAux2(a *obj.Addr, v *ssa.Value, offset int64) { a.Name = obj.NAME_EXTERN a.Sym = n case *ir.Name: - if n.Class_ == ir.PPARAM || n.Class_ == ir.PPARAMOUT { + if n.Class == ir.PPARAM || n.Class == ir.PPARAMOUT { a.Name = obj.NAME_PARAM a.Sym = ir.Orig(n).(*ir.Name).Linksym() a.Offset += n.FrameOffset() @@ -6968,7 +6968,7 @@ func AddrAuto(a *obj.Addr, v *ssa.Value) { a.Sym = n.Linksym() a.Reg = int16(Arch.REGSP) a.Offset = n.FrameOffset() + off - if n.Class_ == ir.PPARAM || n.Class_ == ir.PPARAMOUT { + if n.Class == ir.PPARAM || n.Class == ir.PPARAMOUT { a.Name = obj.NAME_PARAM } else { a.Name = obj.NAME_AUTO @@ -7198,7 +7198,7 @@ func (e *ssafn) DerefItab(it *obj.LSym, offset int64) *obj.LSym { func (e *ssafn) SplitSlot(parent *ssa.LocalSlot, suffix string, offset int64, t *types.Type) ssa.LocalSlot { node := parent.N - if node.Class_ != ir.PAUTO || node.Addrtaken() { + if node.Class != ir.PAUTO || node.Addrtaken() { // addressed things and non-autos retain their parents (i.e., cannot truly be split) return ssa.LocalSlot{N: node, Type: t, Off: parent.Off + offset} } @@ -7208,7 +7208,7 @@ func (e *ssafn) SplitSlot(parent *ssa.LocalSlot, suffix string, offset int64, t s.Def = n ir.AsNode(s.Def).Name().SetUsed(true) n.SetType(t) - n.Class_ = ir.PAUTO + n.Class = ir.PAUTO n.SetEsc(ir.EscNever) n.Curfn = e.curfn e.curfn.Dcl = append(e.curfn.Dcl, n) diff --git a/src/cmd/compile/internal/staticdata/data.go b/src/cmd/compile/internal/staticdata/data.go index 27d9cec06d..94fa6760a0 100644 --- a/src/cmd/compile/internal/staticdata/data.go +++ b/src/cmd/compile/internal/staticdata/data.go @@ -50,8 +50,8 @@ func InitFunc(n *ir.Name, noff int64, f *ir.Name) { if n.Sym() == nil { base.Fatalf("pfuncsym nil n sym") } - if f.Class_ != ir.PFUNC { - base.Fatalf("pfuncsym class not PFUNC %d", f.Class_) + if f.Class != ir.PFUNC { + base.Fatalf("pfuncsym class not PFUNC %d", f.Class) } s := n.Linksym() s.WriteAddr(base.Ctxt, noff, types.PtrSize, FuncLinksym(f), 0) @@ -259,7 +259,7 @@ func FuncSym(s *types.Sym) *types.Sym { } func FuncLinksym(n *ir.Name) *obj.LSym { - if n.Op() != ir.ONAME || n.Class_ != ir.PFUNC { + if n.Op() != ir.ONAME || n.Class != ir.PFUNC { base.Fatalf("expected func name: %v", n) } return FuncSym(n.Sym()).Linksym() diff --git a/src/cmd/compile/internal/staticinit/sched.go b/src/cmd/compile/internal/staticinit/sched.go index 8e4ce55954..ac0b6cd87e 100644 --- a/src/cmd/compile/internal/staticinit/sched.go +++ b/src/cmd/compile/internal/staticinit/sched.go @@ -78,12 +78,12 @@ func (s *Schedule) tryStaticInit(nn ir.Node) bool { // like staticassign but we are copying an already // initialized value r. 
func (s *Schedule) staticcopy(l *ir.Name, loff int64, rn *ir.Name, typ *types.Type) bool { - if rn.Class_ == ir.PFUNC { + if rn.Class == ir.PFUNC { // TODO if roff != 0 { panic } staticdata.InitFunc(l, loff, rn) return true } - if rn.Class_ != ir.PEXTERN || rn.Sym().Pkg != types.LocalPkg { + if rn.Class != ir.PEXTERN || rn.Sym().Pkg != types.LocalPkg { return false } if rn.Defn.Op() != ir.OAS { @@ -246,7 +246,7 @@ func (s *Schedule) StaticAssign(l *ir.Name, loff int64, r ir.Node, typ *types.Ty case ir.OSTR2BYTES: r := r.(*ir.ConvExpr) - if l.Class_ == ir.PEXTERN && r.X.Op() == ir.OLITERAL { + if l.Class == ir.PEXTERN && r.X.Op() == ir.OLITERAL { sval := ir.StringVal(r.X) staticdata.InitSliceBytes(l, loff, sval) return true diff --git a/src/cmd/compile/internal/typecheck/dcl.go b/src/cmd/compile/internal/typecheck/dcl.go index 5eaf100eed..6c3aa3781e 100644 --- a/src/cmd/compile/internal/typecheck/dcl.go +++ b/src/cmd/compile/internal/typecheck/dcl.go @@ -91,7 +91,7 @@ func Declare(n *ir.Name, ctxt ir.Class) { s.Lastlineno = base.Pos s.Def = n n.Vargen = int32(gen) - n.Class_ = ctxt + n.Class = ctxt if ctxt == ir.PFUNC { n.Sym().SetFunc(true) } @@ -455,7 +455,7 @@ func TempAt(pos src.XPos, curfn *ir.Func, t *types.Type) *ir.Name { n := ir.NewNameAt(pos, s) s.Def = n n.SetType(t) - n.Class_ = ir.PAUTO + n.Class = ir.PAUTO n.SetEsc(ir.EscNever) n.Curfn = curfn n.SetUsed(true) diff --git a/src/cmd/compile/internal/typecheck/export.go b/src/cmd/compile/internal/typecheck/export.go index c525391401..63d0a1ec6c 100644 --- a/src/cmd/compile/internal/typecheck/export.go +++ b/src/cmd/compile/internal/typecheck/export.go @@ -53,7 +53,7 @@ func importsym(ipkg *types.Pkg, pos src.XPos, s *types.Sym, op ir.Op, ctxt ir.Cl } n := ir.NewDeclNameAt(pos, op, s) - n.Class_ = ctxt // TODO(mdempsky): Move this into NewDeclNameAt too? + n.Class = ctxt // TODO(mdempsky): Move this into NewDeclNameAt too? s.SetPkgDef(n) return n } diff --git a/src/cmd/compile/internal/typecheck/func.go b/src/cmd/compile/internal/typecheck/func.go index 8592397004..b3efb8f25a 100644 --- a/src/cmd/compile/internal/typecheck/func.go +++ b/src/cmd/compile/internal/typecheck/func.go @@ -129,7 +129,7 @@ func CaptureVars(fn *ir.Func) { outermost := v.Defn.(*ir.Name) // out parameters will be assigned to implicitly upon return. - if outermost.Class_ != ir.PPARAMOUT && !outermost.Addrtaken() && !outermost.Assigned() && v.Type().Size() <= 128 { + if outermost.Class != ir.PPARAMOUT && !outermost.Addrtaken() && !outermost.Assigned() && v.Type().Size() <= 128 { v.SetByval(true) } else { outermost.SetAddrtaken(true) @@ -408,7 +408,7 @@ func tcFunc(n *ir.Func) { } for _, ln := range n.Dcl { - if ln.Op() == ir.ONAME && (ln.Class_ == ir.PPARAM || ln.Class_ == ir.PPARAMOUT) { + if ln.Op() == ir.ONAME && (ln.Class == ir.PPARAM || ln.Class == ir.PPARAMOUT) { ln.Decldepth = 1 } } diff --git a/src/cmd/compile/internal/typecheck/iexport.go b/src/cmd/compile/internal/typecheck/iexport.go index dd515b8ccd..a7927c39a3 100644 --- a/src/cmd/compile/internal/typecheck/iexport.go +++ b/src/cmd/compile/internal/typecheck/iexport.go @@ -430,7 +430,7 @@ func (p *iexporter) doDecl(n *ir.Name) { switch n.Op() { case ir.ONAME: - switch n.Class_ { + switch n.Class { case ir.PEXTERN: // Variable. 
w.tag('V') @@ -450,7 +450,7 @@ func (p *iexporter) doDecl(n *ir.Name) { w.funcExt(n) default: - base.Fatalf("unexpected class: %v, %v", n, n.Class_) + base.Fatalf("unexpected class: %v, %v", n, n.Class) } case ir.OLITERAL: @@ -1260,7 +1260,7 @@ func (w *exportWriter) expr(n ir.Node) { case ir.ONAME: // Package scope name. n := n.(*ir.Name) - if (n.Class_ == ir.PEXTERN || n.Class_ == ir.PFUNC) && !ir.IsBlank(n) { + if (n.Class == ir.PEXTERN || n.Class == ir.PFUNC) && !ir.IsBlank(n) { w.op(ir.ONONAME) w.qualifiedIdent(n) break @@ -1526,7 +1526,7 @@ func (w *exportWriter) localName(n *ir.Name) { // PPARAM/PPARAMOUT, because we only want to include vargen in // non-param names. var v int32 - if n.Class_ == ir.PAUTO || (n.Class_ == ir.PAUTOHEAP && n.Stackcopy == nil) { + if n.Class == ir.PAUTO || (n.Class == ir.PAUTOHEAP && n.Stackcopy == nil) { v = n.Vargen } diff --git a/src/cmd/compile/internal/typecheck/iimport.go b/src/cmd/compile/internal/typecheck/iimport.go index 2dc7e70b65..15c57b2380 100644 --- a/src/cmd/compile/internal/typecheck/iimport.go +++ b/src/cmd/compile/internal/typecheck/iimport.go @@ -333,7 +333,7 @@ func (r *importReader) doDecl(sym *types.Sym) *ir.Name { // methodSym already marked m.Sym as a function. m := ir.NewNameAt(mpos, ir.MethodSym(recv.Type, msym)) - m.Class_ = ir.PFUNC + m.Class = ir.PFUNC m.SetType(mtyp) m.Func = ir.NewFunc(mpos) diff --git a/src/cmd/compile/internal/typecheck/syms.go b/src/cmd/compile/internal/typecheck/syms.go index 01c03b5f9f..28db40db91 100644 --- a/src/cmd/compile/internal/typecheck/syms.go +++ b/src/cmd/compile/internal/typecheck/syms.go @@ -30,7 +30,7 @@ func SubstArgTypes(old *ir.Name, types_ ...*types.Type) *ir.Name { types.CalcSize(t) } n := ir.NewNameAt(old.Pos(), old.Sym()) - n.Class_ = old.Class() + n.Class = old.Class n.SetType(types.SubstAny(old.Type(), &types_)) if len(types_) > 0 { base.Fatalf("substArgTypes: too many argument types") diff --git a/src/cmd/compile/internal/typecheck/typecheck.go b/src/cmd/compile/internal/typecheck/typecheck.go index 812b94de0d..981f4ef1d6 100644 --- a/src/cmd/compile/internal/typecheck/typecheck.go +++ b/src/cmd/compile/internal/typecheck/typecheck.go @@ -2099,7 +2099,7 @@ func CheckUnused(fn *ir.Func) { // Propagate the used flag for typeswitch variables up to the NONAME in its definition. 
for _, ln := range fn.Dcl { - if ln.Op() == ir.ONAME && ln.Class_ == ir.PAUTO && ln.Used() { + if ln.Op() == ir.ONAME && ln.Class == ir.PAUTO && ln.Used() { if guard, ok := ln.Defn.(*ir.TypeSwitchGuard); ok { guard.Used = true } @@ -2107,7 +2107,7 @@ func CheckUnused(fn *ir.Func) { } for _, ln := range fn.Dcl { - if ln.Op() != ir.ONAME || ln.Class_ != ir.PAUTO || ln.Used() { + if ln.Op() != ir.ONAME || ln.Class != ir.PAUTO || ln.Used() { continue } if defn, ok := ln.Defn.(*ir.TypeSwitchGuard); ok { diff --git a/src/cmd/compile/internal/typecheck/universe.go b/src/cmd/compile/internal/typecheck/universe.go index f1e7ed4273..402b8deeb3 100644 --- a/src/cmd/compile/internal/typecheck/universe.go +++ b/src/cmd/compile/internal/typecheck/universe.go @@ -357,6 +357,6 @@ func DeclareUniverse() { ir.RegFP = NewName(Lookup(".fp")) ir.RegFP.SetType(types.Types[types.TINT32]) - ir.RegFP.Class_ = ir.PPARAM + ir.RegFP.Class = ir.PPARAM ir.RegFP.SetUsed(true) } diff --git a/src/cmd/compile/internal/walk/assign.go b/src/cmd/compile/internal/walk/assign.go index ec0f60ad93..3fe810ac4e 100644 --- a/src/cmd/compile/internal/walk/assign.go +++ b/src/cmd/compile/internal/walk/assign.go @@ -392,7 +392,7 @@ func ascompatee(op ir.Op, nl, nr []ir.Node) []ir.Node { appendWalkStmt(&late, convas(ir.NewAssignStmt(base.Pos, lorig, r), &late)) - if name == nil || name.Addrtaken() || name.Class_ == ir.PEXTERN || name.Class_ == ir.PAUTOHEAP { + if name == nil || name.Addrtaken() || name.Class == ir.PEXTERN || name.Class == ir.PAUTOHEAP { memWrite = true continue } @@ -418,7 +418,7 @@ func readsMemory(n ir.Node) bool { switch n.Op() { case ir.ONAME: n := n.(*ir.Name) - return n.Class_ == ir.PEXTERN || n.Class_ == ir.PAUTOHEAP || n.Addrtaken() + return n.Class == ir.PEXTERN || n.Class == ir.PAUTOHEAP || n.Addrtaken() case ir.OADD, ir.OAND, diff --git a/src/cmd/compile/internal/walk/closure.go b/src/cmd/compile/internal/walk/closure.go index fcdb43f113..449df88f9e 100644 --- a/src/cmd/compile/internal/walk/closure.go +++ b/src/cmd/compile/internal/walk/closure.go @@ -52,7 +52,7 @@ func Closure(fn *ir.Func) { v = addr } - v.Class_ = ir.PPARAM + v.Class = ir.PPARAM decls = append(decls, v) fld := types.NewField(src.NoXPos, v.Sym(), v.Type()) @@ -84,7 +84,7 @@ func Closure(fn *ir.Func) { if v.Byval() && v.Type().Width <= int64(2*types.PtrSize) { // If it is a small variable captured by value, downgrade it to PAUTO. - v.Class_ = ir.PAUTO + v.Class = ir.PAUTO fn.Dcl = append(fn.Dcl, v) body = append(body, ir.NewAssignStmt(base.Pos, v, cr)) } else { @@ -92,7 +92,7 @@ func Closure(fn *ir.Func) { // and initialize in entry prologue. 
addr := typecheck.NewName(typecheck.Lookup("&" + v.Sym().Name)) addr.SetType(types.NewPtr(v.Type())) - addr.Class_ = ir.PAUTO + addr.Class = ir.PAUTO addr.SetUsed(true) addr.Curfn = fn fn.Dcl = append(fn.Dcl, addr) diff --git a/src/cmd/compile/internal/walk/complit.go b/src/cmd/compile/internal/walk/complit.go index d8605d39bd..8a77bba2ad 100644 --- a/src/cmd/compile/internal/walk/complit.go +++ b/src/cmd/compile/internal/walk/complit.go @@ -68,7 +68,7 @@ func isSimpleName(nn ir.Node) bool { return false } n := nn.(*ir.Name) - return n.Class_ != ir.PAUTOHEAP && n.Class_ != ir.PEXTERN + return n.Class != ir.PAUTOHEAP && n.Class != ir.PEXTERN } func litas(l ir.Node, r ir.Node, init *ir.Nodes) { @@ -294,7 +294,7 @@ func slicelit(ctxt initContext, n *ir.CompLitExpr, var_ ir.Node, init *ir.Nodes) // copy static to slice var_ = typecheck.AssignExpr(var_) name, offset, ok := staticinit.StaticLoc(var_) - if !ok || name.Class_ != ir.PEXTERN { + if !ok || name.Class != ir.PEXTERN { base.Fatalf("slicelit: %v", var_) } staticdata.InitSlice(name, offset, vstat, t.NumElem()) @@ -657,7 +657,7 @@ func genAsStatic(as *ir.AssignStmt) { } name, offset, ok := staticinit.StaticLoc(as.X) - if !ok || (name.Class_ != ir.PEXTERN && as.X != ir.BlankNode) { + if !ok || (name.Class != ir.PEXTERN && as.X != ir.BlankNode) { base.Fatalf("genAsStatic: lhs %v", as.X) } @@ -674,7 +674,7 @@ func genAsStatic(as *ir.AssignStmt) { if r.Offset_ != 0 { base.Fatalf("genAsStatic %+v", as) } - if r.Class_ == ir.PFUNC { + if r.Class == ir.PFUNC { staticdata.InitFunc(name, offset, r) return } diff --git a/src/cmd/compile/internal/walk/convert.go b/src/cmd/compile/internal/walk/convert.go index d0cd5ff753..85459fd92f 100644 --- a/src/cmd/compile/internal/walk/convert.go +++ b/src/cmd/compile/internal/walk/convert.go @@ -68,12 +68,12 @@ func walkConvInterface(n *ir.ConvExpr, init *ir.Nodes) ir.Node { if ir.Names.Staticuint64s == nil { ir.Names.Staticuint64s = typecheck.NewName(ir.Pkgs.Runtime.Lookup("staticuint64s")) - ir.Names.Staticuint64s.Class_ = ir.PEXTERN + ir.Names.Staticuint64s.Class = ir.PEXTERN // The actual type is [256]uint64, but we use [256*8]uint8 so we can address // individual bytes. ir.Names.Staticuint64s.SetType(types.NewArray(types.Types[types.TUINT8], 256*8)) ir.Names.Zerobase = typecheck.NewName(ir.Pkgs.Runtime.Lookup("zerobase")) - ir.Names.Zerobase.Class_ = ir.PEXTERN + ir.Names.Zerobase.Class = ir.PEXTERN ir.Names.Zerobase.SetType(types.Types[types.TUINTPTR]) } @@ -98,7 +98,7 @@ func walkConvInterface(n *ir.ConvExpr, init *ir.Nodes) ir.Node { xe := ir.NewIndexExpr(base.Pos, ir.Names.Staticuint64s, index) xe.SetBounded(true) value = xe - case n.X.Op() == ir.ONAME && n.X.(*ir.Name).Class_ == ir.PEXTERN && n.X.(*ir.Name).Readonly(): + case n.X.Op() == ir.ONAME && n.X.(*ir.Name).Class == ir.PEXTERN && n.X.(*ir.Name).Readonly(): // n.Left is a readonly global; use it directly. 
value = n.X case !fromType.IsInterface() && n.Esc() == ir.EscNone && fromType.Width <= 1024: diff --git a/src/cmd/compile/internal/walk/expr.go b/src/cmd/compile/internal/walk/expr.go index 8a56526a36..3dffb496e9 100644 --- a/src/cmd/compile/internal/walk/expr.go +++ b/src/cmd/compile/internal/walk/expr.go @@ -52,7 +52,7 @@ func walkExpr(n ir.Node, init *ir.Nodes) ir.Node { base.Fatalf("expression has untyped type: %+v", n) } - if n.Op() == ir.ONAME && n.(*ir.Name).Class_ == ir.PAUTOHEAP { + if n.Op() == ir.ONAME && n.(*ir.Name).Class == ir.PAUTOHEAP { n := n.(*ir.Name) nn := ir.NewStarExpr(base.Pos, n.Heapaddr) nn.X.MarkNonNil() diff --git a/src/cmd/compile/internal/walk/order.go b/src/cmd/compile/internal/walk/order.go index 2164685cd4..38a9bec6e3 100644 --- a/src/cmd/compile/internal/walk/order.go +++ b/src/cmd/compile/internal/walk/order.go @@ -235,7 +235,7 @@ func (o *orderState) safeExpr(n ir.Node) ir.Node { // because we emit explicit VARKILL instructions marking the end of those // temporaries' lifetimes. func isaddrokay(n ir.Node) bool { - return ir.IsAddressable(n) && (n.Op() != ir.ONAME || n.(*ir.Name).Class_ == ir.PEXTERN || ir.IsAutoTmp(n)) + return ir.IsAddressable(n) && (n.Op() != ir.ONAME || n.(*ir.Name).Class == ir.PEXTERN || ir.IsAutoTmp(n)) } // addrTemp ensures that n is okay to pass by address to runtime routines. diff --git a/src/cmd/compile/internal/walk/race.go b/src/cmd/compile/internal/walk/race.go index 20becf9be9..77cabe50c6 100644 --- a/src/cmd/compile/internal/walk/race.go +++ b/src/cmd/compile/internal/walk/race.go @@ -39,7 +39,7 @@ func instrument(fn *ir.Func) { // race in the future. nodpc := ir.NewNameAt(src.NoXPos, typecheck.Lookup(".fp")) - nodpc.Class_ = ir.PPARAM + nodpc.Class = ir.PPARAM nodpc.SetUsed(true) nodpc.SetType(types.Types[types.TUINTPTR]) nodpc.SetFrameOffset(int64(-types.PtrSize)) diff --git a/src/cmd/compile/internal/walk/stmt.go b/src/cmd/compile/internal/walk/stmt.go index 460c0a7c10..1df491bd4e 100644 --- a/src/cmd/compile/internal/walk/stmt.go +++ b/src/cmd/compile/internal/walk/stmt.go @@ -176,7 +176,7 @@ func walkStmtList(s []ir.Node) { // walkDecl walks an ODCL node. 
func walkDecl(n *ir.Decl) ir.Node { v := n.X - if v.Class_ == ir.PAUTOHEAP { + if v.Class == ir.PAUTOHEAP { if base.Flag.CompilingRuntime { base.Errorf("%v escapes to heap, not allowed in runtime", v) } diff --git a/src/cmd/compile/internal/walk/walk.go b/src/cmd/compile/internal/walk/walk.go index 57c2d43753..928b673752 100644 --- a/src/cmd/compile/internal/walk/walk.go +++ b/src/cmd/compile/internal/walk/walk.go @@ -61,7 +61,7 @@ func Walk(fn *ir.Func) { func paramoutheap(fn *ir.Func) bool { for _, ln := range fn.Dcl { - switch ln.Class_ { + switch ln.Class { case ir.PPARAMOUT: if ir.IsParamStackCopy(ln) || ln.Addrtaken() { return true @@ -137,7 +137,7 @@ func paramstoheap(params *types.Type) []ir.Node { if stackcopy := v.Name().Stackcopy; stackcopy != nil { nn = append(nn, walkStmt(ir.NewDecl(base.Pos, ir.ODCL, v.(*ir.Name)))) - if stackcopy.Class_ == ir.PPARAM { + if stackcopy.Class == ir.PPARAM { nn = append(nn, walkStmt(typecheck.Stmt(ir.NewAssignStmt(base.Pos, v, stackcopy)))) } } @@ -185,7 +185,7 @@ func returnsfromheap(params *types.Type) []ir.Node { if v == nil { continue } - if stackcopy := v.Name().Stackcopy; stackcopy != nil && stackcopy.Class_ == ir.PPARAMOUT { + if stackcopy := v.Name().Stackcopy; stackcopy != nil && stackcopy.Class == ir.PPARAMOUT { nn = append(nn, walkStmt(typecheck.Stmt(ir.NewAssignStmt(base.Pos, stackcopy, v)))) } } -- cgit v1.2.3-54-g00ecf
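
The hunks above all apply the same mechanical rewrite: the `ir.Name` field `Class_` becomes the exported field `Class`, the trivial `Class()`/`SetClass()` accessors are deleted, and every call site switches from the getter/setter to direct field access. As a minimal sketch of that pattern outside the compiler (this is not part of the patch; the type and constant names below are hypothetical stand-ins, since `cmd/compile/internal/ir` is not importable from ordinary code):

```go
// Illustrative sketch only, assuming a simplified stand-in for ir.Name.
package main

import "fmt"

// Class mirrors the idea of ir.Class (storage class of a name).
type Class uint8

const (
	Pxxx Class = iota
	PAUTO
	PFUNC
)

// nameOld shows the shape before the change: a field with a trailing
// underscore plus trivial accessors.
type nameOld struct {
	Class_ Class
}

func (n *nameOld) Class() Class     { return n.Class_ }
func (n *nameOld) SetClass(x Class) { n.Class_ = x }

// nameNew shows the shape after the change: the field is simply exported
// and the accessors are gone.
type nameNew struct {
	Class Class
}

func main() {
	old := &nameOld{}
	old.SetClass(PFUNC) // old call-site style: n.SetClass(x)
	fmt.Println(old.Class() == PFUNC)

	n := &nameNew{}
	n.Class = PFUNC // rewritten call sites: n.SetClass(x) -> n.Class = x
	fmt.Println(n.Class == PFUNC) // and n.Class() -> n.Class
}
```

The call-site rewrites in the diff are exactly the two substitutions in `main` above, applied repository-wide; because the accessors did nothing but read and write the field, the change is behavior-preserving, which is consistent with the series passing `toolstash -cmp`.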