diff options
Diffstat (limited to 'src/cmd/compile/internal/gc/inl.go')
-rw-r--r-- | src/cmd/compile/internal/gc/inl.go | 949 |
1 files changed, 471 insertions, 478 deletions
diff --git a/src/cmd/compile/internal/gc/inl.go b/src/cmd/compile/internal/gc/inl.go index 419056985f..6310762c1f 100644 --- a/src/cmd/compile/internal/gc/inl.go +++ b/src/cmd/compile/internal/gc/inl.go @@ -27,11 +27,14 @@ package gc import ( + "cmd/compile/internal/base" + "cmd/compile/internal/ir" "cmd/compile/internal/logopt" "cmd/compile/internal/types" "cmd/internal/obj" "cmd/internal/src" "fmt" + "go/constant" "strings" ) @@ -50,27 +53,27 @@ const ( // Get the function's package. For ordinary functions it's on the ->sym, but for imported methods // the ->sym can be re-used in the local package, so peel it off the receiver's type. -func fnpkg(fn *Node) *types.Pkg { - if fn.IsMethod() { +func fnpkg(fn ir.Node) *types.Pkg { + if ir.IsMethod(fn) { // method - rcvr := fn.Type.Recv().Type + rcvr := fn.Type().Recv().Type if rcvr.IsPtr() { rcvr = rcvr.Elem() } if rcvr.Sym == nil { - Fatalf("receiver with no sym: [%v] %L (%v)", fn.Sym, fn, rcvr) + base.Fatalf("receiver with no sym: [%v] %L (%v)", fn.Sym(), fn, rcvr) } return rcvr.Sym.Pkg } // non-method - return fn.Sym.Pkg + return fn.Sym().Pkg } // Lazy typechecking of imported bodies. For local functions, caninl will set ->typecheck // because they're a copy of an already checked body. -func typecheckinl(fn *Node) { +func typecheckinl(fn ir.Node) { lno := setlineno(fn) expandInline(fn) @@ -81,17 +84,17 @@ func typecheckinl(fn *Node) { // the ->inl of a local function has been typechecked before caninl copied it. 
pkg := fnpkg(fn) - if pkg == localpkg || pkg == nil { + if pkg == ir.LocalPkg || pkg == nil { return // typecheckinl on local function } - if Debug.m > 2 || Debug_export != 0 { - fmt.Printf("typecheck import [%v] %L { %#v }\n", fn.Sym, fn, asNodes(fn.Func.Inl.Body)) + if base.Flag.LowerM > 2 || base.Debug.Export != 0 { + fmt.Printf("typecheck import [%v] %L { %#v }\n", fn.Sym(), fn, ir.AsNodes(fn.Func().Inl.Body)) } savefn := Curfn Curfn = fn - typecheckslice(fn.Func.Inl.Body, ctxStmt) + typecheckslice(fn.Func().Inl.Body, ctxStmt) Curfn = savefn // During expandInline (which imports fn.Func.Inl.Body), @@ -99,65 +102,65 @@ func typecheckinl(fn *Node) { // to fn.Func.Inl.Dcl for consistency with how local functions // behave. (Append because typecheckinl may be called multiple // times.) - fn.Func.Inl.Dcl = append(fn.Func.Inl.Dcl, fn.Func.Dcl...) - fn.Func.Dcl = nil + fn.Func().Inl.Dcl = append(fn.Func().Inl.Dcl, fn.Func().Dcl...) + fn.Func().Dcl = nil - lineno = lno + base.Pos = lno } // Caninl determines whether fn is inlineable. // If so, caninl saves fn->nbody in fn->inl and substitutes it with a copy. // fn and ->nbody will already have been typechecked. 
-func caninl(fn *Node) { - if fn.Op != ODCLFUNC { - Fatalf("caninl %v", fn) +func caninl(fn ir.Node) { + if fn.Op() != ir.ODCLFUNC { + base.Fatalf("caninl %v", fn) } - if fn.Func.Nname == nil { - Fatalf("caninl no nname %+v", fn) + if fn.Func().Nname == nil { + base.Fatalf("caninl no nname %+v", fn) } var reason string // reason, if any, that the function was not inlined - if Debug.m > 1 || logopt.Enabled() { + if base.Flag.LowerM > 1 || logopt.Enabled() { defer func() { if reason != "" { - if Debug.m > 1 { - fmt.Printf("%v: cannot inline %v: %s\n", fn.Line(), fn.Func.Nname, reason) + if base.Flag.LowerM > 1 { + fmt.Printf("%v: cannot inline %v: %s\n", ir.Line(fn), fn.Func().Nname, reason) } if logopt.Enabled() { - logopt.LogOpt(fn.Pos, "cannotInlineFunction", "inline", fn.funcname(), reason) + logopt.LogOpt(fn.Pos(), "cannotInlineFunction", "inline", ir.FuncName(fn), reason) } } }() } // If marked "go:noinline", don't inline - if fn.Func.Pragma&Noinline != 0 { + if fn.Func().Pragma&ir.Noinline != 0 { reason = "marked go:noinline" return } // If marked "go:norace" and -race compilation, don't inline. - if flag_race && fn.Func.Pragma&Norace != 0 { + if base.Flag.Race && fn.Func().Pragma&ir.Norace != 0 { reason = "marked go:norace with -race compilation" return } // If marked "go:nocheckptr" and -d checkptr compilation, don't inline. - if Debug_checkptr != 0 && fn.Func.Pragma&NoCheckPtr != 0 { + if base.Debug.Checkptr != 0 && fn.Func().Pragma&ir.NoCheckPtr != 0 { reason = "marked go:nocheckptr" return } // If marked "go:cgo_unsafe_args", don't inline, since the // function makes assumptions about its argument frame layout. - if fn.Func.Pragma&CgoUnsafeArgs != 0 { + if fn.Func().Pragma&ir.CgoUnsafeArgs != 0 { reason = "marked go:cgo_unsafe_args" return } // If marked as "go:uintptrescapes", don't inline, since the // escape information is lost during inlining. 
- if fn.Func.Pragma&UintptrEscapes != 0 { + if fn.Func().Pragma&ir.UintptrEscapes != 0 { reason = "marked as having an escaping uintptr argument" return } @@ -166,29 +169,29 @@ func caninl(fn *Node) { // granularity, so inlining yeswritebarrierrec functions can // confuse it (#22342). As a workaround, disallow inlining // them for now. - if fn.Func.Pragma&Yeswritebarrierrec != 0 { + if fn.Func().Pragma&ir.Yeswritebarrierrec != 0 { reason = "marked go:yeswritebarrierrec" return } // If fn has no body (is defined outside of Go), cannot inline it. - if fn.Nbody.Len() == 0 { + if fn.Body().Len() == 0 { reason = "no function body" return } if fn.Typecheck() == 0 { - Fatalf("caninl on non-typechecked function %v", fn) + base.Fatalf("caninl on non-typechecked function %v", fn) } - n := fn.Func.Nname - if n.Func.InlinabilityChecked() { + n := fn.Func().Nname + if n.Func().InlinabilityChecked() { return } - defer n.Func.SetInlinabilityChecked(true) + defer n.Func().SetInlinabilityChecked(true) cc := int32(inlineExtraCallCost) - if Debug.l == 4 { + if base.Flag.LowerL == 4 { cc = 1 // this appears to yield better performance than 0. 
} @@ -204,9 +207,9 @@ func caninl(fn *Node) { visitor := hairyVisitor{ budget: inlineMaxBudget, extraCallCost: cc, - usedLocals: make(map[*Node]bool), + usedLocals: make(map[ir.Node]bool), } - if visitor.visitList(fn.Nbody) { + if visitor.visitList(fn.Body()) { reason = visitor.reason return } @@ -215,82 +218,77 @@ func caninl(fn *Node) { return } - n.Func.Inl = &Inline{ + n.Func().Inl = &ir.Inline{ Cost: inlineMaxBudget - visitor.budget, - Dcl: inlcopylist(pruneUnusedAutos(n.Name.Defn.Func.Dcl, &visitor)), - Body: inlcopylist(fn.Nbody.Slice()), + Dcl: inlcopylist(pruneUnusedAutos(n.Name().Defn.Func().Dcl, &visitor)), + Body: inlcopylist(fn.Body().Slice()), } - // hack, TODO, check for better way to link method nodes back to the thing with the ->inl - // this is so export can find the body of a method - fn.Type.FuncType().Nname = asTypesNode(n) - - if Debug.m > 1 { - fmt.Printf("%v: can inline %#v with cost %d as: %#v { %#v }\n", fn.Line(), n, inlineMaxBudget-visitor.budget, fn.Type, asNodes(n.Func.Inl.Body)) - } else if Debug.m != 0 { - fmt.Printf("%v: can inline %v\n", fn.Line(), n) + if base.Flag.LowerM > 1 { + fmt.Printf("%v: can inline %#v with cost %d as: %#v { %#v }\n", ir.Line(fn), n, inlineMaxBudget-visitor.budget, fn.Type(), ir.AsNodes(n.Func().Inl.Body)) + } else if base.Flag.LowerM != 0 { + fmt.Printf("%v: can inline %v\n", ir.Line(fn), n) } if logopt.Enabled() { - logopt.LogOpt(fn.Pos, "canInlineFunction", "inline", fn.funcname(), fmt.Sprintf("cost: %d", inlineMaxBudget-visitor.budget)) + logopt.LogOpt(fn.Pos(), "canInlineFunction", "inline", ir.FuncName(fn), fmt.Sprintf("cost: %d", inlineMaxBudget-visitor.budget)) } } // inlFlood marks n's inline body for export and recursively ensures // all called functions are marked too. 
-func inlFlood(n *Node) { +func inlFlood(n ir.Node) { if n == nil { return } - if n.Op != ONAME || n.Class() != PFUNC { - Fatalf("inlFlood: unexpected %v, %v, %v", n, n.Op, n.Class()) + if n.Op() != ir.ONAME || n.Class() != ir.PFUNC { + base.Fatalf("inlFlood: unexpected %v, %v, %v", n, n.Op(), n.Class()) } - if n.Func == nil { - Fatalf("inlFlood: missing Func on %v", n) + if n.Func() == nil { + base.Fatalf("inlFlood: missing Func on %v", n) } - if n.Func.Inl == nil { + if n.Func().Inl == nil { return } - if n.Func.ExportInline() { + if n.Func().ExportInline() { return } - n.Func.SetExportInline(true) + n.Func().SetExportInline(true) typecheckinl(n) // Recursively identify all referenced functions for // reexport. We want to include even non-called functions, // because after inlining they might be callable. - inspectList(asNodes(n.Func.Inl.Body), func(n *Node) bool { - switch n.Op { - case ONAME: + ir.InspectList(ir.AsNodes(n.Func().Inl.Body), func(n ir.Node) bool { + switch n.Op() { + case ir.OMETHEXPR: + inlFlood(methodExprName(n)) + + case ir.ONAME: switch n.Class() { - case PFUNC: - if n.isMethodExpression() { - inlFlood(asNode(n.Type.Nname())) - } else { - inlFlood(n) - exportsym(n) - } - case PEXTERN: + case ir.PFUNC: + inlFlood(n) + exportsym(n) + case ir.PEXTERN: exportsym(n) } - case ODOTMETH: - fn := asNode(n.Type.Nname()) + case ir.ODOTMETH: + fn := methodExprName(n) inlFlood(fn) - case OCALLPART: + case ir.OCALLPART: // Okay, because we don't yet inline indirect // calls to method values. - case OCLOSURE: + case ir.OCLOSURE: // If the closure is inlinable, we'll need to // flood it too. But today we don't support // inlining functions that contain closures. 
// // When we do, we'll probably want: // inlFlood(n.Func.Closure.Func.Nname) - Fatalf("unexpected closure in inlinable function") + base.Fatalf("unexpected closure in inlinable function") } return true }) @@ -302,11 +300,11 @@ type hairyVisitor struct { budget int32 reason string extraCallCost int32 - usedLocals map[*Node]bool + usedLocals map[ir.Node]bool } // Look for anything we want to punt on. -func (v *hairyVisitor) visitList(ll Nodes) bool { +func (v *hairyVisitor) visitList(ll ir.Nodes) bool { for _, n := range ll.Slice() { if v.visit(n) { return true @@ -315,20 +313,20 @@ func (v *hairyVisitor) visitList(ll Nodes) bool { return false } -func (v *hairyVisitor) visit(n *Node) bool { +func (v *hairyVisitor) visit(n ir.Node) bool { if n == nil { return false } - switch n.Op { + switch n.Op() { // Call is okay if inlinable and we have the budget for the body. - case OCALLFUNC: + case ir.OCALLFUNC: // Functions that call runtime.getcaller{pc,sp} can not be inlined // because getcaller{pc,sp} expect a pointer to the caller's first argument. // // runtime.throw is a "cheap call" like panic in normal code. - if n.Left.Op == ONAME && n.Left.Class() == PFUNC && isRuntimePkg(n.Left.Sym.Pkg) { - fn := n.Left.Sym.Name + if n.Left().Op() == ir.ONAME && n.Left().Class() == ir.PFUNC && isRuntimePkg(n.Left().Sym().Pkg) { + fn := n.Left().Sym().Name if fn == "getcallerpc" || fn == "getcallersp" { v.reason = "call to " + fn return true @@ -344,8 +342,8 @@ func (v *hairyVisitor) visit(n *Node) bool { break } - if fn := inlCallee(n.Left); fn != nil && fn.Func.Inl != nil { - v.budget -= fn.Func.Inl.Cost + if fn := inlCallee(n.Left()); fn != nil && fn.Func().Inl != nil { + v.budget -= fn.Func().Inl.Cost break } @@ -353,16 +351,13 @@ func (v *hairyVisitor) visit(n *Node) bool { v.budget -= v.extraCallCost // Call is okay if inlinable and we have the budget for the body. 
- case OCALLMETH: - t := n.Left.Type + case ir.OCALLMETH: + t := n.Left().Type() if t == nil { - Fatalf("no function type for [%p] %+v\n", n.Left, n.Left) - } - if t.Nname() == nil { - Fatalf("no function definition for [%p] %+v\n", t, t) + base.Fatalf("no function type for [%p] %+v\n", n.Left(), n.Left()) } - if isRuntimePkg(n.Left.Sym.Pkg) { - fn := n.Left.Sym.Name + if isRuntimePkg(n.Left().Sym().Pkg) { + fn := n.Left().Sym().Name if fn == "heapBits.nextArena" { // Special case: explicitly allow // mid-stack inlining of @@ -372,7 +367,7 @@ func (v *hairyVisitor) visit(n *Node) bool { break } } - if inlfn := asNode(t.FuncType().Nname).Func; inlfn.Inl != nil { + if inlfn := methodExprName(n.Left()).Func(); inlfn.Inl != nil { v.budget -= inlfn.Inl.Cost break } @@ -380,58 +375,58 @@ func (v *hairyVisitor) visit(n *Node) bool { v.budget -= v.extraCallCost // Things that are too hairy, irrespective of the budget - case OCALL, OCALLINTER: + case ir.OCALL, ir.OCALLINTER: // Call cost for non-leaf inlining. v.budget -= v.extraCallCost - case OPANIC: + case ir.OPANIC: v.budget -= inlineExtraPanicCost - case ORECOVER: + case ir.ORECOVER: // recover matches the argument frame pointer to find // the right panic value, so it needs an argument frame. v.reason = "call to recover" return true - case OCLOSURE, - ORANGE, - OSELECT, - OGO, - ODEFER, - ODCLTYPE, // can't print yet - ORETJMP: - v.reason = "unhandled op " + n.Op.String() + case ir.OCLOSURE, + ir.ORANGE, + ir.OSELECT, + ir.OGO, + ir.ODEFER, + ir.ODCLTYPE, // can't print yet + ir.ORETJMP: + v.reason = "unhandled op " + n.Op().String() return true - case OAPPEND: + case ir.OAPPEND: v.budget -= inlineExtraAppendCost - case ODCLCONST, OEMPTY, OFALL: + case ir.ODCLCONST, ir.OEMPTY, ir.OFALL: // These nodes don't produce code; omit from inlining budget. return false - case OLABEL: + case ir.OLABEL: // TODO(mdempsky): Add support for inlining labeled control statements. 
- if n.labeledControl() != nil { + if labeledControl(n) != nil { v.reason = "labeled control" return true } - case OBREAK, OCONTINUE: - if n.Sym != nil { + case ir.OBREAK, ir.OCONTINUE: + if n.Sym() != nil { // Should have short-circuited due to labeledControl above. - Fatalf("unexpected labeled break/continue: %v", n) + base.Fatalf("unexpected labeled break/continue: %v", n) } - case OIF: - if Isconst(n.Left, CTBOOL) { + case ir.OIF: + if ir.IsConst(n.Left(), constant.Bool) { // This if and the condition cost nothing. - return v.visitList(n.Ninit) || v.visitList(n.Nbody) || - v.visitList(n.Rlist) + return v.visitList(n.Init()) || v.visitList(n.Body()) || + v.visitList(n.Rlist()) } - case ONAME: - if n.Class() == PAUTO { + case ir.ONAME: + if n.Class() == ir.PAUTO { v.usedLocals[n] = true } @@ -440,67 +435,67 @@ func (v *hairyVisitor) visit(n *Node) bool { v.budget-- // When debugging, don't stop early, to get full cost of inlining this function - if v.budget < 0 && Debug.m < 2 && !logopt.Enabled() { + if v.budget < 0 && base.Flag.LowerM < 2 && !logopt.Enabled() { return true } - return v.visit(n.Left) || v.visit(n.Right) || - v.visitList(n.List) || v.visitList(n.Rlist) || - v.visitList(n.Ninit) || v.visitList(n.Nbody) + return v.visit(n.Left()) || v.visit(n.Right()) || + v.visitList(n.List()) || v.visitList(n.Rlist()) || + v.visitList(n.Init()) || v.visitList(n.Body()) } // inlcopylist (together with inlcopy) recursively copies a list of nodes, except // that it keeps the same ONAME, OTYPE, and OLITERAL nodes. It is used for copying // the body and dcls of an inlineable function. 
-func inlcopylist(ll []*Node) []*Node { - s := make([]*Node, 0, len(ll)) +func inlcopylist(ll []ir.Node) []ir.Node { + s := make([]ir.Node, 0, len(ll)) for _, n := range ll { s = append(s, inlcopy(n)) } return s } -func inlcopy(n *Node) *Node { +func inlcopy(n ir.Node) ir.Node { if n == nil { return nil } - switch n.Op { - case ONAME, OTYPE, OLITERAL: + switch n.Op() { + case ir.ONAME, ir.OTYPE, ir.OLITERAL, ir.ONIL: return n } - m := n.copy() - if n.Op != OCALLPART && m.Func != nil { - Fatalf("unexpected Func: %v", m) + m := ir.Copy(n) + if n.Op() != ir.OCALLPART && m.Func() != nil { + base.Fatalf("unexpected Func: %v", m) } - m.Left = inlcopy(n.Left) - m.Right = inlcopy(n.Right) - m.List.Set(inlcopylist(n.List.Slice())) - m.Rlist.Set(inlcopylist(n.Rlist.Slice())) - m.Ninit.Set(inlcopylist(n.Ninit.Slice())) - m.Nbody.Set(inlcopylist(n.Nbody.Slice())) + m.SetLeft(inlcopy(n.Left())) + m.SetRight(inlcopy(n.Right())) + m.PtrList().Set(inlcopylist(n.List().Slice())) + m.PtrRlist().Set(inlcopylist(n.Rlist().Slice())) + m.PtrInit().Set(inlcopylist(n.Init().Slice())) + m.PtrBody().Set(inlcopylist(n.Body().Slice())) return m } -func countNodes(n *Node) int { +func countNodes(n ir.Node) int { if n == nil { return 0 } cnt := 1 - cnt += countNodes(n.Left) - cnt += countNodes(n.Right) - for _, n1 := range n.Ninit.Slice() { + cnt += countNodes(n.Left()) + cnt += countNodes(n.Right()) + for _, n1 := range n.Init().Slice() { cnt += countNodes(n1) } - for _, n1 := range n.Nbody.Slice() { + for _, n1 := range n.Body().Slice() { cnt += countNodes(n1) } - for _, n1 := range n.List.Slice() { + for _, n1 := range n.List().Slice() { cnt += countNodes(n1) } - for _, n1 := range n.Rlist.Slice() { + for _, n1 := range n.Rlist().Slice() { cnt += countNodes(n1) } return cnt @@ -508,7 +503,7 @@ func countNodes(n *Node) int { // Inlcalls/nodelist/node walks fn's statements and expressions and substitutes any // calls made to inlineable functions. This is the external entry point. 
-func inlcalls(fn *Node) { +func inlcalls(fn ir.Node) { savefn := Curfn Curfn = fn maxCost := int32(inlineMaxBudget) @@ -521,31 +516,31 @@ func inlcalls(fn *Node) { // but allow inlining if there is a recursion cycle of many functions. // Most likely, the inlining will stop before we even hit the beginning of // the cycle again, but the map catches the unusual case. - inlMap := make(map[*Node]bool) + inlMap := make(map[ir.Node]bool) fn = inlnode(fn, maxCost, inlMap) if fn != Curfn { - Fatalf("inlnode replaced curfn") + base.Fatalf("inlnode replaced curfn") } Curfn = savefn } // Turn an OINLCALL into a statement. -func inlconv2stmt(n *Node) { - n.Op = OBLOCK +func inlconv2stmt(n ir.Node) { + n.SetOp(ir.OBLOCK) // n->ninit stays - n.List.Set(n.Nbody.Slice()) + n.PtrList().Set(n.Body().Slice()) - n.Nbody.Set(nil) - n.Rlist.Set(nil) + n.PtrBody().Set(nil) + n.PtrRlist().Set(nil) } // Turn an OINLCALL into a single valued expression. // The result of inlconv2expr MUST be assigned back to n, e.g. // n.Left = inlconv2expr(n.Left) -func inlconv2expr(n *Node) *Node { - r := n.Rlist.First() - return addinit(r, append(n.Ninit.Slice(), n.Nbody.Slice()...)) +func inlconv2expr(n ir.Node) ir.Node { + r := n.Rlist().First() + return addinit(r, append(n.Init().Slice(), n.Body().Slice()...)) } // Turn the rlist (with the return values) of the OINLCALL in @@ -553,17 +548,17 @@ func inlconv2expr(n *Node) *Node { // containing the inlined statements on the first list element so // order will be preserved Used in return, oas2func and call // statements. 
-func inlconv2list(n *Node) []*Node { - if n.Op != OINLCALL || n.Rlist.Len() == 0 { - Fatalf("inlconv2list %+v\n", n) +func inlconv2list(n ir.Node) []ir.Node { + if n.Op() != ir.OINLCALL || n.Rlist().Len() == 0 { + base.Fatalf("inlconv2list %+v\n", n) } - s := n.Rlist.Slice() - s[0] = addinit(s[0], append(n.Ninit.Slice(), n.Nbody.Slice()...)) + s := n.Rlist().Slice() + s[0] = addinit(s[0], append(n.Init().Slice(), n.Body().Slice()...)) return s } -func inlnodelist(l Nodes, maxCost int32, inlMap map[*Node]bool) { +func inlnodelist(l ir.Nodes, maxCost int32, inlMap map[ir.Node]bool) { s := l.Slice() for i := range s { s[i] = inlnode(s[i], maxCost, inlMap) @@ -583,80 +578,80 @@ func inlnodelist(l Nodes, maxCost int32, inlMap map[*Node]bool) { // shorter and less complicated. // The result of inlnode MUST be assigned back to n, e.g. // n.Left = inlnode(n.Left) -func inlnode(n *Node, maxCost int32, inlMap map[*Node]bool) *Node { +func inlnode(n ir.Node, maxCost int32, inlMap map[ir.Node]bool) ir.Node { if n == nil { return n } - switch n.Op { - case ODEFER, OGO: - switch n.Left.Op { - case OCALLFUNC, OCALLMETH: - n.Left.SetNoInline(true) + switch n.Op() { + case ir.ODEFER, ir.OGO: + switch n.Left().Op() { + case ir.OCALLFUNC, ir.OCALLMETH: + n.Left().SetNoInline(true) } // TODO do them here (or earlier), // so escape analysis can avoid more heapmoves. - case OCLOSURE: + case ir.OCLOSURE: return n - case OCALLMETH: + case ir.OCALLMETH: // Prevent inlining some reflect.Value methods when using checkptr, // even when package reflect was compiled without it (#35073). 
- if s := n.Left.Sym; Debug_checkptr != 0 && isReflectPkg(s.Pkg) && (s.Name == "Value.UnsafeAddr" || s.Name == "Value.Pointer") { + if s := n.Left().Sym(); base.Debug.Checkptr != 0 && isReflectPkg(s.Pkg) && (s.Name == "Value.UnsafeAddr" || s.Name == "Value.Pointer") { return n } } lno := setlineno(n) - inlnodelist(n.Ninit, maxCost, inlMap) - for _, n1 := range n.Ninit.Slice() { - if n1.Op == OINLCALL { + inlnodelist(n.Init(), maxCost, inlMap) + for _, n1 := range n.Init().Slice() { + if n1.Op() == ir.OINLCALL { inlconv2stmt(n1) } } - n.Left = inlnode(n.Left, maxCost, inlMap) - if n.Left != nil && n.Left.Op == OINLCALL { - n.Left = inlconv2expr(n.Left) + n.SetLeft(inlnode(n.Left(), maxCost, inlMap)) + if n.Left() != nil && n.Left().Op() == ir.OINLCALL { + n.SetLeft(inlconv2expr(n.Left())) } - n.Right = inlnode(n.Right, maxCost, inlMap) - if n.Right != nil && n.Right.Op == OINLCALL { - if n.Op == OFOR || n.Op == OFORUNTIL { - inlconv2stmt(n.Right) - } else if n.Op == OAS2FUNC { - n.Rlist.Set(inlconv2list(n.Right)) - n.Right = nil - n.Op = OAS2 + n.SetRight(inlnode(n.Right(), maxCost, inlMap)) + if n.Right() != nil && n.Right().Op() == ir.OINLCALL { + if n.Op() == ir.OFOR || n.Op() == ir.OFORUNTIL { + inlconv2stmt(n.Right()) + } else if n.Op() == ir.OAS2FUNC { + n.PtrRlist().Set(inlconv2list(n.Right())) + n.SetRight(nil) + n.SetOp(ir.OAS2) n.SetTypecheck(0) n = typecheck(n, ctxStmt) } else { - n.Right = inlconv2expr(n.Right) + n.SetRight(inlconv2expr(n.Right())) } } - inlnodelist(n.List, maxCost, inlMap) - if n.Op == OBLOCK { - for _, n2 := range n.List.Slice() { - if n2.Op == OINLCALL { + inlnodelist(n.List(), maxCost, inlMap) + if n.Op() == ir.OBLOCK { + for _, n2 := range n.List().Slice() { + if n2.Op() == ir.OINLCALL { inlconv2stmt(n2) } } } else { - s := n.List.Slice() + s := n.List().Slice() for i1, n1 := range s { - if n1 != nil && n1.Op == OINLCALL { + if n1 != nil && n1.Op() == ir.OINLCALL { s[i1] = inlconv2expr(s[i1]) } } } - inlnodelist(n.Rlist, maxCost, 
inlMap) - s := n.Rlist.Slice() + inlnodelist(n.Rlist(), maxCost, inlMap) + s := n.Rlist().Slice() for i1, n1 := range s { - if n1.Op == OINLCALL { - if n.Op == OIF { + if n1.Op() == ir.OINLCALL { + if n.Op() == ir.OIF { inlconv2stmt(n1) } else { s[i1] = inlconv2expr(s[i1]) @@ -664,9 +659,9 @@ func inlnode(n *Node, maxCost int32, inlMap map[*Node]bool) *Node { } } - inlnodelist(n.Nbody, maxCost, inlMap) - for _, n := range n.Nbody.Slice() { - if n.Op == OINLCALL { + inlnodelist(n.Body(), maxCost, inlMap) + for _, n := range n.Body().Slice() { + if n.Op() == ir.OINLCALL { inlconv2stmt(n) } } @@ -674,75 +669,70 @@ func inlnode(n *Node, maxCost int32, inlMap map[*Node]bool) *Node { // with all the branches out of the way, it is now time to // transmogrify this node itself unless inhibited by the // switch at the top of this function. - switch n.Op { - case OCALLFUNC, OCALLMETH: + switch n.Op() { + case ir.OCALLFUNC, ir.OCALLMETH: if n.NoInline() { return n } } - switch n.Op { - case OCALLFUNC: - if Debug.m > 3 { - fmt.Printf("%v:call to func %+v\n", n.Line(), n.Left) + switch n.Op() { + case ir.OCALLFUNC: + if base.Flag.LowerM > 3 { + fmt.Printf("%v:call to func %+v\n", ir.Line(n), n.Left()) } if isIntrinsicCall(n) { break } - if fn := inlCallee(n.Left); fn != nil && fn.Func.Inl != nil { + if fn := inlCallee(n.Left()); fn != nil && fn.Func().Inl != nil { n = mkinlcall(n, fn, maxCost, inlMap) } - case OCALLMETH: - if Debug.m > 3 { - fmt.Printf("%v:call to meth %L\n", n.Line(), n.Left.Right) + case ir.OCALLMETH: + if base.Flag.LowerM > 3 { + fmt.Printf("%v:call to meth %L\n", ir.Line(n), n.Left().Right()) } // typecheck should have resolved ODOTMETH->type, whose nname points to the actual function. 
- if n.Left.Type == nil { - Fatalf("no function type for [%p] %+v\n", n.Left, n.Left) + if n.Left().Type() == nil { + base.Fatalf("no function type for [%p] %+v\n", n.Left(), n.Left()) } - if n.Left.Type.Nname() == nil { - Fatalf("no function definition for [%p] %+v\n", n.Left.Type, n.Left.Type) - } - - n = mkinlcall(n, asNode(n.Left.Type.FuncType().Nname), maxCost, inlMap) + n = mkinlcall(n, methodExprName(n.Left()), maxCost, inlMap) } - lineno = lno + base.Pos = lno return n } // inlCallee takes a function-typed expression and returns the underlying function ONAME // that it refers to if statically known. Otherwise, it returns nil. -func inlCallee(fn *Node) *Node { +func inlCallee(fn ir.Node) ir.Node { fn = staticValue(fn) switch { - case fn.Op == ONAME && fn.Class() == PFUNC: - if fn.isMethodExpression() { - n := asNode(fn.Type.Nname()) - // Check that receiver type matches fn.Left. - // TODO(mdempsky): Handle implicit dereference - // of pointer receiver argument? - if n == nil || !types.Identical(n.Type.Recv().Type, fn.Left.Type) { - return nil - } - return n + case fn.Op() == ir.OMETHEXPR: + n := methodExprName(fn) + // Check that receiver type matches fn.Left. + // TODO(mdempsky): Handle implicit dereference + // of pointer receiver argument? + if n == nil || !types.Identical(n.Type().Recv().Type, fn.Left().Type()) { + return nil } + return n + case fn.Op() == ir.ONAME && fn.Class() == ir.PFUNC: return fn - case fn.Op == OCLOSURE: - c := fn.Func.Closure + case fn.Op() == ir.OCLOSURE: + c := fn.Func().Decl caninl(c) - return c.Func.Nname + return c.Func().Nname } return nil } -func staticValue(n *Node) *Node { +func staticValue(n ir.Node) ir.Node { for { - if n.Op == OCONVNOP { - n = n.Left + if n.Op() == ir.OCONVNOP { + n = n.Left() continue } @@ -757,34 +747,34 @@ func staticValue(n *Node) *Node { // staticValue1 implements a simple SSA-like optimization. 
If n is a local variable // that is initialized and never reassigned, staticValue1 returns the initializer // expression. Otherwise, it returns nil. -func staticValue1(n *Node) *Node { - if n.Op != ONAME || n.Class() != PAUTO || n.Name.Addrtaken() { +func staticValue1(n ir.Node) ir.Node { + if n.Op() != ir.ONAME || n.Class() != ir.PAUTO || n.Name().Addrtaken() { return nil } - defn := n.Name.Defn + defn := n.Name().Defn if defn == nil { return nil } - var rhs *Node + var rhs ir.Node FindRHS: - switch defn.Op { - case OAS: - rhs = defn.Right - case OAS2: - for i, lhs := range defn.List.Slice() { + switch defn.Op() { + case ir.OAS: + rhs = defn.Right() + case ir.OAS2: + for i, lhs := range defn.List().Slice() { if lhs == n { - rhs = defn.Rlist.Index(i) + rhs = defn.Rlist().Index(i) break FindRHS } } - Fatalf("%v missing from LHS of %v", n, defn) + base.Fatalf("%v missing from LHS of %v", n, defn) default: return nil } if rhs == nil { - Fatalf("RHS is nil: %v", defn) + base.Fatalf("RHS is nil: %v", defn) } unsafe, _ := reassigned(n) @@ -801,70 +791,70 @@ FindRHS: // useful for -m output documenting the reason for inhibited optimizations. // NB: global variables are always considered to be re-assigned. // TODO: handle initial declaration not including an assignment and followed by a single assignment? -func reassigned(n *Node) (bool, *Node) { - if n.Op != ONAME { - Fatalf("reassigned %v", n) +func reassigned(n ir.Node) (bool, ir.Node) { + if n.Op() != ir.ONAME { + base.Fatalf("reassigned %v", n) } // no way to reliably check for no-reassignment of globals, assume it can be - if n.Name.Curfn == nil { + if n.Name().Curfn == nil { return true, nil } - f := n.Name.Curfn + f := n.Name().Curfn // There just might be a good reason for this although this can be pretty surprising: // local variables inside a closure have Curfn pointing to the OCLOSURE node instead // of the corresponding ODCLFUNC. 
// We need to walk the function body to check for reassignments so we follow the // linkage to the ODCLFUNC node as that is where body is held. - if f.Op == OCLOSURE { - f = f.Func.Closure + if f.Op() == ir.OCLOSURE { + f = f.Func().Decl } v := reassignVisitor{name: n} - a := v.visitList(f.Nbody) + a := v.visitList(f.Body()) return a != nil, a } type reassignVisitor struct { - name *Node + name ir.Node } -func (v *reassignVisitor) visit(n *Node) *Node { +func (v *reassignVisitor) visit(n ir.Node) ir.Node { if n == nil { return nil } - switch n.Op { - case OAS: - if n.Left == v.name && n != v.name.Name.Defn { + switch n.Op() { + case ir.OAS: + if n.Left() == v.name && n != v.name.Name().Defn { return n } - case OAS2, OAS2FUNC, OAS2MAPR, OAS2DOTTYPE: - for _, p := range n.List.Slice() { - if p == v.name && n != v.name.Name.Defn { + case ir.OAS2, ir.OAS2FUNC, ir.OAS2MAPR, ir.OAS2DOTTYPE: + for _, p := range n.List().Slice() { + if p == v.name && n != v.name.Name().Defn { return n } } } - if a := v.visit(n.Left); a != nil { + if a := v.visit(n.Left()); a != nil { return a } - if a := v.visit(n.Right); a != nil { + if a := v.visit(n.Right()); a != nil { return a } - if a := v.visitList(n.List); a != nil { + if a := v.visitList(n.List()); a != nil { return a } - if a := v.visitList(n.Rlist); a != nil { + if a := v.visitList(n.Rlist()); a != nil { return a } - if a := v.visitList(n.Ninit); a != nil { + if a := v.visitList(n.Init()); a != nil { return a } - if a := v.visitList(n.Nbody); a != nil { + if a := v.visitList(n.Body()); a != nil { return a } return nil } -func (v *reassignVisitor) visitList(l Nodes) *Node { +func (v *reassignVisitor) visitList(l ir.Nodes) ir.Node { for _, n := range l.Slice() { if a := v.visit(n); a != nil { return a @@ -873,18 +863,18 @@ func (v *reassignVisitor) visitList(l Nodes) *Node { return nil } -func inlParam(t *types.Field, as *Node, inlvars map[*Node]*Node) *Node { - n := asNode(t.Nname) - if n == nil || n.isBlank() { - return nblank 
+func inlParam(t *types.Field, as ir.Node, inlvars map[ir.Node]ir.Node) ir.Node { + n := ir.AsNode(t.Nname) + if n == nil || ir.IsBlank(n) { + return ir.BlankNode } inlvar := inlvars[n] if inlvar == nil { - Fatalf("missing inlvar for %v", n) + base.Fatalf("missing inlvar for %v", n) } - as.Ninit.Append(nod(ODCL, inlvar, nil)) - inlvar.Name.Defn = as + as.PtrInit().Append(ir.Nod(ir.ODCL, inlvar, nil)) + inlvar.Name().Defn = as return inlvar } @@ -897,33 +887,33 @@ var inlgen int // parameters. // The result of mkinlcall MUST be assigned back to n, e.g. // n.Left = mkinlcall(n.Left, fn, isddd) -func mkinlcall(n, fn *Node, maxCost int32, inlMap map[*Node]bool) *Node { - if fn.Func.Inl == nil { +func mkinlcall(n, fn ir.Node, maxCost int32, inlMap map[ir.Node]bool) ir.Node { + if fn.Func().Inl == nil { if logopt.Enabled() { - logopt.LogOpt(n.Pos, "cannotInlineCall", "inline", Curfn.funcname(), - fmt.Sprintf("%s cannot be inlined", fn.pkgFuncName())) + logopt.LogOpt(n.Pos(), "cannotInlineCall", "inline", ir.FuncName(Curfn), + fmt.Sprintf("%s cannot be inlined", ir.PkgFuncName(fn))) } return n } - if fn.Func.Inl.Cost > maxCost { + if fn.Func().Inl.Cost > maxCost { // The inlined function body is too big. Typically we use this check to restrict // inlining into very big functions. See issue 26546 and 17566. if logopt.Enabled() { - logopt.LogOpt(n.Pos, "cannotInlineCall", "inline", Curfn.funcname(), - fmt.Sprintf("cost %d of %s exceeds max large caller cost %d", fn.Func.Inl.Cost, fn.pkgFuncName(), maxCost)) + logopt.LogOpt(n.Pos(), "cannotInlineCall", "inline", ir.FuncName(Curfn), + fmt.Sprintf("cost %d of %s exceeds max large caller cost %d", fn.Func().Inl.Cost, ir.PkgFuncName(fn), maxCost)) } return n } - if fn == Curfn || fn.Name.Defn == Curfn { + if fn == Curfn || fn.Name().Defn == Curfn { // Can't recursively inline a function into itself. 
if logopt.Enabled() { - logopt.LogOpt(n.Pos, "cannotInlineCall", "inline", fmt.Sprintf("recursive call to %s", Curfn.funcname())) + logopt.LogOpt(n.Pos(), "cannotInlineCall", "inline", fmt.Sprintf("recursive call to %s", ir.FuncName(Curfn))) } return n } - if instrumenting && isRuntimePkg(fn.Sym.Pkg) { + if instrumenting && isRuntimePkg(fn.Sym().Pkg) { // Runtime package must not be instrumented. // Instrument skips runtime package. However, some runtime code can be // inlined into other packages and instrumented there. To avoid this, @@ -934,8 +924,8 @@ func mkinlcall(n, fn *Node, maxCost int32, inlMap map[*Node]bool) *Node { } if inlMap[fn] { - if Debug.m > 1 { - fmt.Printf("%v: cannot inline %v into %v: repeated recursive cycle\n", n.Line(), fn, Curfn.funcname()) + if base.Flag.LowerM > 1 { + fmt.Printf("%v: cannot inline %v into %v: repeated recursive cycle\n", ir.Line(n), fn, ir.FuncName(Curfn)) } return n } @@ -943,115 +933,115 @@ func mkinlcall(n, fn *Node, maxCost int32, inlMap map[*Node]bool) *Node { defer func() { inlMap[fn] = false }() - if Debug_typecheckinl == 0 { + if base.Debug.TypecheckInl == 0 { typecheckinl(fn) } // We have a function node, and it has an inlineable body. 
- if Debug.m > 1 { - fmt.Printf("%v: inlining call to %v %#v { %#v }\n", n.Line(), fn.Sym, fn.Type, asNodes(fn.Func.Inl.Body)) - } else if Debug.m != 0 { - fmt.Printf("%v: inlining call to %v\n", n.Line(), fn) + if base.Flag.LowerM > 1 { + fmt.Printf("%v: inlining call to %v %#v { %#v }\n", ir.Line(n), fn.Sym(), fn.Type(), ir.AsNodes(fn.Func().Inl.Body)) + } else if base.Flag.LowerM != 0 { + fmt.Printf("%v: inlining call to %v\n", ir.Line(n), fn) } - if Debug.m > 2 { - fmt.Printf("%v: Before inlining: %+v\n", n.Line(), n) + if base.Flag.LowerM > 2 { + fmt.Printf("%v: Before inlining: %+v\n", ir.Line(n), n) } - if ssaDump != "" && ssaDump == Curfn.funcname() { + if ssaDump != "" && ssaDump == ir.FuncName(Curfn) { ssaDumpInlined = append(ssaDumpInlined, fn) } - ninit := n.Ninit + ninit := n.Init() // For normal function calls, the function callee expression // may contain side effects (e.g., added by addinit during // inlconv2expr or inlconv2list). Make sure to preserve these, // if necessary (#42703). - if n.Op == OCALLFUNC { - callee := n.Left - for callee.Op == OCONVNOP { - ninit.AppendNodes(&callee.Ninit) - callee = callee.Left + if n.Op() == ir.OCALLFUNC { + callee := n.Left() + for callee.Op() == ir.OCONVNOP { + ninit.AppendNodes(callee.PtrInit()) + callee = callee.Left() } - if callee.Op != ONAME && callee.Op != OCLOSURE { - Fatalf("unexpected callee expression: %v", callee) + if callee.Op() != ir.ONAME && callee.Op() != ir.OCLOSURE && callee.Op() != ir.OMETHEXPR { + base.Fatalf("unexpected callee expression: %v", callee) } } // Make temp names to use instead of the originals. - inlvars := make(map[*Node]*Node) + inlvars := make(map[ir.Node]ir.Node) // record formals/locals for later post-processing - var inlfvars []*Node + var inlfvars []ir.Node // Handle captured variables when inlining closures. 
- if fn.Name.Defn != nil { - if c := fn.Name.Defn.Func.Closure; c != nil { - for _, v := range c.Func.Closure.Func.Cvars.Slice() { - if v.Op == OXXX { + if fn.Name().Defn != nil { + if c := fn.Name().Defn.Func().OClosure; c != nil { + for _, v := range c.Func().ClosureVars.Slice() { + if v.Op() == ir.OXXX { continue } - o := v.Name.Param.Outer + o := v.Name().Param.Outer // make sure the outer param matches the inlining location // NB: if we enabled inlining of functions containing OCLOSURE or refined // the reassigned check via some sort of copy propagation this would most // likely need to be changed to a loop to walk up to the correct Param - if o == nil || (o.Name.Curfn != Curfn && o.Name.Curfn.Func.Closure != Curfn) { - Fatalf("%v: unresolvable capture %v %v\n", n.Line(), fn, v) + if o == nil || (o.Name().Curfn != Curfn && o.Name().Curfn.Func().OClosure != Curfn) { + base.Fatalf("%v: unresolvable capture %v %v\n", ir.Line(n), fn, v) } - if v.Name.Byval() { + if v.Name().Byval() { iv := typecheck(inlvar(v), ctxExpr) - ninit.Append(nod(ODCL, iv, nil)) - ninit.Append(typecheck(nod(OAS, iv, o), ctxStmt)) + ninit.Append(ir.Nod(ir.ODCL, iv, nil)) + ninit.Append(typecheck(ir.Nod(ir.OAS, iv, o), ctxStmt)) inlvars[v] = iv } else { - addr := newname(lookup("&" + v.Sym.Name)) - addr.Type = types.NewPtr(v.Type) + addr := NewName(lookup("&" + v.Sym().Name)) + addr.SetType(types.NewPtr(v.Type())) ia := typecheck(inlvar(addr), ctxExpr) - ninit.Append(nod(ODCL, ia, nil)) - ninit.Append(typecheck(nod(OAS, ia, nod(OADDR, o, nil)), ctxStmt)) + ninit.Append(ir.Nod(ir.ODCL, ia, nil)) + ninit.Append(typecheck(ir.Nod(ir.OAS, ia, ir.Nod(ir.OADDR, o, nil)), ctxStmt)) inlvars[addr] = ia // When capturing by reference, all occurrence of the captured var // must be substituted with dereference of the temporary address - inlvars[v] = typecheck(nod(ODEREF, ia, nil), ctxExpr) + inlvars[v] = typecheck(ir.Nod(ir.ODEREF, ia, nil), ctxExpr) } } } } - for _, ln := range fn.Func.Inl.Dcl { - if 
ln.Op != ONAME { + for _, ln := range fn.Func().Inl.Dcl { + if ln.Op() != ir.ONAME { continue } - if ln.Class() == PPARAMOUT { // return values handled below. + if ln.Class() == ir.PPARAMOUT { // return values handled below. continue } - if ln.isParamStackCopy() { // ignore the on-stack copy of a parameter that moved to the heap + if isParamStackCopy(ln) { // ignore the on-stack copy of a parameter that moved to the heap // TODO(mdempsky): Remove once I'm confident // this never actually happens. We currently // perform inlining before escape analysis, so // nothing should have moved to the heap yet. - Fatalf("impossible: %v", ln) + base.Fatalf("impossible: %v", ln) } inlf := typecheck(inlvar(ln), ctxExpr) inlvars[ln] = inlf - if genDwarfInline > 0 { - if ln.Class() == PPARAM { - inlf.Name.SetInlFormal(true) + if base.Flag.GenDwarfInl > 0 { + if ln.Class() == ir.PPARAM { + inlf.Name().SetInlFormal(true) } else { - inlf.Name.SetInlLocal(true) + inlf.Name().SetInlLocal(true) } - inlf.Pos = ln.Pos + inlf.SetPos(ln.Pos()) inlfvars = append(inlfvars, inlf) } } nreturns := 0 - inspectList(asNodes(fn.Func.Inl.Body), func(n *Node) bool { - if n != nil && n.Op == ORETURN { + ir.InspectList(ir.AsNodes(fn.Func().Inl.Body), func(n ir.Node) bool { + if n != nil && n.Op() == ir.ORETURN { nreturns++ } return true @@ -1063,10 +1053,10 @@ func mkinlcall(n, fn *Node, maxCost int32, inlMap map[*Node]bool) *Node { delayretvars := nreturns == 1 // temporaries for return values. 
- var retvars []*Node - for i, t := range fn.Type.Results().Fields().Slice() { - var m *Node - if n := asNode(t.Nname); n != nil && !n.isBlank() && !strings.HasPrefix(n.Sym.Name, "~r") { + var retvars []ir.Node + for i, t := range fn.Type().Results().Fields().Slice() { + var m ir.Node + if n := ir.AsNode(t.Nname); n != nil && !ir.IsBlank(n) && !strings.HasPrefix(n.Sym().Name, "~r") { m = inlvar(n) m = typecheck(m, ctxExpr) inlvars[n] = m @@ -1076,13 +1066,13 @@ func mkinlcall(n, fn *Node, maxCost int32, inlMap map[*Node]bool) *Node { m = retvar(t, i) } - if genDwarfInline > 0 { + if base.Flag.GenDwarfInl > 0 { // Don't update the src.Pos on a return variable if it // was manufactured by the inliner (e.g. "~R2"); such vars // were not part of the original callee. - if !strings.HasPrefix(m.Sym.Name, "~R") { - m.Name.SetInlFormal(true) - m.Pos = t.Pos + if !strings.HasPrefix(m.Sym().Name, "~R") { + m.Name().SetInlFormal(true) + m.SetPos(t.Pos) inlfvars = append(inlfvars, m) } } @@ -1091,53 +1081,53 @@ func mkinlcall(n, fn *Node, maxCost int32, inlMap map[*Node]bool) *Node { } // Assign arguments to the parameters' temp names. - as := nod(OAS2, nil, nil) + as := ir.Nod(ir.OAS2, nil, nil) as.SetColas(true) - if n.Op == OCALLMETH { - if n.Left.Left == nil { - Fatalf("method call without receiver: %+v", n) + if n.Op() == ir.OCALLMETH { + if n.Left().Left() == nil { + base.Fatalf("method call without receiver: %+v", n) } - as.Rlist.Append(n.Left.Left) + as.PtrRlist().Append(n.Left().Left()) } - as.Rlist.Append(n.List.Slice()...) + as.PtrRlist().Append(n.List().Slice()...) // For non-dotted calls to variadic functions, we assign the // variadic parameter's temp name separately. 
- var vas *Node + var vas ir.Node - if recv := fn.Type.Recv(); recv != nil { - as.List.Append(inlParam(recv, as, inlvars)) + if recv := fn.Type().Recv(); recv != nil { + as.PtrList().Append(inlParam(recv, as, inlvars)) } - for _, param := range fn.Type.Params().Fields().Slice() { + for _, param := range fn.Type().Params().Fields().Slice() { // For ordinary parameters or variadic parameters in // dotted calls, just add the variable to the // assignment list, and we're done. if !param.IsDDD() || n.IsDDD() { - as.List.Append(inlParam(param, as, inlvars)) + as.PtrList().Append(inlParam(param, as, inlvars)) continue } // Otherwise, we need to collect the remaining values // to pass as a slice. - x := as.List.Len() - for as.List.Len() < as.Rlist.Len() { - as.List.Append(argvar(param.Type, as.List.Len())) + x := as.List().Len() + for as.List().Len() < as.Rlist().Len() { + as.PtrList().Append(argvar(param.Type, as.List().Len())) } - varargs := as.List.Slice()[x:] + varargs := as.List().Slice()[x:] - vas = nod(OAS, nil, nil) - vas.Left = inlParam(param, vas, inlvars) + vas = ir.Nod(ir.OAS, nil, nil) + vas.SetLeft(inlParam(param, vas, inlvars)) if len(varargs) == 0 { - vas.Right = nodnil() - vas.Right.Type = param.Type + vas.SetRight(nodnil()) + vas.Right().SetType(param.Type) } else { - vas.Right = nod(OCOMPLIT, nil, typenod(param.Type)) - vas.Right.List.Set(varargs) + vas.SetRight(ir.Nod(ir.OCOMPLIT, nil, typenod(param.Type))) + vas.Right().PtrList().Set(varargs) } } - if as.Rlist.Len() != 0 { + if as.Rlist().Len() != 0 { as = typecheck(as, ctxStmt) ninit.Append(as) } @@ -1150,8 +1140,8 @@ func mkinlcall(n, fn *Node, maxCost int32, inlMap map[*Node]bool) *Node { if !delayretvars { // Zero the return parameters. 
for _, n := range retvars { - ninit.Append(nod(ODCL, n, nil)) - ras := nod(OAS, n, nil) + ninit.Append(ir.Nod(ir.ODCL, n, nil)) + ras := ir.Nod(ir.OAS, n, nil) ras = typecheck(ras, ctxStmt) ninit.Append(ras) } @@ -1162,25 +1152,25 @@ func mkinlcall(n, fn *Node, maxCost int32, inlMap map[*Node]bool) *Node { inlgen++ parent := -1 - if b := Ctxt.PosTable.Pos(n.Pos).Base(); b != nil { + if b := base.Ctxt.PosTable.Pos(n.Pos()).Base(); b != nil { parent = b.InliningIndex() } - newIndex := Ctxt.InlTree.Add(parent, n.Pos, fn.Sym.Linksym()) + newIndex := base.Ctxt.InlTree.Add(parent, n.Pos(), fn.Sym().Linksym()) // Add an inline mark just before the inlined body. // This mark is inline in the code so that it's a reasonable spot // to put a breakpoint. Not sure if that's really necessary or not // (in which case it could go at the end of the function instead). // Note issue 28603. - inlMark := nod(OINLMARK, nil, nil) - inlMark.Pos = n.Pos.WithIsStmt() - inlMark.Xoffset = int64(newIndex) + inlMark := ir.Nod(ir.OINLMARK, nil, nil) + inlMark.SetPos(n.Pos().WithIsStmt()) + inlMark.SetOffset(int64(newIndex)) ninit.Append(inlMark) - if genDwarfInline > 0 { - if !fn.Sym.Linksym().WasInlined() { - Ctxt.DwFixups.SetPrecursorFunc(fn.Sym.Linksym(), fn) - fn.Sym.Linksym().Set(obj.AttrWasInlined, true) + if base.Flag.GenDwarfInl > 0 { + if !fn.Sym().Linksym().WasInlined() { + base.Ctxt.DwFixups.SetPrecursorFunc(fn.Sym().Linksym(), fn) + fn.Sym().Linksym().Set(obj.AttrWasInlined, true) } } @@ -1193,26 +1183,26 @@ func mkinlcall(n, fn *Node, maxCost int32, inlMap map[*Node]bool) *Node { newInlIndex: newIndex, } - body := subst.list(asNodes(fn.Func.Inl.Body)) + body := subst.list(ir.AsNodes(fn.Func().Inl.Body)) - lab := nodSym(OLABEL, nil, retlabel) + lab := nodSym(ir.OLABEL, nil, retlabel) body = append(body, lab) typecheckslice(body, ctxStmt) - if genDwarfInline > 0 { + if base.Flag.GenDwarfInl > 0 { for _, v := range inlfvars { - v.Pos = subst.updatedPos(v.Pos) + 
v.SetPos(subst.updatedPos(v.Pos())) } } //dumplist("ninit post", ninit); - call := nod(OINLCALL, nil, nil) - call.Ninit.Set(ninit.Slice()) - call.Nbody.Set(body) - call.Rlist.Set(retvars) - call.Type = n.Type + call := ir.Nod(ir.OINLCALL, nil, nil) + call.PtrInit().Set(ninit.Slice()) + call.PtrBody().Set(body) + call.PtrRlist().Set(retvars) + call.SetType(n.Type()) call.SetTypecheck(1) // transitive inlining @@ -1221,15 +1211,15 @@ func mkinlcall(n, fn *Node, maxCost int32, inlMap map[*Node]bool) *Node { // instead we emit the things that the body needs // and each use must redo the inlining. // luckily these are small. - inlnodelist(call.Nbody, maxCost, inlMap) - for _, n := range call.Nbody.Slice() { - if n.Op == OINLCALL { + inlnodelist(call.Body(), maxCost, inlMap) + for _, n := range call.Body().Slice() { + if n.Op() == ir.OINLCALL { inlconv2stmt(n) } } - if Debug.m > 2 { - fmt.Printf("%v: After inlining %+v\n\n", call.Line(), call) + if base.Flag.LowerM > 2 { + fmt.Printf("%v: After inlining %+v\n\n", ir.Line(call), call) } return call @@ -1238,42 +1228,42 @@ func mkinlcall(n, fn *Node, maxCost int32, inlMap map[*Node]bool) *Node { // Every time we expand a function we generate a new set of tmpnames, // PAUTO's in the calling functions, and link them off of the // PPARAM's, PAUTOS and PPARAMOUTs of the called function. 
-func inlvar(var_ *Node) *Node { - if Debug.m > 3 { +func inlvar(var_ ir.Node) ir.Node { + if base.Flag.LowerM > 3 { fmt.Printf("inlvar %+v\n", var_) } - n := newname(var_.Sym) - n.Type = var_.Type - n.SetClass(PAUTO) - n.Name.SetUsed(true) - n.Name.Curfn = Curfn // the calling function, not the called one - n.Name.SetAddrtaken(var_.Name.Addrtaken()) + n := NewName(var_.Sym()) + n.SetType(var_.Type()) + n.SetClass(ir.PAUTO) + n.Name().SetUsed(true) + n.Name().Curfn = Curfn // the calling function, not the called one + n.Name().SetAddrtaken(var_.Name().Addrtaken()) - Curfn.Func.Dcl = append(Curfn.Func.Dcl, n) + Curfn.Func().Dcl = append(Curfn.Func().Dcl, n) return n } // Synthesize a variable to store the inlined function's results in. -func retvar(t *types.Field, i int) *Node { - n := newname(lookupN("~R", i)) - n.Type = t.Type - n.SetClass(PAUTO) - n.Name.SetUsed(true) - n.Name.Curfn = Curfn // the calling function, not the called one - Curfn.Func.Dcl = append(Curfn.Func.Dcl, n) +func retvar(t *types.Field, i int) ir.Node { + n := NewName(lookupN("~R", i)) + n.SetType(t.Type) + n.SetClass(ir.PAUTO) + n.Name().SetUsed(true) + n.Name().Curfn = Curfn // the calling function, not the called one + Curfn.Func().Dcl = append(Curfn.Func().Dcl, n) return n } // Synthesize a variable to store the inlined function's arguments // when they come from a multiple return call. 
-func argvar(t *types.Type, i int) *Node { - n := newname(lookupN("~arg", i)) - n.Type = t.Elem() - n.SetClass(PAUTO) - n.Name.SetUsed(true) - n.Name.Curfn = Curfn // the calling function, not the called one - Curfn.Func.Dcl = append(Curfn.Func.Dcl, n) +func argvar(t *types.Type, i int) ir.Node { + n := NewName(lookupN("~arg", i)) + n.SetType(t.Elem()) + n.SetClass(ir.PAUTO) + n.Name().SetUsed(true) + n.Name().Curfn = Curfn // the calling function, not the called one + Curfn.Func().Dcl = append(Curfn.Func().Dcl, n) return n } @@ -1284,13 +1274,13 @@ type inlsubst struct { retlabel *types.Sym // Temporary result variables. - retvars []*Node + retvars []ir.Node // Whether result variables should be initialized at the // "return" statement. delayretvars bool - inlvars map[*Node]*Node + inlvars map[ir.Node]ir.Node // bases maps from original PosBase to PosBase with an extra // inlined call frame. @@ -1302,8 +1292,8 @@ type inlsubst struct { } // list inlines a list of nodes. -func (subst *inlsubst) list(ll Nodes) []*Node { - s := make([]*Node, 0, ll.Len()) +func (subst *inlsubst) list(ll ir.Nodes) []ir.Node { + s := make([]ir.Node, 0, ll.Len()) for _, n := range ll.Slice() { s = append(s, subst.node(n)) } @@ -1314,98 +1304,101 @@ func (subst *inlsubst) list(ll Nodes) []*Node { // inlined function, substituting references to input/output // parameters with ones to the tmpnames, and substituting returns with // assignments to the output. 
-func (subst *inlsubst) node(n *Node) *Node { +func (subst *inlsubst) node(n ir.Node) ir.Node { if n == nil { return nil } - switch n.Op { - case ONAME: + switch n.Op() { + case ir.ONAME: if inlvar := subst.inlvars[n]; inlvar != nil { // These will be set during inlnode - if Debug.m > 2 { + if base.Flag.LowerM > 2 { fmt.Printf("substituting name %+v -> %+v\n", n, inlvar) } return inlvar } - if Debug.m > 2 { + if base.Flag.LowerM > 2 { fmt.Printf("not substituting name %+v\n", n) } return n - case OLITERAL, OTYPE: + case ir.OMETHEXPR: + return n + + case ir.OLITERAL, ir.ONIL, ir.OTYPE: // If n is a named constant or type, we can continue // using it in the inline copy. Otherwise, make a copy // so we can update the line number. - if n.Sym != nil { + if n.Sym() != nil { return n } // Since we don't handle bodies with closures, this return is guaranteed to belong to the current inlined function. // dump("Return before substitution", n); - case ORETURN: - m := nodSym(OGOTO, nil, subst.retlabel) - m.Ninit.Set(subst.list(n.Ninit)) + case ir.ORETURN: + m := nodSym(ir.OGOTO, nil, subst.retlabel) + m.PtrInit().Set(subst.list(n.Init())) - if len(subst.retvars) != 0 && n.List.Len() != 0 { - as := nod(OAS2, nil, nil) + if len(subst.retvars) != 0 && n.List().Len() != 0 { + as := ir.Nod(ir.OAS2, nil, nil) // Make a shallow copy of retvars. // Otherwise OINLCALL.Rlist will be the same list, // and later walk and typecheck may clobber it. 
for _, n := range subst.retvars { - as.List.Append(n) + as.PtrList().Append(n) } - as.Rlist.Set(subst.list(n.List)) + as.PtrRlist().Set(subst.list(n.List())) if subst.delayretvars { - for _, n := range as.List.Slice() { - as.Ninit.Append(nod(ODCL, n, nil)) - n.Name.Defn = as + for _, n := range as.List().Slice() { + as.PtrInit().Append(ir.Nod(ir.ODCL, n, nil)) + n.Name().Defn = as } } as = typecheck(as, ctxStmt) - m.Ninit.Append(as) + m.PtrInit().Append(as) } - typecheckslice(m.Ninit.Slice(), ctxStmt) + typecheckslice(m.Init().Slice(), ctxStmt) m = typecheck(m, ctxStmt) // dump("Return after substitution", m); return m - case OGOTO, OLABEL: - m := n.copy() - m.Pos = subst.updatedPos(m.Pos) - m.Ninit.Set(nil) - p := fmt.Sprintf("%s·%d", n.Sym.Name, inlgen) - m.Sym = lookup(p) + case ir.OGOTO, ir.OLABEL: + m := ir.Copy(n) + m.SetPos(subst.updatedPos(m.Pos())) + m.PtrInit().Set(nil) + p := fmt.Sprintf("%s·%d", n.Sym().Name, inlgen) + m.SetSym(lookup(p)) return m } - m := n.copy() - m.Pos = subst.updatedPos(m.Pos) - m.Ninit.Set(nil) + m := ir.Copy(n) + m.SetPos(subst.updatedPos(m.Pos())) + m.PtrInit().Set(nil) - if n.Op == OCLOSURE { - Fatalf("cannot inline function containing closure: %+v", n) + if n.Op() == ir.OCLOSURE { + base.Fatalf("cannot inline function containing closure: %+v", n) } - m.Left = subst.node(n.Left) - m.Right = subst.node(n.Right) - m.List.Set(subst.list(n.List)) - m.Rlist.Set(subst.list(n.Rlist)) - m.Ninit.Set(append(m.Ninit.Slice(), subst.list(n.Ninit)...)) - m.Nbody.Set(subst.list(n.Nbody)) + m.SetLeft(subst.node(n.Left())) + m.SetRight(subst.node(n.Right())) + m.PtrList().Set(subst.list(n.List())) + m.PtrRlist().Set(subst.list(n.Rlist())) + m.PtrInit().Set(append(m.Init().Slice(), subst.list(n.Init())...)) + m.PtrBody().Set(subst.list(n.Body())) return m } func (subst *inlsubst) updatedPos(xpos src.XPos) src.XPos { - pos := Ctxt.PosTable.Pos(xpos) + pos := base.Ctxt.PosTable.Pos(xpos) oldbase := pos.Base() // can be nil newbase := 
subst.bases[oldbase] if newbase == nil { @@ -1413,13 +1406,13 @@ func (subst *inlsubst) updatedPos(xpos src.XPos) src.XPos { subst.bases[oldbase] = newbase } pos.SetBase(newbase) - return Ctxt.PosTable.XPos(pos) + return base.Ctxt.PosTable.XPos(pos) } -func pruneUnusedAutos(ll []*Node, vis *hairyVisitor) []*Node { - s := make([]*Node, 0, len(ll)) +func pruneUnusedAutos(ll []ir.Node, vis *hairyVisitor) []ir.Node { + s := make([]ir.Node, 0, len(ll)) for _, n := range ll { - if n.Class() == PAUTO { + if n.Class() == ir.PAUTO { if _, found := vis.usedLocals[n]; !found { continue } @@ -1431,49 +1424,49 @@ func pruneUnusedAutos(ll []*Node, vis *hairyVisitor) []*Node { // devirtualize replaces interface method calls within fn with direct // concrete-type method calls where applicable. -func devirtualize(fn *Node) { +func devirtualize(fn ir.Node) { Curfn = fn - inspectList(fn.Nbody, func(n *Node) bool { - if n.Op == OCALLINTER { + ir.InspectList(fn.Body(), func(n ir.Node) bool { + if n.Op() == ir.OCALLINTER { devirtualizeCall(n) } return true }) } -func devirtualizeCall(call *Node) { - recv := staticValue(call.Left.Left) - if recv.Op != OCONVIFACE { +func devirtualizeCall(call ir.Node) { + recv := staticValue(call.Left().Left()) + if recv.Op() != ir.OCONVIFACE { return } - typ := recv.Left.Type + typ := recv.Left().Type() if typ.IsInterface() { return } - x := nodl(call.Left.Pos, ODOTTYPE, call.Left.Left, nil) - x.Type = typ - x = nodlSym(call.Left.Pos, OXDOT, x, call.Left.Sym) + x := ir.NodAt(call.Left().Pos(), ir.ODOTTYPE, call.Left().Left(), nil) + x.SetType(typ) + x = nodlSym(call.Left().Pos(), ir.OXDOT, x, call.Left().Sym()) x = typecheck(x, ctxExpr|ctxCallee) - switch x.Op { - case ODOTMETH: - if Debug.m != 0 { - Warnl(call.Pos, "devirtualizing %v to %v", call.Left, typ) - } - call.Op = OCALLMETH - call.Left = x - case ODOTINTER: + switch x.Op() { + case ir.ODOTMETH: + if base.Flag.LowerM != 0 { + base.WarnfAt(call.Pos(), "devirtualizing %v to %v", call.Left(), typ) 
+ } + call.SetOp(ir.OCALLMETH) + call.SetLeft(x) + case ir.ODOTINTER: // Promoted method from embedded interface-typed field (#42279). - if Debug.m != 0 { - Warnl(call.Pos, "partially devirtualizing %v to %v", call.Left, typ) + if base.Flag.LowerM != 0 { + base.WarnfAt(call.Pos(), "partially devirtualizing %v to %v", call.Left(), typ) } - call.Op = OCALLINTER - call.Left = x + call.SetOp(ir.OCALLINTER) + call.SetLeft(x) default: // TODO(mdempsky): Turn back into Fatalf after more testing. - if Debug.m != 0 { - Warnl(call.Pos, "failed to devirtualize %v (%v)", x, x.Op) + if base.Flag.LowerM != 0 { + base.WarnfAt(call.Pos(), "failed to devirtualize %v (%v)", x, x.Op()) } return } @@ -1484,12 +1477,12 @@ func devirtualizeCall(call *Node) { // Receiver parameter size may have changed; need to update // call.Type to get correct stack offsets for result // parameters. - checkwidth(x.Type) - switch ft := x.Type; ft.NumResults() { + checkwidth(x.Type()) + switch ft := x.Type(); ft.NumResults() { case 0: case 1: - call.Type = ft.Results().Field(0).Type + call.SetType(ft.Results().Field(0).Type) default: - call.Type = ft.Results() + call.SetType(ft.Results()) } } |