author     Russ Cox <rsc@golang.org>  2020-12-23 00:02:08 -0500
committer  Russ Cox <rsc@golang.org>  2020-12-23 06:37:41 +0000
commit     f9d373720e76a45cf2d0cb4507fe49dae33afd25 (patch)
tree       32f30c9897223d45953de787a628189a10680941 /src/cmd/compile/internal/gc
parent     14d667341f9c8c58a9fb38d4954766a230eacf3b (diff)
download   go-f9d373720e76a45cf2d0cb4507fe49dae33afd25.tar.gz
           go-f9d373720e76a45cf2d0cb4507fe49dae33afd25.zip
[dev.regabi] cmd/compile: remove Left, Right etc methods [generated]
Now that the generic graph structure methods - Left, Right, and so on -
have been removed from the Node interface, each implementation's uses
can be replaced with direct field access, using more specific names,
and the methods themselves can be deleted.

Passes buildall w/ toolstash -cmp.

[git-generate]
cd src/cmd/compile/internal/ir
rf '
mv Func.iota Func.Iota_
mv Name.fn Name.Func_
'

cd ../gc
rf '
ex . ../ir {
    import "cmd/compile/internal/ir"
    import "cmd/compile/internal/types"

    var ns ir.Nodes
    var b bool
    var i64 int64
    var n ir.Node
    var op ir.Op
    var sym *types.Sym
    var class ir.Class

    var decl *ir.Decl
    decl.Left() -> decl.X
    decl.SetLeft(n) -> decl.X = n

    var asl *ir.AssignListStmt
    asl.List() -> asl.Lhs
    asl.PtrList() -> &asl.Lhs
    asl.SetList(ns) -> asl.Lhs = ns
    asl.Rlist() -> asl.Rhs
    asl.PtrRlist() -> &asl.Rhs
    asl.SetRlist(ns) -> asl.Rhs = ns
    asl.Colas() -> asl.Def
    asl.SetColas(b) -> asl.Def = b

    var as *ir.AssignStmt
    as.Left() -> as.X
    as.SetLeft(n) -> as.X = n
    as.Right() -> as.Y
    as.SetRight(n) -> as.Y = n
    as.Colas() -> as.Def
    as.SetColas(b) -> as.Def = b

    var ao *ir.AssignOpStmt
    ao.Left() -> ao.X
    ao.SetLeft(n) -> ao.X = n
    ao.Right() -> ao.Y
    ao.SetRight(n) -> ao.Y = n
    ao.SubOp() -> ao.AsOp
    ao.SetSubOp(op) -> ao.AsOp = op
    ao.Implicit() -> ao.IncDec
    ao.SetImplicit(b) -> ao.IncDec = b

    var bl *ir.BlockStmt
    bl.List() -> bl.List_
    bl.PtrList() -> &bl.List_
    bl.SetList(ns) -> bl.List_ = ns

    var br *ir.BranchStmt
    br.Sym() -> br.Label
    br.SetSym(sym) -> br.Label = sym

    var cas *ir.CaseStmt
    cas.List() -> cas.List_
    cas.PtrList() -> &cas.List_
    cas.SetList(ns) -> cas.List_ = ns
    cas.Body() -> cas.Body_
    cas.PtrBody() -> &cas.Body_
    cas.SetBody(ns) -> cas.Body_ = ns
    cas.Rlist() -> cas.Vars
    cas.PtrRlist() -> &cas.Vars
    cas.SetRlist(ns) -> cas.Vars = ns
    cas.Left() -> cas.Comm
    cas.SetLeft(n) -> cas.Comm = n

    var fr *ir.ForStmt
    fr.Sym() -> fr.Label
    fr.SetSym(sym) -> fr.Label = sym
    fr.Left() -> fr.Cond
    fr.SetLeft(n) -> fr.Cond = n
    fr.Right() -> fr.Post
    fr.SetRight(n) -> fr.Post = n
    fr.Body() -> fr.Body_
    fr.PtrBody() -> &fr.Body_
    fr.SetBody(ns) -> fr.Body_ = ns
    fr.List() -> fr.Late
    fr.PtrList() -> &fr.Late
    fr.SetList(ns) -> fr.Late = ns
    fr.HasBreak() -> fr.HasBreak_
    fr.SetHasBreak(b) -> fr.HasBreak_ = b

    var gs *ir.GoDeferStmt
    gs.Left() -> gs.Call
    gs.SetLeft(n) -> gs.Call = n

    var ifs *ir.IfStmt
    ifs.Left() -> ifs.Cond
    ifs.SetLeft(n) -> ifs.Cond = n
    ifs.Body() -> ifs.Body_
    ifs.PtrBody() -> &ifs.Body_
    ifs.SetBody(ns) -> ifs.Body_ = ns
    ifs.Rlist() -> ifs.Else
    ifs.PtrRlist() -> &ifs.Else
    ifs.SetRlist(ns) -> ifs.Else = ns
    ifs.Likely() -> ifs.Likely_
    ifs.SetLikely(b) -> ifs.Likely_ = b

    var im *ir.InlineMarkStmt
    im.Offset() -> im.Index
    im.SetOffset(i64) -> im.Index = i64

    var lab *ir.LabelStmt
    lab.Sym() -> lab.Label
    lab.SetSym(sym) -> lab.Label = sym

    var rng *ir.RangeStmt
    rng.Sym() -> rng.Label
    rng.SetSym(sym) -> rng.Label = sym
    rng.Right() -> rng.X
    rng.SetRight(n) -> rng.X = n
    rng.Body() -> rng.Body_
    rng.PtrBody() -> &rng.Body_
    rng.SetBody(ns) -> rng.Body_ = ns
    rng.List() -> rng.Vars
    rng.PtrList() -> &rng.Vars
    rng.SetList(ns) -> rng.Vars = ns
    rng.HasBreak() -> rng.HasBreak_
    rng.SetHasBreak(b) -> rng.HasBreak_ = b
    rng.Colas() -> rng.Def
    rng.SetColas(b) -> rng.Def = b

    var ret *ir.ReturnStmt
    ret.List() -> ret.Results
    ret.PtrList() -> &ret.Results
    ret.SetList(ns) -> ret.Results = ns

    var sel *ir.SelectStmt
    sel.List() -> sel.Cases
    sel.PtrList() -> &sel.Cases
    sel.SetList(ns) -> sel.Cases = ns
    sel.Sym() -> sel.Label
    sel.SetSym(sym) -> sel.Label = sym
    sel.HasBreak() -> sel.HasBreak_
    sel.SetHasBreak(b) -> sel.HasBreak_ = b
    sel.Body() -> sel.Compiled
    sel.PtrBody() -> &sel.Compiled
    sel.SetBody(ns) -> sel.Compiled = ns

    var send *ir.SendStmt
    send.Left() -> send.Chan
    send.SetLeft(n) -> send.Chan = n
    send.Right() -> send.Value
    send.SetRight(n) -> send.Value = n

    var sw *ir.SwitchStmt
    sw.Left() -> sw.Tag
    sw.SetLeft(n) -> sw.Tag = n
    sw.List() -> sw.Cases
    sw.PtrList() -> &sw.Cases
    sw.SetList(ns) -> sw.Cases = ns
    sw.Body() -> sw.Compiled
    sw.PtrBody() -> &sw.Compiled
    sw.SetBody(ns) -> sw.Compiled = ns
    sw.Sym() -> sw.Label
    sw.SetSym(sym) -> sw.Label = sym
    sw.HasBreak() -> sw.HasBreak_
    sw.SetHasBreak(b) -> sw.HasBreak_ = b

    var tg *ir.TypeSwitchGuard
    tg.Left() -> tg.Tag
    tg.SetLeft(nil) -> tg.Tag = nil
    tg.SetLeft(n) -> tg.Tag = n.(*ir.Ident)
    tg.Right() -> tg.X
    tg.SetRight(n) -> tg.X = n

    var adds *ir.AddStringExpr
    adds.List() -> adds.List_
    adds.PtrList() -> &adds.List_
    adds.SetList(ns) -> adds.List_ = ns

    var addr *ir.AddrExpr
    addr.Left() -> addr.X
    addr.SetLeft(n) -> addr.X = n
    addr.Right() -> addr.Alloc
    addr.SetRight(n) -> addr.Alloc = n

    var bin *ir.BinaryExpr
    bin.Left() -> bin.X
    bin.SetLeft(n) -> bin.X = n
    bin.Right() -> bin.Y
    bin.SetRight(n) -> bin.Y = n

    var log *ir.LogicalExpr
    log.Left() -> log.X
    log.SetLeft(n) -> log.X = n
    log.Right() -> log.Y
    log.SetRight(n) -> log.Y = n

    var call *ir.CallExpr
    call.Left() -> call.X
    call.SetLeft(n) -> call.X = n
    call.List() -> call.Args
    call.PtrList() -> &call.Args
    call.SetList(ns) -> call.Args = ns
    call.Rlist() -> call.Rargs
    call.PtrRlist() -> &call.Rargs
    call.SetRlist(ns) -> call.Rargs = ns
    call.IsDDD() -> call.DDD
    call.SetIsDDD(b) -> call.DDD = b
    call.NoInline() -> call.NoInline_
    call.SetNoInline(b) -> call.NoInline_ = b
    call.Body() -> call.Body_
    call.PtrBody() -> &call.Body_
    call.SetBody(ns) -> call.Body_ = ns

    var cp *ir.CallPartExpr
    cp.Func() -> cp.Func_
    cp.Left() -> cp.X
    cp.SetLeft(n) -> cp.X = n
    cp.Sym() -> cp.Method.Sym

    var clo *ir.ClosureExpr
    clo.Func() -> clo.Func_

    var cr *ir.ClosureReadExpr
    cr.Offset() -> cr.Offset_

    var cl *ir.CompLitExpr
    cl.Right() -> cl.Ntype
    cl.SetRight(nil) -> cl.Ntype = nil
    cl.SetRight(n) -> cl.Ntype = ir.Node(n).(ir.Ntype)
    cl.List() -> cl.List_
    cl.PtrList() -> &cl.List_
    cl.SetList(ns) -> cl.List_ = ns

    var conv *ir.ConvExpr
    conv.Left() -> conv.X
    conv.SetLeft(n) -> conv.X = n

    var ix *ir.IndexExpr
    ix.Left() -> ix.X
    ix.SetLeft(n) -> ix.X = n
    ix.Right() -> ix.Index
    ix.SetRight(n) -> ix.Index = n
    ix.IndexMapLValue() -> ix.Assigned
    ix.SetIndexMapLValue(b) -> ix.Assigned = b

    var kv *ir.KeyExpr
    kv.Left() -> kv.Key
    kv.SetLeft(n) -> kv.Key = n
    kv.Right() -> kv.Value
    kv.SetRight(n) -> kv.Value = n

    var sk *ir.StructKeyExpr
    sk.Sym() -> sk.Field
    sk.SetSym(sym) -> sk.Field = sym
    sk.Left() -> sk.Value
    sk.SetLeft(n) -> sk.Value = n
    sk.Offset() -> sk.Offset_
    sk.SetOffset(i64) -> sk.Offset_ = i64

    var ic *ir.InlinedCallExpr
    ic.Body() -> ic.Body_
    ic.PtrBody() -> &ic.Body_
    ic.SetBody(ns) -> ic.Body_ = ns
    ic.Rlist() -> ic.ReturnVars
    ic.PtrRlist() -> &ic.ReturnVars
    ic.SetRlist(ns) -> ic.ReturnVars = ns

    var mak *ir.MakeExpr
    mak.Left() -> mak.Len
    mak.SetLeft(n) -> mak.Len = n
    mak.Right() -> mak.Cap
    mak.SetRight(n) -> mak.Cap = n

    var par *ir.ParenExpr
    par.Left() -> par.X
    par.SetLeft(n) -> par.X = n

    var res *ir.ResultExpr
    res.Offset() -> res.Offset_
    res.SetOffset(i64) -> res.Offset_ = i64

    var dot *ir.SelectorExpr
    dot.Left() -> dot.X
    dot.SetLeft(n) -> dot.X = n
    dot.Sym() -> dot.Sel
    dot.SetSym(sym) -> dot.Sel = sym
    dot.Offset() -> dot.Offset_
    dot.SetOffset(i64) -> dot.Offset_ = i64

    var sl *ir.SliceExpr
    sl.Left() -> sl.X
    sl.SetLeft(n) -> sl.X = n
    sl.List() -> sl.List_
    sl.PtrList() -> &sl.List_
    sl.SetList(ns) -> sl.List_ = ns

    var sh *ir.SliceHeaderExpr
    sh.Left() -> sh.Ptr
    sh.SetLeft(n) -> sh.Ptr = n
    sh.List() -> sh.LenCap_
    sh.PtrList() -> &sh.LenCap_
    sh.SetList(ns) -> sh.LenCap_ = ns

    var st *ir.StarExpr
    st.Left() -> st.X
    st.SetLeft(n) -> st.X = n

    var ta *ir.TypeAssertExpr
    ta.Left() -> ta.X
    ta.SetLeft(n) -> ta.X = n
    ta.Right() -> ta.Ntype
    ta.SetRight(n) -> ta.Ntype = n
    ta.List() -> ta.Itab
    ta.PtrList() -> &ta.Itab
    ta.SetList(ns) -> ta.Itab = ns

    var u *ir.UnaryExpr
    u.Left() -> u.X
    u.SetLeft(n) -> u.X = n

    var fn *ir.Func
    fn.Body() -> fn.Body_
    fn.PtrBody() -> &fn.Body_
    fn.SetBody(ns) -> fn.Body_ = ns
    fn.Iota() -> fn.Iota_
    fn.SetIota(i64) -> fn.Iota_ = i64
    fn.Func() -> fn

    var nam *ir.Name
    nam.SubOp() -> nam.BuiltinOp
    nam.SetSubOp(op) -> nam.BuiltinOp = op
    nam.Class() -> nam.Class_
    nam.SetClass(class) -> nam.Class_ = class
    nam.Func() -> nam.Func_
    nam.Offset() -> nam.Offset_
    nam.SetOffset(i64) -> nam.Offset_ = i64
}

ex . ../ir {
    import "cmd/compile/internal/ir"

    var n ir.Nodes
    (&n).Append -> n.Append
    (&n).AppendNodes -> n.AppendNodes
    (&n).MoveNodes -> n.MoveNodes
    (&n).Prepend -> n.Prepend
    (&n).Set -> n.Set
    (&n).Set1 -> n.Set1
    (&n).Set2 -> n.Set2
    (&n).Set3 -> n.Set3

    var ntype ir.Ntype
    ir.Node(ntype).(ir.Ntype) -> ntype
}
'

cd ../ir
rf '
rm \
    Decl.Left Decl.SetLeft \
    AssignListStmt.List AssignListStmt.PtrList AssignListStmt.SetList \
    AssignListStmt.Rlist AssignListStmt.PtrRlist AssignListStmt.SetRlist \
    AssignListStmt.Colas AssignListStmt.SetColas \
    AssignStmt.Left AssignStmt.SetLeft \
    AssignStmt.Right AssignStmt.SetRight \
    AssignStmt.Colas AssignStmt.SetColas \
    AssignOpStmt.Left AssignOpStmt.SetLeft \
    AssignOpStmt.Right AssignOpStmt.SetRight \
    AssignOpStmt.SubOp AssignOpStmt.SetSubOp \
    AssignOpStmt.Implicit AssignOpStmt.SetImplicit \
    BlockStmt.List BlockStmt.PtrList BlockStmt.SetList \
    BranchStmt.SetSym \
    CaseStmt.List CaseStmt.PtrList CaseStmt.SetList \
    CaseStmt.Body CaseStmt.PtrBody CaseStmt.SetBody \
    CaseStmt.Rlist CaseStmt.PtrRlist CaseStmt.SetRlist \
    CaseStmt.Left CaseStmt.SetLeft \
    ForStmt.Left ForStmt.SetLeft \
    ForStmt.Right ForStmt.SetRight \
    ForStmt.Body ForStmt.PtrBody ForStmt.SetBody \
    ForStmt.List ForStmt.PtrList ForStmt.SetList \
    ForStmt.HasBreak ForStmt.SetHasBreak \
    ForStmt.Sym ForStmt.SetSym \
    GoDeferStmt.Left GoDeferStmt.SetLeft \
    IfStmt.Left IfStmt.SetLeft \
    IfStmt.Body IfStmt.PtrBody IfStmt.SetBody \
    IfStmt.Rlist IfStmt.PtrRlist IfStmt.SetRlist \
    IfStmt.Likely IfStmt.SetLikely \
    LabelStmt.SetSym \
    RangeStmt.Right RangeStmt.SetRight \
    RangeStmt.Body RangeStmt.PtrBody RangeStmt.SetBody \
    RangeStmt.List RangeStmt.PtrList RangeStmt.SetList \
    RangeStmt.HasBreak RangeStmt.SetHasBreak \
    RangeStmt.Colas RangeStmt.SetColas \
    RangeStmt.Sym RangeStmt.SetSym \
    ReturnStmt.List ReturnStmt.PtrList ReturnStmt.SetList \
    SelectStmt.List SelectStmt.PtrList SelectStmt.SetList \
    SelectStmt.HasBreak SelectStmt.SetHasBreak \
    SelectStmt.Body SelectStmt.PtrBody SelectStmt.SetBody \
    SelectStmt.Sym SelectStmt.SetSym \
    SendStmt.Left SendStmt.SetLeft \
    SendStmt.Right SendStmt.SetRight \
    SwitchStmt.Left SwitchStmt.SetLeft \
    SwitchStmt.List SwitchStmt.PtrList SwitchStmt.SetList \
    SwitchStmt.Body SwitchStmt.PtrBody SwitchStmt.SetBody \
    SwitchStmt.HasBreak SwitchStmt.SetHasBreak \
    SwitchStmt.Sym SwitchStmt.SetSym \
    TypeSwitchGuard.Left TypeSwitchGuard.SetLeft \
    TypeSwitchGuard.Right TypeSwitchGuard.SetRight \
    AddStringExpr.List AddStringExpr.PtrList AddStringExpr.SetList \
    AddrExpr.Left AddrExpr.SetLeft \
    AddrExpr.Right AddrExpr.SetRight \
    BinaryExpr.Left BinaryExpr.SetLeft \
    BinaryExpr.Right BinaryExpr.SetRight \
    LogicalExpr.Left LogicalExpr.SetLeft \
    LogicalExpr.Right LogicalExpr.SetRight \
    CallExpr.Left CallExpr.SetLeft \
    CallExpr.List CallExpr.PtrList CallExpr.SetList \
    CallExpr.Rlist CallExpr.PtrRlist CallExpr.SetRlist \
    CallExpr.NoInline CallExpr.SetNoInline \
    CallExpr.Body CallExpr.PtrBody CallExpr.SetBody \
    CallExpr.IsDDD CallExpr.SetIsDDD \
    CallPartExpr.Left CallPartExpr.SetLeft \
    ClosureReadExpr.Offset \
    ClosureReadExpr.Type \ # provided by miniExpr already
    CompLitExpr.Right CompLitExpr.SetRight \
    CompLitExpr.List CompLitExpr.PtrList CompLitExpr.SetList \
    ConvExpr.Left ConvExpr.SetLeft \
    IndexExpr.Left IndexExpr.SetLeft \
    IndexExpr.Right IndexExpr.SetRight \
    IndexExpr.IndexMapLValue IndexExpr.SetIndexMapLValue \
    KeyExpr.Left KeyExpr.SetLeft \
    KeyExpr.Right KeyExpr.SetRight \
    StructKeyExpr.Left StructKeyExpr.SetLeft \
    StructKeyExpr.Offset StructKeyExpr.SetOffset \
    StructKeyExpr.SetSym \
    InlinedCallExpr.Body InlinedCallExpr.PtrBody InlinedCallExpr.SetBody \
    InlinedCallExpr.Rlist InlinedCallExpr.PtrRlist InlinedCallExpr.SetRlist \
    MakeExpr.Left MakeExpr.SetLeft \
    MakeExpr.Right MakeExpr.SetRight \
    MethodExpr.Left MethodExpr.SetLeft \
    MethodExpr.Right MethodExpr.SetRight \
    MethodExpr.Offset MethodExpr.SetOffset \
    MethodExpr.Class MethodExpr.SetClass \
    ParenExpr.Left ParenExpr.SetLeft \
    ResultExpr.Offset ResultExpr.SetOffset \
    ReturnStmt.IsDDD \
    SelectorExpr.Left SelectorExpr.SetLeft \
    SelectorExpr.Offset SelectorExpr.SetOffset \
    SelectorExpr.SetSym \
    SliceExpr.Left SliceExpr.SetLeft \
    SliceExpr.List SliceExpr.PtrList SliceExpr.SetList \
    SliceHeaderExpr.Left SliceHeaderExpr.SetLeft \
    SliceHeaderExpr.List SliceHeaderExpr.PtrList SliceHeaderExpr.SetList \
    StarExpr.Left StarExpr.SetLeft \
    TypeAssertExpr.Left TypeAssertExpr.SetLeft \
    TypeAssertExpr.Right TypeAssertExpr.SetRight \
    TypeAssertExpr.List TypeAssertExpr.PtrList TypeAssertExpr.SetList \
    UnaryExpr.Left UnaryExpr.SetLeft \
    Func.Body Func.PtrBody Func.SetBody \
    Func.Iota Func.SetIota \
    CallPartExpr.Func ClosureExpr.Func Func.Func Name.Func \

mv BlockStmt.List_ BlockStmt.List
mv CaseStmt.List_ CaseStmt.List
mv CaseStmt.Body_ CaseStmt.Body
mv ForStmt.Body_ ForStmt.Body
mv ForStmt.HasBreak_ ForStmt.HasBreak
mv Func.Iota_ Func.Iota
mv IfStmt.Body_ IfStmt.Body
mv IfStmt.Likely_ IfStmt.Likely
mv RangeStmt.Body_ RangeStmt.Body
mv RangeStmt.HasBreak_ RangeStmt.HasBreak
mv SelectStmt.HasBreak_ SelectStmt.HasBreak
mv SwitchStmt.HasBreak_ SwitchStmt.HasBreak
mv AddStringExpr.List_ AddStringExpr.List
mv CallExpr.NoInline_ CallExpr.NoInline
mv CallExpr.Body_ CallExpr.Body # TODO what is this?
mv CallExpr.DDD CallExpr.IsDDD
mv ClosureReadExpr.Offset_ ClosureReadExpr.Offset
mv CompLitExpr.List_ CompLitExpr.List
mv StructKeyExpr.Offset_ StructKeyExpr.Offset
mv InlinedCallExpr.Body_ InlinedCallExpr.Body
mv ResultExpr.Offset_ ResultExpr.Offset
mv SelectorExpr.Offset_ SelectorExpr.Offset
mv SliceExpr.List_ SliceExpr.List
mv SliceHeaderExpr.LenCap_ SliceHeaderExpr.LenCap
mv Func.Body_ Func.Body
mv CallPartExpr.Func_ CallPartExpr.Func
mv ClosureExpr.Func_ ClosureExpr.Func
mv Name.Func_ Name.Func
'

Change-Id: Ia2ee59649674f83eb123e63fda7a7781cf91cc56
Reviewed-on: https://go-review.googlesource.com/c/go/+/277935
Trust: Russ Cox <rsc@golang.org>
Run-TryBot: Russ Cox <rsc@golang.org>
TryBot-Result: Go Bot <gobot@golang.org>
Reviewed-by: Matthew Dempsky <mdempsky@google.com>
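To see the shape of the getter/setter rewrite, here is a minimal, self-contained
Go sketch. The types are toy stand-ins for cmd/compile/internal/ir, not the real
ones, which carry many more fields and live behind the Node interface:

    package main

    import "fmt"

    // Toy stand-ins for ir.Node and ir.AssignStmt, for illustration only.
    type Node interface{ String() string }

    type Ident struct{ Name string }

    func (i *Ident) String() string { return i.Name }

    type AssignStmt struct {
        X, Y Node // the specific, exported fields the CL switches to
    }

    // Old-style generic accessors, of the kind this CL deletes.
    func (a *AssignStmt) Left() Node      { return a.X }
    func (a *AssignStmt) SetLeft(n Node)  { a.X = n }
    func (a *AssignStmt) Right() Node     { return a.Y }
    func (a *AssignStmt) SetRight(n Node) { a.Y = n }

    func main() {
        as := &AssignStmt{}

        // Before: call sites go through the generic methods.
        as.SetLeft(&Ident{Name: "x"})
        as.SetRight(&Ident{Name: "y"})
        fmt.Println(as.Left(), as.Right()) // x y

        // After the `ex` rewrite: direct access to the named fields,
        // so the statement's structure is explicit at each use.
        as.X = &Ident{Name: "x"}
        as.Y = &Ident{Name: "y"}
        fmt.Println(as.X, as.Y) // x y
    }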
Diffstat (limited to 'src/cmd/compile/internal/gc')
-rw-r--r--  src/cmd/compile/internal/gc/abiutilsaux_test.go    4
-rw-r--r--  src/cmd/compile/internal/gc/alg.go                 74
-rw-r--r--  src/cmd/compile/internal/gc/bexport.go             2
-rw-r--r--  src/cmd/compile/internal/gc/closure.go             56
-rw-r--r--  src/cmd/compile/internal/gc/const.go               50
-rw-r--r--  src/cmd/compile/internal/gc/dcl.go                 24
-rw-r--r--  src/cmd/compile/internal/gc/escape.go              330
-rw-r--r--  src/cmd/compile/internal/gc/export.go              2
-rw-r--r--  src/cmd/compile/internal/gc/gen.go                 6
-rw-r--r--  src/cmd/compile/internal/gc/gsubr.go               10
-rw-r--r--  src/cmd/compile/internal/gc/iexport.go             178
-rw-r--r--  src/cmd/compile/internal/gc/iimport.go             74
-rw-r--r--  src/cmd/compile/internal/gc/init.go                10
-rw-r--r--  src/cmd/compile/internal/gc/initorder.go           24
-rw-r--r--  src/cmd/compile/internal/gc/inl.go                 204
-rw-r--r--  src/cmd/compile/internal/gc/main.go                2
-rw-r--r--  src/cmd/compile/internal/gc/noder.go               90
-rw-r--r--  src/cmd/compile/internal/gc/obj.go                 8
-rw-r--r--  src/cmd/compile/internal/gc/order.go               420
-rw-r--r--  src/cmd/compile/internal/gc/pgen.go                44
-rw-r--r--  src/cmd/compile/internal/gc/pgen_test.go           4
-rw-r--r--  src/cmd/compile/internal/gc/plive.go               14
-rw-r--r--  src/cmd/compile/internal/gc/range.go               144
-rw-r--r--  src/cmd/compile/internal/gc/reflect.go             6
-rw-r--r--  src/cmd/compile/internal/gc/scc.go                 10
-rw-r--r--  src/cmd/compile/internal/gc/scope.go               6
-rw-r--r--  src/cmd/compile/internal/gc/select.go              116
-rw-r--r--  src/cmd/compile/internal/gc/sinit.go               180
-rw-r--r--  src/cmd/compile/internal/gc/ssa.go                 518
-rw-r--r--  src/cmd/compile/internal/gc/subr.go                108
-rw-r--r--  src/cmd/compile/internal/gc/swt.go                 172
-rw-r--r--  src/cmd/compile/internal/gc/typecheck.go           816
-rw-r--r--  src/cmd/compile/internal/gc/universe.go            6
-rw-r--r--  src/cmd/compile/internal/gc/unsafe.go              24
-rw-r--r--  src/cmd/compile/internal/gc/walk.go                844
35 files changed, 2290 insertions, 2290 deletions
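The other recurring pattern in the hunks below is PtrList()/PtrBody() chains
such as call.PtrList().Append(na) collapsing to call.Args.Append(na). A hedged
sketch of why the pointer-returning accessor becomes unnecessary, again using a
toy Nodes wrapper rather than the real ir.Nodes:

    package main

    import "fmt"

    // Toy model of ir.Nodes: a slice wrapper whose mutating methods use a
    // pointer receiver.
    type Nodes struct{ slice []string }

    func (n *Nodes) Append(s ...string) { n.slice = append(n.slice, s...) }

    type CallExpr struct {
        Args Nodes // previously reached through an accessor returning *Nodes
    }

    func main() {
        call := &CallExpr{}

        // Old style (accessor now deleted): call.PtrList().Append("a")
        // New style: Args is an addressable field, so Go takes &call.Args
        // automatically when calling the pointer-receiver method.
        call.Args.Append("a", "b")
        fmt.Println(call.Args.slice) // [a b]
    }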
diff --git a/src/cmd/compile/internal/gc/abiutilsaux_test.go b/src/cmd/compile/internal/gc/abiutilsaux_test.go
index fd0b197207..de35e8edd6 100644
--- a/src/cmd/compile/internal/gc/abiutilsaux_test.go
+++ b/src/cmd/compile/internal/gc/abiutilsaux_test.go
@@ -20,7 +20,7 @@ import (
func mkParamResultField(t *types.Type, s *types.Sym, which ir.Class) *types.Field {
field := types.NewField(src.NoXPos, s, t)
n := NewName(s)
- n.SetClass(which)
+ n.Class_ = which
field.Nname = n
n.SetType(t)
return field
@@ -78,7 +78,7 @@ func verifyParamResultOffset(t *testing.T, f *types.Field, r ABIParamAssignment,
n := ir.AsNode(f.Nname).(*ir.Name)
if n.FrameOffset() != int64(r.Offset) {
t.Errorf("%s %d: got offset %d wanted %d t=%v",
- which, idx, r.Offset, n.Offset(), f.Type)
+ which, idx, r.Offset, n.Offset_, f.Type)
return 1
}
return 0
diff --git a/src/cmd/compile/internal/gc/alg.go b/src/cmd/compile/internal/gc/alg.go
index 730db9c1c9..bb2717a8b5 100644
--- a/src/cmd/compile/internal/gc/alg.go
+++ b/src/cmd/compile/internal/gc/alg.go
@@ -324,11 +324,11 @@ func genhash(t *types.Type) *obj.LSym {
nx := ir.NewIndexExpr(base.Pos, np, ni)
nx.SetBounded(true)
na := nodAddr(nx)
- call.PtrList().Append(na)
- call.PtrList().Append(nh)
- loop.PtrBody().Append(ir.NewAssignStmt(base.Pos, nh, call))
+ call.Args.Append(na)
+ call.Args.Append(nh)
+ loop.Body.Append(ir.NewAssignStmt(base.Pos, nh, call))
- fn.PtrBody().Append(loop)
+ fn.Body.Append(loop)
case types.TSTRUCT:
// Walk the struct using memhash for runs of AMEM
@@ -348,9 +348,9 @@ func genhash(t *types.Type) *obj.LSym {
call := ir.NewCallExpr(base.Pos, ir.OCALL, hashel, nil)
nx := ir.NewSelectorExpr(base.Pos, ir.OXDOT, np, f.Sym) // TODO: fields from other packages?
na := nodAddr(nx)
- call.PtrList().Append(na)
- call.PtrList().Append(nh)
- fn.PtrBody().Append(ir.NewAssignStmt(base.Pos, nh, call))
+ call.Args.Append(na)
+ call.Args.Append(nh)
+ fn.Body.Append(ir.NewAssignStmt(base.Pos, nh, call))
i++
continue
}
@@ -363,21 +363,21 @@ func genhash(t *types.Type) *obj.LSym {
call := ir.NewCallExpr(base.Pos, ir.OCALL, hashel, nil)
nx := ir.NewSelectorExpr(base.Pos, ir.OXDOT, np, f.Sym) // TODO: fields from other packages?
na := nodAddr(nx)
- call.PtrList().Append(na)
- call.PtrList().Append(nh)
- call.PtrList().Append(nodintconst(size))
- fn.PtrBody().Append(ir.NewAssignStmt(base.Pos, nh, call))
+ call.Args.Append(na)
+ call.Args.Append(nh)
+ call.Args.Append(nodintconst(size))
+ fn.Body.Append(ir.NewAssignStmt(base.Pos, nh, call))
i = next
}
}
r := ir.NewReturnStmt(base.Pos, nil)
- r.PtrList().Append(nh)
- fn.PtrBody().Append(r)
+ r.Results.Append(nh)
+ fn.Body.Append(r)
if base.Flag.LowerR != 0 {
- ir.DumpList("genhash body", fn.Body())
+ ir.DumpList("genhash body", fn.Body)
}
funcbody()
@@ -386,7 +386,7 @@ func genhash(t *types.Type) *obj.LSym {
typecheckFunc(fn)
Curfn = fn
- typecheckslice(fn.Body().Slice(), ctxStmt)
+ typecheckslice(fn.Body.Slice(), ctxStmt)
Curfn = nil
if base.Debug.DclStack != 0 {
@@ -587,11 +587,11 @@ func geneq(t *types.Type) *obj.LSym {
for i := int64(0); i < nelem; i++ {
// if check {} else { goto neq }
nif := ir.NewIfStmt(base.Pos, checkIdx(nodintconst(i)), nil, nil)
- nif.PtrRlist().Append(ir.NewBranchStmt(base.Pos, ir.OGOTO, neq))
- fn.PtrBody().Append(nif)
+ nif.Else.Append(ir.NewBranchStmt(base.Pos, ir.OGOTO, neq))
+ fn.Body.Append(nif)
}
if last {
- fn.PtrBody().Append(ir.NewAssignStmt(base.Pos, nr, checkIdx(nodintconst(nelem))))
+ fn.Body.Append(ir.NewAssignStmt(base.Pos, nr, checkIdx(nodintconst(nelem))))
}
} else {
// Generate a for loop.
@@ -604,11 +604,11 @@ func geneq(t *types.Type) *obj.LSym {
loop.PtrInit().Append(init)
// if eq(pi, qi) {} else { goto neq }
nif := ir.NewIfStmt(base.Pos, checkIdx(i), nil, nil)
- nif.PtrRlist().Append(ir.NewBranchStmt(base.Pos, ir.OGOTO, neq))
- loop.PtrBody().Append(nif)
- fn.PtrBody().Append(loop)
+ nif.Else.Append(ir.NewBranchStmt(base.Pos, ir.OGOTO, neq))
+ loop.Body.Append(nif)
+ fn.Body.Append(loop)
if last {
- fn.PtrBody().Append(ir.NewAssignStmt(base.Pos, nr, nodbool(true)))
+ fn.Body.Append(ir.NewAssignStmt(base.Pos, nr, nodbool(true)))
}
}
}
@@ -718,42 +718,42 @@ func geneq(t *types.Type) *obj.LSym {
}
if len(flatConds) == 0 {
- fn.PtrBody().Append(ir.NewAssignStmt(base.Pos, nr, nodbool(true)))
+ fn.Body.Append(ir.NewAssignStmt(base.Pos, nr, nodbool(true)))
} else {
for _, c := range flatConds[:len(flatConds)-1] {
// if cond {} else { goto neq }
n := ir.NewIfStmt(base.Pos, c, nil, nil)
- n.PtrRlist().Append(ir.NewBranchStmt(base.Pos, ir.OGOTO, neq))
- fn.PtrBody().Append(n)
+ n.Else.Append(ir.NewBranchStmt(base.Pos, ir.OGOTO, neq))
+ fn.Body.Append(n)
}
- fn.PtrBody().Append(ir.NewAssignStmt(base.Pos, nr, flatConds[len(flatConds)-1]))
+ fn.Body.Append(ir.NewAssignStmt(base.Pos, nr, flatConds[len(flatConds)-1]))
}
}
// ret:
// return
ret := autolabel(".ret")
- fn.PtrBody().Append(ir.NewLabelStmt(base.Pos, ret))
- fn.PtrBody().Append(ir.NewReturnStmt(base.Pos, nil))
+ fn.Body.Append(ir.NewLabelStmt(base.Pos, ret))
+ fn.Body.Append(ir.NewReturnStmt(base.Pos, nil))
// neq:
// r = false
// return (or goto ret)
- fn.PtrBody().Append(ir.NewLabelStmt(base.Pos, neq))
- fn.PtrBody().Append(ir.NewAssignStmt(base.Pos, nr, nodbool(false)))
+ fn.Body.Append(ir.NewLabelStmt(base.Pos, neq))
+ fn.Body.Append(ir.NewAssignStmt(base.Pos, nr, nodbool(false)))
if EqCanPanic(t) || anyCall(fn) {
// Epilogue is large, so share it with the equal case.
- fn.PtrBody().Append(ir.NewBranchStmt(base.Pos, ir.OGOTO, ret))
+ fn.Body.Append(ir.NewBranchStmt(base.Pos, ir.OGOTO, ret))
} else {
// Epilogue is small, so don't bother sharing.
- fn.PtrBody().Append(ir.NewReturnStmt(base.Pos, nil))
+ fn.Body.Append(ir.NewReturnStmt(base.Pos, nil))
}
// TODO(khr): the epilogue size detection condition above isn't perfect.
// We should really do a generic CL that shares epilogues across
// the board. See #24936.
if base.Flag.LowerR != 0 {
- ir.DumpList("geneq body", fn.Body())
+ ir.DumpList("geneq body", fn.Body)
}
funcbody()
@@ -762,7 +762,7 @@ func geneq(t *types.Type) *obj.LSym {
typecheckFunc(fn)
Curfn = fn
- typecheckslice(fn.Body().Slice(), ctxStmt)
+ typecheckslice(fn.Body.Slice(), ctxStmt)
Curfn = nil
if base.Debug.DclStack != 0 {
@@ -869,10 +869,10 @@ func eqmem(p ir.Node, q ir.Node, field *types.Sym, size int64) ir.Node {
fn, needsize := eqmemfunc(size, nx.Type().Elem())
call := ir.NewCallExpr(base.Pos, ir.OCALL, fn, nil)
- call.PtrList().Append(nx)
- call.PtrList().Append(ny)
+ call.Args.Append(nx)
+ call.Args.Append(ny)
if needsize {
- call.PtrList().Append(nodintconst(size))
+ call.Args.Append(nodintconst(size))
}
return call
diff --git a/src/cmd/compile/internal/gc/bexport.go b/src/cmd/compile/internal/gc/bexport.go
index 2347971fc2..3c377d8ba3 100644
--- a/src/cmd/compile/internal/gc/bexport.go
+++ b/src/cmd/compile/internal/gc/bexport.go
@@ -17,7 +17,7 @@ type exporter struct {
func (p *exporter) markObject(n ir.Node) {
if n.Op() == ir.ONAME {
n := n.(*ir.Name)
- if n.Class() == ir.PFUNC {
+ if n.Class_ == ir.PFUNC {
inlFlood(n, exportsym)
}
}
diff --git a/src/cmd/compile/internal/gc/closure.go b/src/cmd/compile/internal/gc/closure.go
index f47b2e2b07..1019cff331 100644
--- a/src/cmd/compile/internal/gc/closure.go
+++ b/src/cmd/compile/internal/gc/closure.go
@@ -77,11 +77,11 @@ func (p *noder) funcLit(expr *syntax.FuncLit) ir.Node {
// TODO: This creation of the named function should probably really be done in a
// separate pass from type-checking.
func typecheckclosure(clo *ir.ClosureExpr, top int) {
- fn := clo.Func()
+ fn := clo.Func
// Set current associated iota value, so iota can be used inside
// function in ConstSpec, see issue #22344
if x := getIotaValue(); x >= 0 {
- fn.SetIota(x)
+ fn.Iota = x
}
fn.ClosureType = typecheck(fn.ClosureType, ctxType)
@@ -124,7 +124,7 @@ func typecheckclosure(clo *ir.ClosureExpr, top int) {
Curfn = fn
olddd := decldepth
decldepth = 1
- typecheckslice(fn.Body().Slice(), ctxStmt)
+ typecheckslice(fn.Body.Slice(), ctxStmt)
decldepth = olddd
Curfn = oldfn
}
@@ -195,7 +195,7 @@ func capturevars(fn *ir.Func) {
outermost := v.Defn.(*ir.Name)
// out parameters will be assigned to implicitly upon return.
- if outermost.Class() != ir.PPARAMOUT && !outermost.Name().Addrtaken() && !outermost.Name().Assigned() && v.Type().Width <= 128 {
+ if outermost.Class_ != ir.PPARAMOUT && !outermost.Name().Addrtaken() && !outermost.Name().Assigned() && v.Type().Width <= 128 {
v.SetByval(true)
} else {
outermost.Name().SetAddrtaken(true)
@@ -262,7 +262,7 @@ func transformclosure(fn *ir.Func) {
v = addr
}
- v.SetClass(ir.PPARAM)
+ v.Class_ = ir.PPARAM
decls = append(decls, v)
fld := types.NewField(src.NoXPos, v.Sym(), v.Type())
@@ -294,7 +294,7 @@ func transformclosure(fn *ir.Func) {
if v.Byval() && v.Type().Width <= int64(2*Widthptr) {
// If it is a small variable captured by value, downgrade it to PAUTO.
- v.SetClass(ir.PAUTO)
+ v.Class_ = ir.PAUTO
fn.Dcl = append(fn.Dcl, v)
body = append(body, ir.NewAssignStmt(base.Pos, v, cr))
} else {
@@ -302,7 +302,7 @@ func transformclosure(fn *ir.Func) {
// and initialize in entry prologue.
addr := NewName(lookup("&" + v.Sym().Name))
addr.SetType(types.NewPtr(v.Type()))
- addr.SetClass(ir.PAUTO)
+ addr.Class_ = ir.PAUTO
addr.SetUsed(true)
addr.Curfn = fn
fn.Dcl = append(fn.Dcl, addr)
@@ -328,7 +328,7 @@ func transformclosure(fn *ir.Func) {
// hasemptycvars reports whether closure clo has an
// empty list of captured vars.
func hasemptycvars(clo *ir.ClosureExpr) bool {
- return len(clo.Func().ClosureVars) == 0
+ return len(clo.Func.ClosureVars) == 0
}
// closuredebugruntimecheck applies boilerplate checks for debug flags
@@ -336,9 +336,9 @@ func hasemptycvars(clo *ir.ClosureExpr) bool {
func closuredebugruntimecheck(clo *ir.ClosureExpr) {
if base.Debug.Closure > 0 {
if clo.Esc() == EscHeap {
- base.WarnfAt(clo.Pos(), "heap closure, captured vars = %v", clo.Func().ClosureVars)
+ base.WarnfAt(clo.Pos(), "heap closure, captured vars = %v", clo.Func.ClosureVars)
} else {
- base.WarnfAt(clo.Pos(), "stack closure, captured vars = %v", clo.Func().ClosureVars)
+ base.WarnfAt(clo.Pos(), "stack closure, captured vars = %v", clo.Func.ClosureVars)
}
}
if base.Flag.CompilingRuntime && clo.Esc() == EscHeap {
@@ -366,7 +366,7 @@ func closureType(clo *ir.ClosureExpr) *types.Type {
fields := []*ir.Field{
namedfield(".F", types.Types[types.TUINTPTR]),
}
- for _, v := range clo.Func().ClosureVars {
+ for _, v := range clo.Func.ClosureVars {
typ := v.Type()
if !v.Byval() {
typ = types.NewPtr(typ)
@@ -379,7 +379,7 @@ func closureType(clo *ir.ClosureExpr) *types.Type {
}
func walkclosure(clo *ir.ClosureExpr, init *ir.Nodes) ir.Node {
- fn := clo.Func()
+ fn := clo.Func
// If no closure vars, don't bother wrapping.
if hasemptycvars(clo) {
@@ -394,7 +394,7 @@ func walkclosure(clo *ir.ClosureExpr, init *ir.Nodes) ir.Node {
clos := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(typ).(ir.Ntype), nil)
clos.SetEsc(clo.Esc())
- clos.PtrList().Set(append([]ir.Node{ir.NewUnaryExpr(base.Pos, ir.OCFUNC, fn.Nname)}, fn.ClosureEnter.Slice()...))
+ clos.List.Set(append([]ir.Node{ir.NewUnaryExpr(base.Pos, ir.OCFUNC, fn.Nname)}, fn.ClosureEnter.Slice()...))
addr := nodAddr(clos)
addr.SetEsc(clo.Esc())
@@ -407,7 +407,7 @@ func walkclosure(clo *ir.ClosureExpr, init *ir.Nodes) ir.Node {
if !types.Identical(typ, x.Type()) {
panic("closure type does not match order's assigned type")
}
- addr.SetRight(x)
+ addr.Alloc = x
clo.Prealloc = nil
}
@@ -428,13 +428,13 @@ func typecheckpartialcall(n ir.Node, sym *types.Sym) *ir.CallPartExpr {
fn := makepartialcall(dot, dot.Type(), sym)
fn.SetWrapper(true)
- return ir.NewCallPartExpr(dot.Pos(), dot.Left(), dot.Selection, fn)
+ return ir.NewCallPartExpr(dot.Pos(), dot.X, dot.Selection, fn)
}
// makepartialcall returns a DCLFUNC node representing the wrapper function (*-fm) needed
// for partial calls.
func makepartialcall(dot *ir.SelectorExpr, t0 *types.Type, meth *types.Sym) *ir.Func {
- rcvrtype := dot.Left().Type()
+ rcvrtype := dot.X.Type()
sym := methodSymSuffix(rcvrtype, meth, "-fm")
if sym.Uniq() {
@@ -480,24 +480,24 @@ func makepartialcall(dot *ir.SelectorExpr, t0 *types.Type, meth *types.Sym) *ir.
}
call := ir.NewCallExpr(base.Pos, ir.OCALL, ir.NewSelectorExpr(base.Pos, ir.OXDOT, ptr, meth), nil)
- call.PtrList().Set(paramNnames(tfn.Type()))
- call.SetIsDDD(tfn.Type().IsVariadic())
+ call.Args.Set(paramNnames(tfn.Type()))
+ call.IsDDD = tfn.Type().IsVariadic()
if t0.NumResults() != 0 {
ret := ir.NewReturnStmt(base.Pos, nil)
- ret.PtrList().Set1(call)
+ ret.Results.Set1(call)
body = append(body, ret)
} else {
body = append(body, call)
}
- fn.PtrBody().Set(body)
+ fn.Body.Set(body)
funcbody()
typecheckFunc(fn)
// Need to typecheck the body of the just-generated wrapper.
// typecheckslice() requires that Curfn is set when processing an ORETURN.
Curfn = fn
- typecheckslice(fn.Body().Slice(), ctxStmt)
+ typecheckslice(fn.Body.Slice(), ctxStmt)
sym.Def = fn
Target.Decls = append(Target.Decls, fn)
Curfn = savecurfn
@@ -512,7 +512,7 @@ func makepartialcall(dot *ir.SelectorExpr, t0 *types.Type, meth *types.Sym) *ir.
func partialCallType(n *ir.CallPartExpr) *types.Type {
t := tostruct([]*ir.Field{
namedfield("F", types.Types[types.TUINTPTR]),
- namedfield("R", n.Left().Type()),
+ namedfield("R", n.X.Type()),
})
t.SetNoalg(true)
return t
@@ -526,13 +526,13 @@ func walkpartialcall(n *ir.CallPartExpr, init *ir.Nodes) ir.Node {
//
// Like walkclosure above.
- if n.Left().Type().IsInterface() {
+ if n.X.Type().IsInterface() {
// Trigger panic for method on nil interface now.
// Otherwise it happens in the wrapper and is confusing.
- n.SetLeft(cheapexpr(n.Left(), init))
- n.SetLeft(walkexpr(n.Left(), nil))
+ n.X = cheapexpr(n.X, init)
+ n.X = walkexpr(n.X, nil)
- tab := typecheck(ir.NewUnaryExpr(base.Pos, ir.OITAB, n.Left()), ctxExpr)
+ tab := typecheck(ir.NewUnaryExpr(base.Pos, ir.OITAB, n.X), ctxExpr)
c := ir.NewUnaryExpr(base.Pos, ir.OCHECKNIL, tab)
c.SetTypecheck(1)
@@ -543,7 +543,7 @@ func walkpartialcall(n *ir.CallPartExpr, init *ir.Nodes) ir.Node {
clos := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(typ).(ir.Ntype), nil)
clos.SetEsc(n.Esc())
- clos.PtrList().Set2(ir.NewUnaryExpr(base.Pos, ir.OCFUNC, n.Func().Nname), n.Left())
+ clos.List.Set2(ir.NewUnaryExpr(base.Pos, ir.OCFUNC, n.Func.Nname), n.X)
addr := nodAddr(clos)
addr.SetEsc(n.Esc())
@@ -556,7 +556,7 @@ func walkpartialcall(n *ir.CallPartExpr, init *ir.Nodes) ir.Node {
if !types.Identical(typ, x.Type()) {
panic("partial call type does not match order's assigned type")
}
- addr.SetRight(x)
+ addr.Alloc = x
n.Prealloc = nil
}
diff --git a/src/cmd/compile/internal/gc/const.go b/src/cmd/compile/internal/gc/const.go
index e54cd0a102..19eb8bc537 100644
--- a/src/cmd/compile/internal/gc/const.go
+++ b/src/cmd/compile/internal/gc/const.go
@@ -163,8 +163,8 @@ func convlit1(n ir.Node, t *types.Type, explicit bool, context func() string) ir
}
n := n.(*ir.UnaryExpr)
- n.SetLeft(convlit(n.Left(), ot))
- if n.Left().Type() == nil {
+ n.X = convlit(n.X, ot)
+ if n.X.Type() == nil {
n.SetType(nil)
return n
}
@@ -181,13 +181,13 @@ func convlit1(n ir.Node, t *types.Type, explicit bool, context func() string) ir
var l, r ir.Node
switch n := n.(type) {
case *ir.BinaryExpr:
- n.SetLeft(convlit(n.Left(), ot))
- n.SetRight(convlit(n.Right(), ot))
- l, r = n.Left(), n.Right()
+ n.X = convlit(n.X, ot)
+ n.Y = convlit(n.Y, ot)
+ l, r = n.X, n.Y
case *ir.LogicalExpr:
- n.SetLeft(convlit(n.Left(), ot))
- n.SetRight(convlit(n.Right(), ot))
- l, r = n.Left(), n.Right()
+ n.X = convlit(n.X, ot)
+ n.Y = convlit(n.Y, ot)
+ l, r = n.X, n.Y
}
if l.Type() == nil || r.Type() == nil {
@@ -213,8 +213,8 @@ func convlit1(n ir.Node, t *types.Type, explicit bool, context func() string) ir
case ir.OLSH, ir.ORSH:
n := n.(*ir.BinaryExpr)
- n.SetLeft(convlit1(n.Left(), t, explicit, nil))
- n.SetType(n.Left().Type())
+ n.X = convlit1(n.X, t, explicit, nil)
+ n.SetType(n.X.Type())
if n.Type() != nil && !n.Type().IsInteger() {
base.Errorf("invalid operation: %v (shift of type %v)", n, n.Type())
n.SetType(nil)
@@ -452,7 +452,7 @@ func evalConst(n ir.Node) ir.Node {
switch n.Op() {
case ir.OPLUS, ir.ONEG, ir.OBITNOT, ir.ONOT:
n := n.(*ir.UnaryExpr)
- nl := n.Left()
+ nl := n.X
if nl.Op() == ir.OLITERAL {
var prec uint
if n.Type().IsUnsigned() {
@@ -463,7 +463,7 @@ func evalConst(n ir.Node) ir.Node {
case ir.OADD, ir.OSUB, ir.OMUL, ir.ODIV, ir.OMOD, ir.OOR, ir.OXOR, ir.OAND, ir.OANDNOT:
n := n.(*ir.BinaryExpr)
- nl, nr := n.Left(), n.Right()
+ nl, nr := n.X, n.Y
if nl.Op() == ir.OLITERAL && nr.Op() == ir.OLITERAL {
rval := nr.Val()
@@ -488,21 +488,21 @@ func evalConst(n ir.Node) ir.Node {
case ir.OOROR, ir.OANDAND:
n := n.(*ir.LogicalExpr)
- nl, nr := n.Left(), n.Right()
+ nl, nr := n.X, n.Y
if nl.Op() == ir.OLITERAL && nr.Op() == ir.OLITERAL {
return origConst(n, constant.BinaryOp(nl.Val(), tokenForOp[n.Op()], nr.Val()))
}
case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
n := n.(*ir.BinaryExpr)
- nl, nr := n.Left(), n.Right()
+ nl, nr := n.X, n.Y
if nl.Op() == ir.OLITERAL && nr.Op() == ir.OLITERAL {
return origBoolConst(n, constant.Compare(nl.Val(), tokenForOp[n.Op()], nr.Val()))
}
case ir.OLSH, ir.ORSH:
n := n.(*ir.BinaryExpr)
- nl, nr := n.Left(), n.Right()
+ nl, nr := n.X, n.Y
if nl.Op() == ir.OLITERAL && nr.Op() == ir.OLITERAL {
// shiftBound from go/types; "so we can express smallestFloat64"
const shiftBound = 1023 - 1 + 52
@@ -517,14 +517,14 @@ func evalConst(n ir.Node) ir.Node {
case ir.OCONV, ir.ORUNESTR:
n := n.(*ir.ConvExpr)
- nl := n.Left()
+ nl := n.X
if ir.OKForConst[n.Type().Kind()] && nl.Op() == ir.OLITERAL {
return origConst(n, convertVal(nl.Val(), n.Type(), true))
}
case ir.OCONVNOP:
n := n.(*ir.ConvExpr)
- nl := n.Left()
+ nl := n.X
if ir.OKForConst[n.Type().Kind()] && nl.Op() == ir.OLITERAL {
// set so n.Orig gets OCONV instead of OCONVNOP
n.SetOp(ir.OCONV)
@@ -534,7 +534,7 @@ func evalConst(n ir.Node) ir.Node {
case ir.OADDSTR:
// Merge adjacent constants in the argument list.
n := n.(*ir.AddStringExpr)
- s := n.List().Slice()
+ s := n.List.Slice()
need := 0
for i := 0; i < len(s); i++ {
if i == 0 || !ir.IsConst(s[i-1], constant.String) || !ir.IsConst(s[i], constant.String) {
@@ -564,7 +564,7 @@ func evalConst(n ir.Node) ir.Node {
}
nl := ir.Copy(n).(*ir.AddStringExpr)
- nl.PtrList().Set(s[i:i2])
+ nl.List.Set(s[i:i2])
newList = append(newList, origConst(nl, constant.MakeString(strings.Join(strs, ""))))
i = i2 - 1
} else {
@@ -573,12 +573,12 @@ func evalConst(n ir.Node) ir.Node {
}
nn := ir.Copy(n).(*ir.AddStringExpr)
- nn.PtrList().Set(newList)
+ nn.List.Set(newList)
return nn
case ir.OCAP, ir.OLEN:
n := n.(*ir.UnaryExpr)
- nl := n.Left()
+ nl := n.X
switch nl.Type().Kind() {
case types.TSTRING:
if ir.IsConst(nl, constant.String) {
@@ -596,21 +596,21 @@ func evalConst(n ir.Node) ir.Node {
case ir.OREAL:
n := n.(*ir.UnaryExpr)
- nl := n.Left()
+ nl := n.X
if nl.Op() == ir.OLITERAL {
return origConst(n, constant.Real(nl.Val()))
}
case ir.OIMAG:
n := n.(*ir.UnaryExpr)
- nl := n.Left()
+ nl := n.X
if nl.Op() == ir.OLITERAL {
return origConst(n, constant.Imag(nl.Val()))
}
case ir.OCOMPLEX:
n := n.(*ir.BinaryExpr)
- nl, nr := n.Left(), n.Right()
+ nl, nr := n.X, n.Y
if nl.Op() == ir.OLITERAL && nr.Op() == ir.OLITERAL {
return origConst(n, makeComplex(nl.Val(), nr.Val()))
}
@@ -871,7 +871,7 @@ func (s *constSet) add(pos src.XPos, n ir.Node, what, where string) {
if conv := n; conv.Op() == ir.OCONVIFACE {
conv := conv.(*ir.ConvExpr)
if conv.Implicit() {
- n = conv.Left()
+ n = conv.X
}
}
diff --git a/src/cmd/compile/internal/gc/dcl.go b/src/cmd/compile/internal/gc/dcl.go
index d85f10faf3..9bd044c368 100644
--- a/src/cmd/compile/internal/gc/dcl.go
+++ b/src/cmd/compile/internal/gc/dcl.go
@@ -120,7 +120,7 @@ func declare(n *ir.Name, ctxt ir.Class) {
s.Lastlineno = base.Pos
s.Def = n
n.Vargen = int32(gen)
- n.SetClass(ctxt)
+ n.Class_ = ctxt
if ctxt == ir.PFUNC {
n.Sym().SetFunc(true)
}
@@ -137,9 +137,9 @@ func variter(vl []*ir.Name, t ir.Ntype, el []ir.Node) []ir.Node {
if len(el) == 1 && len(vl) > 1 {
e := el[0]
as2 := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
- as2.PtrRlist().Set1(e)
+ as2.Rhs.Set1(e)
for _, v := range vl {
- as2.PtrList().Append(v)
+ as2.Lhs.Append(v)
declare(v, dclcontext)
v.Ntype = t
v.Defn = as2
@@ -234,7 +234,7 @@ func oldname(s *types.Sym) ir.Node {
if c == nil || c.Curfn != Curfn {
// Do not have a closure var for the active closure yet; make one.
c = NewName(s)
- c.SetClass(ir.PAUTOHEAP)
+ c.Class_ = ir.PAUTOHEAP
c.SetIsClosureVar(true)
c.SetIsDDD(n.IsDDD())
c.Defn = n
@@ -810,11 +810,11 @@ func makefuncsym(s *types.Sym) {
// setNodeNameFunc marks a node as a function.
func setNodeNameFunc(n *ir.Name) {
- if n.Op() != ir.ONAME || n.Class() != ir.Pxxx {
+ if n.Op() != ir.ONAME || n.Class_ != ir.Pxxx {
base.Fatalf("expected ONAME/Pxxx node, got %v", n)
}
- n.SetClass(ir.PFUNC)
+ n.Class_ = ir.PFUNC
n.Sym().SetFunc(true)
}
@@ -876,11 +876,11 @@ func (c *nowritebarrierrecChecker) findExtraCalls(nn ir.Node) {
return
}
n := nn.(*ir.CallExpr)
- if n.Left() == nil || n.Left().Op() != ir.ONAME {
+ if n.X == nil || n.X.Op() != ir.ONAME {
return
}
- fn := n.Left().(*ir.Name)
- if fn.Class() != ir.PFUNC || fn.Name().Defn == nil {
+ fn := n.X.(*ir.Name)
+ if fn.Class_ != ir.PFUNC || fn.Name().Defn == nil {
return
}
if !isRuntimePkg(fn.Sym().Pkg) || fn.Sym().Name != "systemstack" {
@@ -888,14 +888,14 @@ func (c *nowritebarrierrecChecker) findExtraCalls(nn ir.Node) {
}
var callee *ir.Func
- arg := n.List().First()
+ arg := n.Args.First()
switch arg.Op() {
case ir.ONAME:
arg := arg.(*ir.Name)
callee = arg.Name().Defn.(*ir.Func)
case ir.OCLOSURE:
arg := arg.(*ir.ClosureExpr)
- callee = arg.Func()
+ callee = arg.Func
default:
base.Fatalf("expected ONAME or OCLOSURE node, got %+v", arg)
}
@@ -973,7 +973,7 @@ func (c *nowritebarrierrecChecker) check() {
q.PushRight(target.Nname)
}
for !q.Empty() {
- fn := q.PopLeft().Func()
+ fn := q.PopLeft().Func
// Check fn.
if fn.WBPos.IsKnown() {
diff --git a/src/cmd/compile/internal/gc/escape.go b/src/cmd/compile/internal/gc/escape.go
index 6510dfc4b3..21f02e9471 100644
--- a/src/cmd/compile/internal/gc/escape.go
+++ b/src/cmd/compile/internal/gc/escape.go
@@ -228,21 +228,21 @@ func (e *Escape) walkFunc(fn *ir.Func) {
if e.labels == nil {
e.labels = make(map[*types.Sym]labelState)
}
- e.labels[n.Sym()] = nonlooping
+ e.labels[n.Label] = nonlooping
case ir.OGOTO:
// If we visited the label before the goto,
// then this is a looping label.
n := n.(*ir.BranchStmt)
- if e.labels[n.Sym()] == nonlooping {
- e.labels[n.Sym()] = looping
+ if e.labels[n.Label] == nonlooping {
+ e.labels[n.Label] = looping
}
}
})
e.curfn = fn
e.loopDepth = 1
- e.block(fn.Body())
+ e.block(fn.Body)
if len(e.labels) != 0 {
base.FatalfAt(fn.Pos(), "leftover labels after walkFunc")
@@ -304,18 +304,18 @@ func (e *Escape) stmt(n ir.Node) {
case ir.OBLOCK:
n := n.(*ir.BlockStmt)
- e.stmts(n.List())
+ e.stmts(n.List)
case ir.ODCL:
// Record loop depth at declaration.
n := n.(*ir.Decl)
- if !ir.IsBlank(n.Left()) {
- e.dcl(n.Left())
+ if !ir.IsBlank(n.X) {
+ e.dcl(n.X)
}
case ir.OLABEL:
n := n.(*ir.LabelStmt)
- switch e.labels[n.Sym()] {
+ switch e.labels[n.Label] {
case nonlooping:
if base.Flag.LowerM > 2 {
fmt.Printf("%v:%v non-looping label\n", base.FmtPos(base.Pos), n)
@@ -328,127 +328,127 @@ func (e *Escape) stmt(n ir.Node) {
default:
base.Fatalf("label missing tag")
}
- delete(e.labels, n.Sym())
+ delete(e.labels, n.Label)
case ir.OIF:
n := n.(*ir.IfStmt)
- e.discard(n.Left())
- e.block(n.Body())
- e.block(n.Rlist())
+ e.discard(n.Cond)
+ e.block(n.Body)
+ e.block(n.Else)
case ir.OFOR, ir.OFORUNTIL:
n := n.(*ir.ForStmt)
e.loopDepth++
- e.discard(n.Left())
- e.stmt(n.Right())
- e.block(n.Body())
+ e.discard(n.Cond)
+ e.stmt(n.Post)
+ e.block(n.Body)
e.loopDepth--
case ir.ORANGE:
// for List = range Right { Nbody }
n := n.(*ir.RangeStmt)
e.loopDepth++
- ks := e.addrs(n.List())
- e.block(n.Body())
+ ks := e.addrs(n.Vars)
+ e.block(n.Body)
e.loopDepth--
// Right is evaluated outside the loop.
k := e.discardHole()
if len(ks) >= 2 {
- if n.Right().Type().IsArray() {
+ if n.X.Type().IsArray() {
k = ks[1].note(n, "range")
} else {
k = ks[1].deref(n, "range-deref")
}
}
- e.expr(e.later(k), n.Right())
+ e.expr(e.later(k), n.X)
case ir.OSWITCH:
n := n.(*ir.SwitchStmt)
- typesw := n.Left() != nil && n.Left().Op() == ir.OTYPESW
+ typesw := n.Tag != nil && n.Tag.Op() == ir.OTYPESW
var ks []EscHole
- for _, cas := range n.List().Slice() { // cases
+ for _, cas := range n.Cases.Slice() { // cases
cas := cas.(*ir.CaseStmt)
- if typesw && n.Left().(*ir.TypeSwitchGuard).Left() != nil {
- cv := cas.Rlist().First()
+ if typesw && n.Tag.(*ir.TypeSwitchGuard).Tag != nil {
+ cv := cas.Vars.First()
k := e.dcl(cv) // type switch variables have no ODCL.
if cv.Type().HasPointers() {
ks = append(ks, k.dotType(cv.Type(), cas, "switch case"))
}
}
- e.discards(cas.List())
- e.block(cas.Body())
+ e.discards(cas.List)
+ e.block(cas.Body)
}
if typesw {
- e.expr(e.teeHole(ks...), n.Left().(*ir.TypeSwitchGuard).Right())
+ e.expr(e.teeHole(ks...), n.Tag.(*ir.TypeSwitchGuard).X)
} else {
- e.discard(n.Left())
+ e.discard(n.Tag)
}
case ir.OSELECT:
n := n.(*ir.SelectStmt)
- for _, cas := range n.List().Slice() {
+ for _, cas := range n.Cases.Slice() {
cas := cas.(*ir.CaseStmt)
- e.stmt(cas.Left())
- e.block(cas.Body())
+ e.stmt(cas.Comm)
+ e.block(cas.Body)
}
case ir.OSELRECV2:
n := n.(*ir.AssignListStmt)
- e.assign(n.List().First(), n.Rlist().First(), "selrecv", n)
- e.assign(n.List().Second(), nil, "selrecv", n)
+ e.assign(n.Lhs.First(), n.Rhs.First(), "selrecv", n)
+ e.assign(n.Lhs.Second(), nil, "selrecv", n)
case ir.ORECV:
// TODO(mdempsky): Consider e.discard(n.Left).
n := n.(*ir.UnaryExpr)
e.exprSkipInit(e.discardHole(), n) // already visited n.Ninit
case ir.OSEND:
n := n.(*ir.SendStmt)
- e.discard(n.Left())
- e.assignHeap(n.Right(), "send", n)
+ e.discard(n.Chan)
+ e.assignHeap(n.Value, "send", n)
case ir.OAS:
n := n.(*ir.AssignStmt)
- e.assign(n.Left(), n.Right(), "assign", n)
+ e.assign(n.X, n.Y, "assign", n)
case ir.OASOP:
n := n.(*ir.AssignOpStmt)
- e.assign(n.Left(), n.Right(), "assign", n)
+ e.assign(n.X, n.Y, "assign", n)
case ir.OAS2:
n := n.(*ir.AssignListStmt)
- for i, nl := range n.List().Slice() {
- e.assign(nl, n.Rlist().Index(i), "assign-pair", n)
+ for i, nl := range n.Lhs.Slice() {
+ e.assign(nl, n.Rhs.Index(i), "assign-pair", n)
}
case ir.OAS2DOTTYPE: // v, ok = x.(type)
n := n.(*ir.AssignListStmt)
- e.assign(n.List().First(), n.Rlist().First(), "assign-pair-dot-type", n)
- e.assign(n.List().Second(), nil, "assign-pair-dot-type", n)
+ e.assign(n.Lhs.First(), n.Rhs.First(), "assign-pair-dot-type", n)
+ e.assign(n.Lhs.Second(), nil, "assign-pair-dot-type", n)
case ir.OAS2MAPR: // v, ok = m[k]
n := n.(*ir.AssignListStmt)
- e.assign(n.List().First(), n.Rlist().First(), "assign-pair-mapr", n)
- e.assign(n.List().Second(), nil, "assign-pair-mapr", n)
+ e.assign(n.Lhs.First(), n.Rhs.First(), "assign-pair-mapr", n)
+ e.assign(n.Lhs.Second(), nil, "assign-pair-mapr", n)
case ir.OAS2RECV: // v, ok = <-ch
n := n.(*ir.AssignListStmt)
- e.assign(n.List().First(), n.Rlist().First(), "assign-pair-receive", n)
- e.assign(n.List().Second(), nil, "assign-pair-receive", n)
+ e.assign(n.Lhs.First(), n.Rhs.First(), "assign-pair-receive", n)
+ e.assign(n.Lhs.Second(), nil, "assign-pair-receive", n)
case ir.OAS2FUNC:
n := n.(*ir.AssignListStmt)
- e.stmts(n.Rlist().First().Init())
- e.call(e.addrs(n.List()), n.Rlist().First(), nil)
+ e.stmts(n.Rhs.First().Init())
+ e.call(e.addrs(n.Lhs), n.Rhs.First(), nil)
case ir.ORETURN:
n := n.(*ir.ReturnStmt)
results := e.curfn.Type().Results().FieldSlice()
- for i, v := range n.List().Slice() {
+ for i, v := range n.Results.Slice() {
e.assign(ir.AsNode(results[i].Nname), v, "return", n)
}
case ir.OCALLFUNC, ir.OCALLMETH, ir.OCALLINTER, ir.OCLOSE, ir.OCOPY, ir.ODELETE, ir.OPANIC, ir.OPRINT, ir.OPRINTN, ir.ORECOVER:
e.call(nil, n, nil)
case ir.OGO, ir.ODEFER:
n := n.(*ir.GoDeferStmt)
- e.stmts(n.Left().Init())
- e.call(nil, n.Left(), n)
+ e.stmts(n.Call.Init())
+ e.call(nil, n.Call, n)
case ir.ORETJMP:
// TODO(mdempsky): What do? esc.go just ignores it.
@@ -491,7 +491,7 @@ func (e *Escape) exprSkipInit(k EscHole, n ir.Node) {
uintptrEscapesHack := k.uintptrEscapesHack
k.uintptrEscapesHack = false
- if uintptrEscapesHack && n.Op() == ir.OCONVNOP && n.(*ir.ConvExpr).Left().Type().IsUnsafePtr() {
+ if uintptrEscapesHack && n.Op() == ir.OCONVNOP && n.(*ir.ConvExpr).X.Type().IsUnsafePtr() {
// nop
} else if k.derefs >= 0 && !n.Type().HasPointers() {
k = e.discardHole()
@@ -506,7 +506,7 @@ func (e *Escape) exprSkipInit(k EscHole, n ir.Node) {
case ir.ONAME:
n := n.(*ir.Name)
- if n.Class() == ir.PFUNC || n.Class() == ir.PEXTERN {
+ if n.Class_ == ir.PFUNC || n.Class_ == ir.PEXTERN {
return
}
e.flow(k, e.oldLoc(n))
@@ -517,46 +517,46 @@ func (e *Escape) exprSkipInit(k EscHole, n ir.Node) {
case ir.OPLUS, ir.ONEG, ir.OBITNOT, ir.ONOT:
n := n.(*ir.UnaryExpr)
- e.discard(n.Left())
+ e.discard(n.X)
case ir.OADD, ir.OSUB, ir.OOR, ir.OXOR, ir.OMUL, ir.ODIV, ir.OMOD, ir.OLSH, ir.ORSH, ir.OAND, ir.OANDNOT, ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
n := n.(*ir.BinaryExpr)
- e.discard(n.Left())
- e.discard(n.Right())
+ e.discard(n.X)
+ e.discard(n.Y)
case ir.OANDAND, ir.OOROR:
n := n.(*ir.LogicalExpr)
- e.discard(n.Left())
- e.discard(n.Right())
+ e.discard(n.X)
+ e.discard(n.Y)
case ir.OADDR:
n := n.(*ir.AddrExpr)
- e.expr(k.addr(n, "address-of"), n.Left()) // "address-of"
+ e.expr(k.addr(n, "address-of"), n.X) // "address-of"
case ir.ODEREF:
n := n.(*ir.StarExpr)
- e.expr(k.deref(n, "indirection"), n.Left()) // "indirection"
+ e.expr(k.deref(n, "indirection"), n.X) // "indirection"
case ir.ODOT, ir.ODOTMETH, ir.ODOTINTER:
n := n.(*ir.SelectorExpr)
- e.expr(k.note(n, "dot"), n.Left())
+ e.expr(k.note(n, "dot"), n.X)
case ir.ODOTPTR:
n := n.(*ir.SelectorExpr)
- e.expr(k.deref(n, "dot of pointer"), n.Left()) // "dot of pointer"
+ e.expr(k.deref(n, "dot of pointer"), n.X) // "dot of pointer"
case ir.ODOTTYPE, ir.ODOTTYPE2:
n := n.(*ir.TypeAssertExpr)
- e.expr(k.dotType(n.Type(), n, "dot"), n.Left())
+ e.expr(k.dotType(n.Type(), n, "dot"), n.X)
case ir.OINDEX:
n := n.(*ir.IndexExpr)
- if n.Left().Type().IsArray() {
- e.expr(k.note(n, "fixed-array-index-of"), n.Left())
+ if n.X.Type().IsArray() {
+ e.expr(k.note(n, "fixed-array-index-of"), n.X)
} else {
// TODO(mdempsky): Fix why reason text.
- e.expr(k.deref(n, "dot of pointer"), n.Left())
+ e.expr(k.deref(n, "dot of pointer"), n.X)
}
- e.discard(n.Right())
+ e.discard(n.Index)
case ir.OINDEXMAP:
n := n.(*ir.IndexExpr)
- e.discard(n.Left())
- e.discard(n.Right())
+ e.discard(n.X)
+ e.discard(n.Index)
case ir.OSLICE, ir.OSLICEARR, ir.OSLICE3, ir.OSLICE3ARR, ir.OSLICESTR:
n := n.(*ir.SliceExpr)
- e.expr(k.note(n, "slice"), n.Left())
+ e.expr(k.note(n, "slice"), n.X)
low, high, max := n.SliceBounds()
e.discard(low)
e.discard(high)
@@ -564,29 +564,29 @@ func (e *Escape) exprSkipInit(k EscHole, n ir.Node) {
case ir.OCONV, ir.OCONVNOP:
n := n.(*ir.ConvExpr)
- if checkPtr(e.curfn, 2) && n.Type().IsUnsafePtr() && n.Left().Type().IsPtr() {
+ if checkPtr(e.curfn, 2) && n.Type().IsUnsafePtr() && n.X.Type().IsPtr() {
// When -d=checkptr=2 is enabled, treat
// conversions to unsafe.Pointer as an
// escaping operation. This allows better
// runtime instrumentation, since we can more
// easily detect object boundaries on the heap
// than the stack.
- e.assignHeap(n.Left(), "conversion to unsafe.Pointer", n)
- } else if n.Type().IsUnsafePtr() && n.Left().Type().IsUintptr() {
- e.unsafeValue(k, n.Left())
+ e.assignHeap(n.X, "conversion to unsafe.Pointer", n)
+ } else if n.Type().IsUnsafePtr() && n.X.Type().IsUintptr() {
+ e.unsafeValue(k, n.X)
} else {
- e.expr(k, n.Left())
+ e.expr(k, n.X)
}
case ir.OCONVIFACE:
n := n.(*ir.ConvExpr)
- if !n.Left().Type().IsInterface() && !isdirectiface(n.Left().Type()) {
+ if !n.X.Type().IsInterface() && !isdirectiface(n.X.Type()) {
k = e.spill(k, n)
}
- e.expr(k.note(n, "interface-converted"), n.Left())
+ e.expr(k.note(n, "interface-converted"), n.X)
case ir.ORECV:
n := n.(*ir.UnaryExpr)
- e.discard(n.Left())
+ e.discard(n.X)
case ir.OCALLMETH, ir.OCALLFUNC, ir.OCALLINTER, ir.OLEN, ir.OCAP, ir.OCOMPLEX, ir.OREAL, ir.OIMAG, ir.OAPPEND, ir.OCOPY:
e.call([]EscHole{k}, n, nil)
@@ -598,15 +598,15 @@ func (e *Escape) exprSkipInit(k EscHole, n ir.Node) {
case ir.OMAKESLICE:
n := n.(*ir.MakeExpr)
e.spill(k, n)
- e.discard(n.Left())
- e.discard(n.Right())
+ e.discard(n.Len)
+ e.discard(n.Cap)
case ir.OMAKECHAN:
n := n.(*ir.MakeExpr)
- e.discard(n.Left())
+ e.discard(n.Len)
case ir.OMAKEMAP:
n := n.(*ir.MakeExpr)
e.spill(k, n)
- e.discard(n.Left())
+ e.discard(n.Len)
case ir.ORECOVER:
// nop
@@ -633,17 +633,17 @@ func (e *Escape) exprSkipInit(k EscHole, n ir.Node) {
name, _ := m.Nname.(*ir.Name)
paramK := e.tagHole(ks, name, m.Type.Recv())
- e.expr(e.teeHole(paramK, closureK), n.Left())
+ e.expr(e.teeHole(paramK, closureK), n.X)
case ir.OPTRLIT:
n := n.(*ir.AddrExpr)
- e.expr(e.spill(k, n), n.Left())
+ e.expr(e.spill(k, n), n.X)
case ir.OARRAYLIT:
n := n.(*ir.CompLitExpr)
- for _, elt := range n.List().Slice() {
+ for _, elt := range n.List.Slice() {
if elt.Op() == ir.OKEY {
- elt = elt.(*ir.KeyExpr).Right()
+ elt = elt.(*ir.KeyExpr).Value
}
e.expr(k.note(n, "array literal element"), elt)
}
@@ -653,17 +653,17 @@ func (e *Escape) exprSkipInit(k EscHole, n ir.Node) {
k = e.spill(k, n)
k.uintptrEscapesHack = uintptrEscapesHack // for ...uintptr parameters
- for _, elt := range n.List().Slice() {
+ for _, elt := range n.List.Slice() {
if elt.Op() == ir.OKEY {
- elt = elt.(*ir.KeyExpr).Right()
+ elt = elt.(*ir.KeyExpr).Value
}
e.expr(k.note(n, "slice-literal-element"), elt)
}
case ir.OSTRUCTLIT:
n := n.(*ir.CompLitExpr)
- for _, elt := range n.List().Slice() {
- e.expr(k.note(n, "struct literal element"), elt.(*ir.StructKeyExpr).Left())
+ for _, elt := range n.List.Slice() {
+ e.expr(k.note(n, "struct literal element"), elt.(*ir.StructKeyExpr).Value)
}
case ir.OMAPLIT:
@@ -671,10 +671,10 @@ func (e *Escape) exprSkipInit(k EscHole, n ir.Node) {
e.spill(k, n)
// Map keys and values are always stored in the heap.
- for _, elt := range n.List().Slice() {
+ for _, elt := range n.List.Slice() {
elt := elt.(*ir.KeyExpr)
- e.assignHeap(elt.Left(), "map literal key", n)
- e.assignHeap(elt.Right(), "map literal value", n)
+ e.assignHeap(elt.Key, "map literal key", n)
+ e.assignHeap(elt.Value, "map literal value", n)
}
case ir.OCLOSURE:
@@ -682,7 +682,7 @@ func (e *Escape) exprSkipInit(k EscHole, n ir.Node) {
k = e.spill(k, n)
// Link addresses of captured variables to closure.
- for _, v := range n.Func().ClosureVars {
+ for _, v := range n.Func.ClosureVars {
k := k
if !v.Byval() {
k = k.addr(v, "reference")
@@ -694,7 +694,7 @@ func (e *Escape) exprSkipInit(k EscHole, n ir.Node) {
case ir.ORUNES2STR, ir.OBYTES2STR, ir.OSTR2RUNES, ir.OSTR2BYTES, ir.ORUNESTR:
n := n.(*ir.ConvExpr)
e.spill(k, n)
- e.discard(n.Left())
+ e.discard(n.X)
case ir.OADDSTR:
n := n.(*ir.AddStringExpr)
@@ -702,7 +702,7 @@ func (e *Escape) exprSkipInit(k EscHole, n ir.Node) {
// Arguments of OADDSTR never escape;
// runtime.concatstrings makes sure of that.
- e.discards(n.List())
+ e.discards(n.List)
}
}
@@ -718,31 +718,31 @@ func (e *Escape) unsafeValue(k EscHole, n ir.Node) {
switch n.Op() {
case ir.OCONV, ir.OCONVNOP:
n := n.(*ir.ConvExpr)
- if n.Left().Type().IsUnsafePtr() {
- e.expr(k, n.Left())
+ if n.X.Type().IsUnsafePtr() {
+ e.expr(k, n.X)
} else {
- e.discard(n.Left())
+ e.discard(n.X)
}
case ir.ODOTPTR:
n := n.(*ir.SelectorExpr)
if isReflectHeaderDataField(n) {
- e.expr(k.deref(n, "reflect.Header.Data"), n.Left())
+ e.expr(k.deref(n, "reflect.Header.Data"), n.X)
} else {
- e.discard(n.Left())
+ e.discard(n.X)
}
case ir.OPLUS, ir.ONEG, ir.OBITNOT:
n := n.(*ir.UnaryExpr)
- e.unsafeValue(k, n.Left())
+ e.unsafeValue(k, n.X)
case ir.OADD, ir.OSUB, ir.OOR, ir.OXOR, ir.OMUL, ir.ODIV, ir.OMOD, ir.OAND, ir.OANDNOT:
n := n.(*ir.BinaryExpr)
- e.unsafeValue(k, n.Left())
- e.unsafeValue(k, n.Right())
+ e.unsafeValue(k, n.X)
+ e.unsafeValue(k, n.Y)
case ir.OLSH, ir.ORSH:
n := n.(*ir.BinaryExpr)
- e.unsafeValue(k, n.Left())
+ e.unsafeValue(k, n.X)
// RHS need not be uintptr-typed (#32959) and can't meaningfully
// flow pointers anyway.
- e.discard(n.Right())
+ e.discard(n.Y)
default:
e.exprSkipInit(e.discardHole(), n)
}
@@ -775,7 +775,7 @@ func (e *Escape) addr(n ir.Node) EscHole {
base.Fatalf("unexpected addr: %v", n)
case ir.ONAME:
n := n.(*ir.Name)
- if n.Class() == ir.PEXTERN {
+ if n.Class_ == ir.PEXTERN {
break
}
k = e.oldLoc(n).asHole()
@@ -784,21 +784,21 @@ func (e *Escape) addr(n ir.Node) EscHole {
e.addr(n.Name_)
case ir.ODOT:
n := n.(*ir.SelectorExpr)
- k = e.addr(n.Left())
+ k = e.addr(n.X)
case ir.OINDEX:
n := n.(*ir.IndexExpr)
- e.discard(n.Right())
- if n.Left().Type().IsArray() {
- k = e.addr(n.Left())
+ e.discard(n.Index)
+ if n.X.Type().IsArray() {
+ k = e.addr(n.X)
} else {
- e.discard(n.Left())
+ e.discard(n.X)
}
case ir.ODEREF, ir.ODOTPTR:
e.discard(n)
case ir.OINDEXMAP:
n := n.(*ir.IndexExpr)
- e.discard(n.Left())
- e.assignHeap(n.Right(), "key of map put", n)
+ e.discard(n.X)
+ e.assignHeap(n.Index, "key of map put", n)
}
if !n.Type().HasPointers() {
@@ -876,17 +876,17 @@ func (e *Escape) call(ks []EscHole, call, where ir.Node) {
var fn *ir.Name
switch call.Op() {
case ir.OCALLFUNC:
- switch v := staticValue(call.Left()); {
- case v.Op() == ir.ONAME && v.(*ir.Name).Class() == ir.PFUNC:
+ switch v := staticValue(call.X); {
+ case v.Op() == ir.ONAME && v.(*ir.Name).Class_ == ir.PFUNC:
fn = v.(*ir.Name)
case v.Op() == ir.OCLOSURE:
- fn = v.(*ir.ClosureExpr).Func().Nname
+ fn = v.(*ir.ClosureExpr).Func.Nname
}
case ir.OCALLMETH:
- fn = methodExprName(call.Left())
+ fn = methodExprName(call.X)
}
- fntype := call.Left().Type()
+ fntype := call.X.Type()
if fn != nil {
fntype = fn.Type()
}
@@ -898,20 +898,20 @@ func (e *Escape) call(ks []EscHole, call, where ir.Node) {
}
if r := fntype.Recv(); r != nil {
- argument(e.tagHole(ks, fn, r), call.Left().(*ir.SelectorExpr).Left())
+ argument(e.tagHole(ks, fn, r), call.X.(*ir.SelectorExpr).X)
} else {
// Evaluate callee function expression.
- argument(e.discardHole(), call.Left())
+ argument(e.discardHole(), call.X)
}
- args := call.List().Slice()
+ args := call.Args.Slice()
for i, param := range fntype.Params().FieldSlice() {
argument(e.tagHole(ks, fn, param), args[i])
}
case ir.OAPPEND:
call := call.(*ir.CallExpr)
- args := call.List().Slice()
+ args := call.Args.Slice()
// Appendee slice may flow directly to the result, if
// it has enough capacity. Alternatively, a new heap
@@ -923,7 +923,7 @@ func (e *Escape) call(ks []EscHole, call, where ir.Node) {
}
argument(appendeeK, args[0])
- if call.IsDDD() {
+ if call.IsDDD {
appendedK := e.discardHole()
if args[1].Type().IsSlice() && args[1].Type().Elem().HasPointers() {
appendedK = e.heapHole().deref(call, "appended slice...")
@@ -937,30 +937,30 @@ func (e *Escape) call(ks []EscHole, call, where ir.Node) {
case ir.OCOPY:
call := call.(*ir.BinaryExpr)
- argument(e.discardHole(), call.Left())
+ argument(e.discardHole(), call.X)
copiedK := e.discardHole()
- if call.Right().Type().IsSlice() && call.Right().Type().Elem().HasPointers() {
+ if call.Y.Type().IsSlice() && call.Y.Type().Elem().HasPointers() {
copiedK = e.heapHole().deref(call, "copied slice")
}
- argument(copiedK, call.Right())
+ argument(copiedK, call.Y)
case ir.OPANIC:
call := call.(*ir.UnaryExpr)
- argument(e.heapHole(), call.Left())
+ argument(e.heapHole(), call.X)
case ir.OCOMPLEX:
call := call.(*ir.BinaryExpr)
- argument(e.discardHole(), call.Left())
- argument(e.discardHole(), call.Right())
+ argument(e.discardHole(), call.X)
+ argument(e.discardHole(), call.Y)
case ir.ODELETE, ir.OPRINT, ir.OPRINTN, ir.ORECOVER:
call := call.(*ir.CallExpr)
- for _, arg := range call.List().Slice() {
+ for _, arg := range call.Args.Slice() {
argument(e.discardHole(), arg)
}
case ir.OLEN, ir.OCAP, ir.OREAL, ir.OIMAG, ir.OCLOSE:
call := call.(*ir.UnaryExpr)
- argument(e.discardHole(), call.Left())
+ argument(e.discardHole(), call.X)
}
}
@@ -1557,7 +1557,7 @@ func (e *Escape) finish(fns []*ir.Func) {
}
func (l *EscLocation) isName(c ir.Class) bool {
- return l.n != nil && l.n.Op() == ir.ONAME && l.n.(*ir.Name).Class() == c
+ return l.n != nil && l.n.Op() == ir.ONAME && l.n.(*ir.Name).Class_ == c
}
const numEscResults = 7
@@ -1726,10 +1726,10 @@ func isSliceSelfAssign(dst, src ir.Node) bool {
return false
case ir.ODEREF:
dst := dst.(*ir.StarExpr)
- dstX = dst.Left()
+ dstX = dst.X
case ir.ODOTPTR:
dst := dst.(*ir.SelectorExpr)
- dstX = dst.Left()
+ dstX = dst.X
}
if dstX.Op() != ir.ONAME {
return false
@@ -1749,7 +1749,7 @@ func isSliceSelfAssign(dst, src ir.Node) bool {
// For slicing an array (not pointer to array), there is an implicit OADDR.
// We check that to determine non-pointer array slicing.
src := src.(*ir.SliceExpr)
- if src.Left().Op() == ir.OADDR {
+ if src.X.Op() == ir.OADDR {
return false
}
default:
@@ -1757,15 +1757,15 @@ func isSliceSelfAssign(dst, src ir.Node) bool {
}
// slice is applied to ONAME dereference.
var baseX ir.Node
- switch base := src.(*ir.SliceExpr).Left(); base.Op() {
+ switch base := src.(*ir.SliceExpr).X; base.Op() {
default:
return false
case ir.ODEREF:
base := base.(*ir.StarExpr)
- baseX = base.Left()
+ baseX = base.X
case ir.ODOTPTR:
base := base.(*ir.SelectorExpr)
- baseX = base.Left()
+ baseX = base.X
}
if baseX.Op() != ir.ONAME {
return false
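
The pattern recognized here is the classic bytes.Buffer-style self-slice. A minimal sketch, with hypothetical type and field names:

	type ring struct{ buf []byte }

	func (r *ring) trim(m, n int) {
		// dst and src share the same ONAME base (r), so this
		// self-assignment is not treated as making buf escape.
		r.buf = r.buf[m:n]
	}
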
@@ -1801,14 +1801,14 @@ func isSelfAssign(dst, src ir.Node) bool {
// Safe trailing accessors that are permitted to differ.
dst := dst.(*ir.SelectorExpr)
src := src.(*ir.SelectorExpr)
- return samesafeexpr(dst.Left(), src.Left())
+ return samesafeexpr(dst.X, src.X)
case ir.OINDEX:
dst := dst.(*ir.IndexExpr)
src := src.(*ir.IndexExpr)
- if mayAffectMemory(dst.Right()) || mayAffectMemory(src.Right()) {
+ if mayAffectMemory(dst.Index) || mayAffectMemory(src.Index) {
return false
}
- return samesafeexpr(dst.Left(), src.Left())
+ return samesafeexpr(dst.X, src.X)
default:
return false
}
@@ -1834,27 +1834,27 @@ func mayAffectMemory(n ir.Node) bool {
case ir.OADD, ir.OSUB, ir.OOR, ir.OXOR, ir.OMUL, ir.OLSH, ir.ORSH, ir.OAND, ir.OANDNOT, ir.ODIV, ir.OMOD:
n := n.(*ir.BinaryExpr)
- return mayAffectMemory(n.Left()) || mayAffectMemory(n.Right())
+ return mayAffectMemory(n.X) || mayAffectMemory(n.Y)
case ir.OINDEX:
n := n.(*ir.IndexExpr)
- return mayAffectMemory(n.Left()) || mayAffectMemory(n.Right())
+ return mayAffectMemory(n.X) || mayAffectMemory(n.Index)
case ir.OCONVNOP, ir.OCONV:
n := n.(*ir.ConvExpr)
- return mayAffectMemory(n.Left())
+ return mayAffectMemory(n.X)
case ir.OLEN, ir.OCAP, ir.ONOT, ir.OBITNOT, ir.OPLUS, ir.ONEG, ir.OALIGNOF, ir.OOFFSETOF, ir.OSIZEOF:
n := n.(*ir.UnaryExpr)
- return mayAffectMemory(n.Left())
+ return mayAffectMemory(n.X)
case ir.ODOT, ir.ODOTPTR:
n := n.(*ir.SelectorExpr)
- return mayAffectMemory(n.Left())
+ return mayAffectMemory(n.X)
case ir.ODEREF:
n := n.(*ir.StarExpr)
- return mayAffectMemory(n.Left())
+ return mayAffectMemory(n.X)
default:
return true
@@ -1871,7 +1871,7 @@ func heapAllocReason(n ir.Node) string {
// Parameters are always passed via the stack.
if n.Op() == ir.ONAME {
n := n.(*ir.Name)
- if n.Class() == ir.PPARAM || n.Class() == ir.PPARAMOUT {
+ if n.Class_ == ir.PPARAM || n.Class_ == ir.PPARAMOUT {
return ""
}
}
@@ -1893,9 +1893,9 @@ func heapAllocReason(n ir.Node) string {
if n.Op() == ir.OMAKESLICE {
n := n.(*ir.MakeExpr)
- r := n.Right()
+ r := n.Cap
if r == nil {
- r = n.Left()
+ r = n.Len
}
if !smallintconst(r) {
return "non-constant size"
@@ -1928,7 +1928,7 @@ func addrescapes(n ir.Node) {
// if this is a tmpname (PAUTO), it was tagged by tmpname as not escaping.
// on PPARAM it means something different.
- if n.Class() == ir.PAUTO && n.Esc() == EscNever {
+ if n.Class_ == ir.PAUTO && n.Esc() == EscNever {
break
}
@@ -1938,7 +1938,7 @@ func addrescapes(n ir.Node) {
break
}
- if n.Class() != ir.PPARAM && n.Class() != ir.PPARAMOUT && n.Class() != ir.PAUTO {
+ if n.Class_ != ir.PPARAM && n.Class_ != ir.PPARAMOUT && n.Class_ != ir.PAUTO {
break
}
@@ -1969,18 +1969,18 @@ func addrescapes(n ir.Node) {
// is always a heap pointer anyway.
case ir.ODOT:
n := n.(*ir.SelectorExpr)
- addrescapes(n.Left())
+ addrescapes(n.X)
case ir.OINDEX:
n := n.(*ir.IndexExpr)
- if !n.Left().Type().IsSlice() {
- addrescapes(n.Left())
+ if !n.X.Type().IsSlice() {
+ addrescapes(n.X)
}
case ir.OPAREN:
n := n.(*ir.ParenExpr)
- addrescapes(n.Left())
+ addrescapes(n.X)
case ir.OCONVNOP:
n := n.(*ir.ConvExpr)
- addrescapes(n.Left())
+ addrescapes(n.X)
}
}
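
A hedged example of the propagation rules above: taking the address of a struct field moves the whole variable to the heap, while a slice element lives in a separate backing array, so the slice variable itself need not move:

	type pair struct{ a, b int }

	func field() *int {
		var p pair
		return &p.a // ODOT: addrescapes(p.X), so all of p moves to the heap
	}

	func element(s []int) *int {
		return &s[0] // OINDEX on a slice: s itself need not move
	}
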
@@ -1992,7 +1992,7 @@ func moveToHeap(n *ir.Name) {
if base.Flag.CompilingRuntime {
base.Errorf("%v escapes to heap, not allowed in runtime", n)
}
- if n.Class() == ir.PAUTOHEAP {
+ if n.Class_ == ir.PAUTOHEAP {
ir.Dump("n", n)
base.Fatalf("double move to heap")
}
@@ -2011,7 +2011,7 @@ func moveToHeap(n *ir.Name) {
// Parameters have a local stack copy used at function start/end
// in addition to the copy in the heap that may live longer than
// the function.
- if n.Class() == ir.PPARAM || n.Class() == ir.PPARAMOUT {
+ if n.Class_ == ir.PPARAM || n.Class_ == ir.PPARAMOUT {
if n.FrameOffset() == types.BADWIDTH {
base.Fatalf("addrescapes before param assignment")
}
@@ -2023,9 +2023,9 @@ func moveToHeap(n *ir.Name) {
stackcopy := NewName(n.Sym())
stackcopy.SetType(n.Type())
stackcopy.SetFrameOffset(n.FrameOffset())
- stackcopy.SetClass(n.Class())
+ stackcopy.Class_ = n.Class_
stackcopy.Heapaddr = heapaddr
- if n.Class() == ir.PPARAMOUT {
+ if n.Class_ == ir.PPARAMOUT {
// Make sure the pointer to the heap copy is kept live throughout the function.
// The function could panic at any point, and then a defer could recover.
// Thus, we need the pointer to the heap copy always available so the
@@ -2047,7 +2047,7 @@ func moveToHeap(n *ir.Name) {
}
// Parameters are before locals, so can stop early.
// This limits the search even in functions with many local variables.
- if d.Class() == ir.PAUTO {
+ if d.Class_ == ir.PAUTO {
break
}
}
@@ -2058,7 +2058,7 @@ func moveToHeap(n *ir.Name) {
}
// Modify n in place so that uses of n now mean indirection of the heapaddr.
- n.SetClass(ir.PAUTOHEAP)
+ n.Class_ = ir.PAUTOHEAP
n.SetFrameOffset(0)
n.Heapaddr = heapaddr
n.SetEsc(EscHeap)
@@ -2084,7 +2084,7 @@ func (e *Escape) paramTag(fn *ir.Func, narg int, f *types.Field) string {
return fmt.Sprintf("arg#%d", narg)
}
- if fn.Body().Len() == 0 {
+ if fn.Body.Len() == 0 {
// Assume that uintptr arguments must be held live across the call.
// This is most important for syscall.Syscall.
// See golang.org/issue/13372.
@@ -2106,7 +2106,7 @@ func (e *Escape) paramTag(fn *ir.Func, narg int, f *types.Field) string {
// External functions are assumed unsafe, unless
// //go:noescape is given before the declaration.
- if fn.Func().Pragma&ir.Noescape != 0 {
+ if fn.Pragma&ir.Noescape != 0 {
if base.Flag.LowerM != 0 && f.Sym != nil {
base.WarnfAt(f.Pos, "%v does not escape", name())
}
@@ -2120,7 +2120,7 @@ func (e *Escape) paramTag(fn *ir.Func, narg int, f *types.Field) string {
return esc.Encode()
}
- if fn.Func().Pragma&ir.UintptrEscapes != 0 {
+ if fn.Pragma&ir.UintptrEscapes != 0 {
if f.Type.IsUintptr() {
if base.Flag.LowerM != 0 {
base.WarnfAt(f.Pos, "marking %v as escaping uintptr", name())
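
The uintptr tagging above protects the syscall.Syscall pattern referenced earlier (go.dev/issue/13372). A sketch of that pattern, assuming a unix target; the wrapper itself is hypothetical:

	//go:build unix

	package demo

	import (
		"syscall"
		"unsafe"
	)

	// The callee receives only a uintptr, which escape analysis would
	// otherwise treat as a plain integer; the tag keeps buf live.
	func write(fd int, buf []byte) (int, error) {
		p := unsafe.Pointer(&buf[0])
		n, _, errno := syscall.Syscall(syscall.SYS_WRITE, uintptr(fd), uintptr(p), uintptr(len(buf)))
		if errno != 0 {
			return 0, errno
		}
		return int(n), nil
	}
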
diff --git a/src/cmd/compile/internal/gc/export.go b/src/cmd/compile/internal/gc/export.go
index 8a8295537c..2855f815be 100644
--- a/src/cmd/compile/internal/gc/export.go
+++ b/src/cmd/compile/internal/gc/export.go
@@ -83,7 +83,7 @@ func importsym(ipkg *types.Pkg, pos src.XPos, s *types.Sym, op ir.Op, ctxt ir.Cl
}
n := ir.NewDeclNameAt(pos, op, s)
- n.SetClass(ctxt) // TODO(mdempsky): Move this into NewDeclNameAt too?
+ n.Class_ = ctxt // TODO(mdempsky): Move this into NewDeclNameAt too?
s.SetPkgDef(n)
s.Importdef = ipkg
return n
diff --git a/src/cmd/compile/internal/gc/gen.go b/src/cmd/compile/internal/gc/gen.go
index 25b241e236..f83c636472 100644
--- a/src/cmd/compile/internal/gc/gen.go
+++ b/src/cmd/compile/internal/gc/gen.go
@@ -35,7 +35,7 @@ func isParamStackCopy(n ir.Node) bool {
return false
}
name := n.(*ir.Name)
- return (name.Class() == ir.PPARAM || name.Class() == ir.PPARAMOUT) && name.Heapaddr != nil
+ return (name.Class_ == ir.PPARAM || name.Class_ == ir.PPARAMOUT) && name.Heapaddr != nil
}
// isParamHeapCopy reports whether this is the on-heap copy of
@@ -45,7 +45,7 @@ func isParamHeapCopy(n ir.Node) bool {
return false
}
name := n.(*ir.Name)
- return name.Class() == ir.PAUTOHEAP && name.Name().Stackcopy != nil
+ return name.Class_ == ir.PAUTOHEAP && name.Name().Stackcopy != nil
}
// autotmpname returns the name for an autotmp variable numbered n.
@@ -79,7 +79,7 @@ func tempAt(pos src.XPos, curfn *ir.Func, t *types.Type) *ir.Name {
n := ir.NewNameAt(pos, s)
s.Def = n
n.SetType(t)
- n.SetClass(ir.PAUTO)
+ n.Class_ = ir.PAUTO
n.SetEsc(EscNever)
n.Curfn = curfn
n.SetUsed(true)
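
isParamStackCopy and isParamHeapCopy distinguish the two halves of an escaping parameter. A minimal sketch of the situation they describe:

	func leak(p int) *int {
		// p is moved to the heap (PAUTOHEAP with a Stackcopy):
		// the stack copy is synced at function entry/exit, while
		// the returned pointer refers to the heap copy.
		return &p
	}
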
diff --git a/src/cmd/compile/internal/gc/gsubr.go b/src/cmd/compile/internal/gc/gsubr.go
index b0ad01bc5d..6008abeff8 100644
--- a/src/cmd/compile/internal/gc/gsubr.go
+++ b/src/cmd/compile/internal/gc/gsubr.go
@@ -270,16 +270,16 @@ func makeABIWrapper(f *ir.Func, wrapperABI obj.ABI) {
tail = ir.NewBranchStmt(base.Pos, ir.ORETJMP, f.Nname.Sym())
} else {
call := ir.NewCallExpr(base.Pos, ir.OCALL, f.Nname, nil)
- call.PtrList().Set(paramNnames(tfn.Type()))
- call.SetIsDDD(tfn.Type().IsVariadic())
+ call.Args.Set(paramNnames(tfn.Type()))
+ call.IsDDD = tfn.Type().IsVariadic()
tail = call
if tfn.Type().NumResults() > 0 {
n := ir.NewReturnStmt(base.Pos, nil)
- n.PtrList().Set1(call)
+ n.Results.Set1(call)
tail = n
}
}
- fn.PtrBody().Append(tail)
+ fn.Body.Append(tail)
funcbody()
if base.Debug.DclStack != 0 {
@@ -288,7 +288,7 @@ func makeABIWrapper(f *ir.Func, wrapperABI obj.ABI) {
typecheckFunc(fn)
Curfn = fn
- typecheckslice(fn.Body().Slice(), ctxStmt)
+ typecheckslice(fn.Body.Slice(), ctxStmt)
escapeFuncs([]*ir.Func{fn}, false)
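
Conceptually, the wrapper assembled above forwards its parameters and results. A hedged Go-level sketch; the names are hypothetical, and the real wrapper is built directly in IR and may tail-jump via ORETJMP instead of calling:

	func addABI0(a, b int) int { // wrapper under the alternate ABI
		return add(a, b) // OCALL with paramNnames; IsDDD set if variadic
	}

	func add(a, b int) int { return a + b }
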
diff --git a/src/cmd/compile/internal/gc/iexport.go b/src/cmd/compile/internal/gc/iexport.go
index 0f7d62c5bf..60aa2eae8b 100644
--- a/src/cmd/compile/internal/gc/iexport.go
+++ b/src/cmd/compile/internal/gc/iexport.go
@@ -429,7 +429,7 @@ func (p *iexporter) doDecl(n *ir.Name) {
switch n.Op() {
case ir.ONAME:
- switch n.Class() {
+ switch n.Class_ {
case ir.PEXTERN:
// Variable.
w.tag('V')
@@ -449,7 +449,7 @@ func (p *iexporter) doDecl(n *ir.Name) {
w.funcExt(n)
default:
- base.Fatalf("unexpected class: %v, %v", n, n.Class())
+ base.Fatalf("unexpected class: %v, %v", n, n.Class_)
}
case ir.OLITERAL:
@@ -528,7 +528,7 @@ func (p *iexporter) doInline(f *ir.Name) {
w := p.newWriter()
w.setPkg(fnpkg(f), false)
- w.stmtList(ir.AsNodes(f.Func().Inl.Body))
+ w.stmtList(ir.AsNodes(f.Func.Inl.Body))
w.finish("inl", p.inlineIndex, f.Sym())
}
@@ -983,14 +983,14 @@ func (w *exportWriter) funcExt(n *ir.Name) {
}
// Inline body.
- if n.Func().Inl != nil {
- w.uint64(1 + uint64(n.Func().Inl.Cost))
- if n.Func().ExportInline() {
+ if n.Func.Inl != nil {
+ w.uint64(1 + uint64(n.Func.Inl.Cost))
+ if n.Func.ExportInline() {
w.p.doInline(n)
}
// Endlineno for inlined function.
- w.pos(n.Func().Endlineno)
+ w.pos(n.Func.Endlineno)
} else {
w.uint64(0)
}
@@ -1068,27 +1068,27 @@ func (w *exportWriter) stmt(n ir.Node) {
// generate OBLOCK nodes except to denote an empty
// function body, although that may change.)
n := n.(*ir.BlockStmt)
- for _, n := range n.List().Slice() {
+ for _, n := range n.List.Slice() {
w.stmt(n)
}
case ir.ODCL:
n := n.(*ir.Decl)
w.op(ir.ODCL)
- w.pos(n.Left().Pos())
- w.localName(n.Left().(*ir.Name))
- w.typ(n.Left().Type())
+ w.pos(n.X.Pos())
+ w.localName(n.X.(*ir.Name))
+ w.typ(n.X.Type())
case ir.OAS:
// Don't export "v = <N>" initializing statements, hope they're always
// preceded by the DCL which will be re-parsed and typecheck to reproduce
// the "v = <N>" again.
n := n.(*ir.AssignStmt)
- if n.Right() != nil {
+ if n.Y != nil {
w.op(ir.OAS)
w.pos(n.Pos())
- w.expr(n.Left())
- w.expr(n.Right())
+ w.expr(n.X)
+ w.expr(n.Y)
}
case ir.OASOP:
@@ -1096,23 +1096,23 @@ func (w *exportWriter) stmt(n ir.Node) {
w.op(ir.OASOP)
w.pos(n.Pos())
w.op(n.AsOp)
- w.expr(n.Left())
- if w.bool(!n.Implicit()) {
- w.expr(n.Right())
+ w.expr(n.X)
+ if w.bool(!n.IncDec) {
+ w.expr(n.Y)
}
case ir.OAS2, ir.OAS2DOTTYPE, ir.OAS2FUNC, ir.OAS2MAPR, ir.OAS2RECV:
n := n.(*ir.AssignListStmt)
w.op(ir.OAS2)
w.pos(n.Pos())
- w.exprList(n.List())
- w.exprList(n.Rlist())
+ w.exprList(n.Lhs)
+ w.exprList(n.Rhs)
case ir.ORETURN:
n := n.(*ir.ReturnStmt)
w.op(ir.ORETURN)
w.pos(n.Pos())
- w.exprList(n.List())
+ w.exprList(n.Results)
// case ORETJMP:
// unreachable - generated by compiler for trampoline routines
@@ -1121,32 +1121,32 @@ func (w *exportWriter) stmt(n ir.Node) {
n := n.(*ir.GoDeferStmt)
w.op(n.Op())
w.pos(n.Pos())
- w.expr(n.Left())
+ w.expr(n.Call)
case ir.OIF:
n := n.(*ir.IfStmt)
w.op(ir.OIF)
w.pos(n.Pos())
w.stmtList(n.Init())
- w.expr(n.Left())
- w.stmtList(n.Body())
- w.stmtList(n.Rlist())
+ w.expr(n.Cond)
+ w.stmtList(n.Body)
+ w.stmtList(n.Else)
case ir.OFOR:
n := n.(*ir.ForStmt)
w.op(ir.OFOR)
w.pos(n.Pos())
w.stmtList(n.Init())
- w.exprsOrNil(n.Left(), n.Right())
- w.stmtList(n.Body())
+ w.exprsOrNil(n.Cond, n.Post)
+ w.stmtList(n.Body)
case ir.ORANGE:
n := n.(*ir.RangeStmt)
w.op(ir.ORANGE)
w.pos(n.Pos())
- w.stmtList(n.List())
- w.expr(n.Right())
- w.stmtList(n.Body())
+ w.stmtList(n.Vars)
+ w.expr(n.X)
+ w.stmtList(n.Body)
case ir.OSELECT:
n := n.(*ir.SelectStmt)
@@ -1161,7 +1161,7 @@ func (w *exportWriter) stmt(n ir.Node) {
w.op(n.Op())
w.pos(n.Pos())
w.stmtList(n.Init())
- w.exprsOrNil(n.Left(), nil)
+ w.exprsOrNil(n.Tag, nil)
w.caseList(n)
// case OCASE:
@@ -1191,11 +1191,11 @@ func isNamedTypeSwitch(n ir.Node) bool {
return false
}
sw := n.(*ir.SwitchStmt)
- if sw.Left() == nil || sw.Left().Op() != ir.OTYPESW {
+ if sw.Tag == nil || sw.Tag.Op() != ir.OTYPESW {
return false
}
- guard := sw.Left().(*ir.TypeSwitchGuard)
- return guard.Left() != nil
+ guard := sw.Tag.(*ir.TypeSwitchGuard)
+ return guard.Tag != nil
}
func (w *exportWriter) caseList(sw ir.Node) {
@@ -1203,19 +1203,19 @@ func (w *exportWriter) caseList(sw ir.Node) {
var cases []ir.Node
if sw.Op() == ir.OSWITCH {
- cases = sw.(*ir.SwitchStmt).List().Slice()
+ cases = sw.(*ir.SwitchStmt).Cases.Slice()
} else {
- cases = sw.(*ir.SelectStmt).List().Slice()
+ cases = sw.(*ir.SelectStmt).Cases.Slice()
}
w.uint64(uint64(len(cases)))
for _, cas := range cases {
cas := cas.(*ir.CaseStmt)
w.pos(cas.Pos())
- w.stmtList(cas.List())
+ w.stmtList(cas.List)
if namedTypeSwitch {
- w.localName(cas.Rlist().First().(*ir.Name))
+ w.localName(cas.Vars.First().(*ir.Name))
}
- w.stmtList(cas.Body())
+ w.stmtList(cas.Body)
}
}
@@ -1230,21 +1230,21 @@ func simplifyForExport(n ir.Node) ir.Node {
switch n.Op() {
case ir.OPAREN:
n := n.(*ir.ParenExpr)
- return simplifyForExport(n.Left())
+ return simplifyForExport(n.X)
case ir.ODEREF:
n := n.(*ir.StarExpr)
if n.Implicit() {
- return simplifyForExport(n.Left())
+ return simplifyForExport(n.X)
}
case ir.OADDR:
n := n.(*ir.AddrExpr)
if n.Implicit() {
- return simplifyForExport(n.Left())
+ return simplifyForExport(n.X)
}
case ir.ODOT, ir.ODOTPTR:
n := n.(*ir.SelectorExpr)
if n.Implicit() {
- return simplifyForExport(n.Left())
+ return simplifyForExport(n.X)
}
}
return n
@@ -1283,7 +1283,7 @@ func (w *exportWriter) expr(n ir.Node) {
case ir.ONAME:
// Package scope name.
n := n.(*ir.Name)
- if (n.Class() == ir.PEXTERN || n.Class() == ir.PFUNC) && !ir.IsBlank(n) {
+ if (n.Class_ == ir.PEXTERN || n.Class_ == ir.PFUNC) && !ir.IsBlank(n) {
w.op(ir.ONONAME)
w.qualifiedIdent(n)
break
@@ -1305,14 +1305,14 @@ func (w *exportWriter) expr(n ir.Node) {
w.op(ir.OTYPESW)
w.pos(n.Pos())
var s *types.Sym
- if n.Left() != nil {
- if n.Left().Op() != ir.ONONAME {
- base.Fatalf("expected ONONAME, got %v", n.Left())
+ if n.Tag != nil {
+ if n.Tag.Op() != ir.ONONAME {
+ base.Fatalf("expected ONONAME, got %v", n.Tag)
}
- s = n.Left().Sym()
+ s = n.Tag.Sym()
}
w.localIdent(s, 0) // declared pseudo-variable, if any
- w.exprsOrNil(n.Right(), nil)
+ w.exprsOrNil(n.X, nil)
// case OTARRAY, OTMAP, OTCHAN, OTSTRUCT, OTINTER, OTFUNC:
// should have been resolved by typechecking - handled by default case
@@ -1327,27 +1327,27 @@ func (w *exportWriter) expr(n ir.Node) {
n := n.(*ir.AddrExpr)
w.op(ir.OADDR)
w.pos(n.Pos())
- w.expr(n.Left())
+ w.expr(n.X)
case ir.OSTRUCTLIT:
n := n.(*ir.CompLitExpr)
w.op(ir.OSTRUCTLIT)
w.pos(n.Pos())
w.typ(n.Type())
- w.fieldList(n.List()) // special handling of field names
+ w.fieldList(n.List) // special handling of field names
case ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT:
n := n.(*ir.CompLitExpr)
w.op(ir.OCOMPLIT)
w.pos(n.Pos())
w.typ(n.Type())
- w.exprList(n.List())
+ w.exprList(n.List)
case ir.OKEY:
n := n.(*ir.KeyExpr)
w.op(ir.OKEY)
w.pos(n.Pos())
- w.exprsOrNil(n.Left(), n.Right())
+ w.exprsOrNil(n.Key, n.Value)
// case OSTRUCTKEY:
// unreachable - handled in case OSTRUCTLIT by elemList
@@ -1357,35 +1357,35 @@ func (w *exportWriter) expr(n ir.Node) {
n := n.(*ir.CallPartExpr)
w.op(ir.OXDOT)
w.pos(n.Pos())
- w.expr(n.Left())
- w.selector(n.Sym())
+ w.expr(n.X)
+ w.selector(n.Method.Sym)
case ir.OXDOT, ir.ODOT, ir.ODOTPTR, ir.ODOTINTER, ir.ODOTMETH:
n := n.(*ir.SelectorExpr)
w.op(ir.OXDOT)
w.pos(n.Pos())
- w.expr(n.Left())
- w.selector(n.Sym())
+ w.expr(n.X)
+ w.selector(n.Sel)
case ir.ODOTTYPE, ir.ODOTTYPE2:
n := n.(*ir.TypeAssertExpr)
w.op(ir.ODOTTYPE)
w.pos(n.Pos())
- w.expr(n.Left())
+ w.expr(n.X)
w.typ(n.Type())
case ir.OINDEX, ir.OINDEXMAP:
n := n.(*ir.IndexExpr)
w.op(ir.OINDEX)
w.pos(n.Pos())
- w.expr(n.Left())
- w.expr(n.Right())
+ w.expr(n.X)
+ w.expr(n.Index)
case ir.OSLICE, ir.OSLICESTR, ir.OSLICEARR:
n := n.(*ir.SliceExpr)
w.op(ir.OSLICE)
w.pos(n.Pos())
- w.expr(n.Left())
+ w.expr(n.X)
low, high, _ := n.SliceBounds()
w.exprsOrNil(low, high)
@@ -1393,7 +1393,7 @@ func (w *exportWriter) expr(n ir.Node) {
n := n.(*ir.SliceExpr)
w.op(ir.OSLICE3)
w.pos(n.Pos())
- w.expr(n.Left())
+ w.expr(n.X)
low, high, max := n.SliceBounds()
w.exprsOrNil(low, high)
w.expr(max)
@@ -1403,33 +1403,33 @@ func (w *exportWriter) expr(n ir.Node) {
n := n.(*ir.BinaryExpr)
w.op(n.Op())
w.pos(n.Pos())
- w.expr(n.Left())
- w.expr(n.Right())
+ w.expr(n.X)
+ w.expr(n.Y)
w.op(ir.OEND)
case ir.OCONV, ir.OCONVIFACE, ir.OCONVNOP, ir.OBYTES2STR, ir.ORUNES2STR, ir.OSTR2BYTES, ir.OSTR2RUNES, ir.ORUNESTR:
n := n.(*ir.ConvExpr)
w.op(ir.OCONV)
w.pos(n.Pos())
- w.expr(n.Left())
+ w.expr(n.X)
w.typ(n.Type())
case ir.OREAL, ir.OIMAG, ir.OCAP, ir.OCLOSE, ir.OLEN, ir.ONEW, ir.OPANIC:
n := n.(*ir.UnaryExpr)
w.op(n.Op())
w.pos(n.Pos())
- w.expr(n.Left())
+ w.expr(n.X)
w.op(ir.OEND)
case ir.OAPPEND, ir.ODELETE, ir.ORECOVER, ir.OPRINT, ir.OPRINTN:
n := n.(*ir.CallExpr)
w.op(n.Op())
w.pos(n.Pos())
- w.exprList(n.List()) // emits terminating OEND
+ w.exprList(n.Args) // emits terminating OEND
// only append() calls may contain '...' arguments
if n.Op() == ir.OAPPEND {
- w.bool(n.IsDDD())
- } else if n.IsDDD() {
+ w.bool(n.IsDDD)
+ } else if n.IsDDD {
base.Fatalf("exporter: unexpected '...' with %v call", n.Op())
}
@@ -1438,9 +1438,9 @@ func (w *exportWriter) expr(n ir.Node) {
w.op(ir.OCALL)
w.pos(n.Pos())
w.stmtList(n.Init())
- w.expr(n.Left())
- w.exprList(n.List())
- w.bool(n.IsDDD())
+ w.expr(n.X)
+ w.exprList(n.Args)
+ w.bool(n.IsDDD)
case ir.OMAKEMAP, ir.OMAKECHAN, ir.OMAKESLICE:
n := n.(*ir.MakeExpr)
@@ -1451,12 +1451,12 @@ func (w *exportWriter) expr(n ir.Node) {
default:
// empty list
w.op(ir.OEND)
- case n.Right() != nil:
- w.expr(n.Left())
- w.expr(n.Right())
+ case n.Cap != nil:
+ w.expr(n.Len)
+ w.expr(n.Cap)
w.op(ir.OEND)
- case n.Left() != nil && (n.Op() == ir.OMAKESLICE || !n.Left().Type().IsUntyped()):
- w.expr(n.Left())
+ case n.Len != nil && (n.Op() == ir.OMAKESLICE || !n.Len.Type().IsUntyped()):
+ w.expr(n.Len)
w.op(ir.OEND)
}
@@ -1465,26 +1465,26 @@ func (w *exportWriter) expr(n ir.Node) {
n := n.(*ir.UnaryExpr)
w.op(n.Op())
w.pos(n.Pos())
- w.expr(n.Left())
+ w.expr(n.X)
case ir.OADDR:
n := n.(*ir.AddrExpr)
w.op(n.Op())
w.pos(n.Pos())
- w.expr(n.Left())
+ w.expr(n.X)
case ir.ODEREF:
n := n.(*ir.StarExpr)
w.op(n.Op())
w.pos(n.Pos())
- w.expr(n.Left())
+ w.expr(n.X)
case ir.OSEND:
n := n.(*ir.SendStmt)
w.op(n.Op())
w.pos(n.Pos())
- w.expr(n.Left())
- w.expr(n.Right())
+ w.expr(n.Chan)
+ w.expr(n.Value)
// binary expressions
case ir.OADD, ir.OAND, ir.OANDNOT, ir.ODIV, ir.OEQ, ir.OGE, ir.OGT, ir.OLE, ir.OLT,
@@ -1492,21 +1492,21 @@ func (w *exportWriter) expr(n ir.Node) {
n := n.(*ir.BinaryExpr)
w.op(n.Op())
w.pos(n.Pos())
- w.expr(n.Left())
- w.expr(n.Right())
+ w.expr(n.X)
+ w.expr(n.Y)
case ir.OANDAND, ir.OOROR:
n := n.(*ir.LogicalExpr)
w.op(n.Op())
w.pos(n.Pos())
- w.expr(n.Left())
- w.expr(n.Right())
+ w.expr(n.X)
+ w.expr(n.Y)
case ir.OADDSTR:
n := n.(*ir.AddStringExpr)
w.op(ir.OADDSTR)
w.pos(n.Pos())
- w.exprList(n.List())
+ w.exprList(n.List)
case ir.ODCLCONST:
// if exporting, DCLCONST should just be removed as its usage
@@ -1543,8 +1543,8 @@ func (w *exportWriter) fieldList(list ir.Nodes) {
w.uint64(uint64(list.Len()))
for _, n := range list.Slice() {
n := n.(*ir.StructKeyExpr)
- w.selector(n.Sym())
- w.expr(n.Left())
+ w.selector(n.Field)
+ w.expr(n.Value)
}
}
@@ -1557,7 +1557,7 @@ func (w *exportWriter) localName(n *ir.Name) {
// PPARAM/PPARAMOUT, because we only want to include vargen in
// non-param names.
var v int32
- if n.Class() == ir.PAUTO || (n.Class() == ir.PAUTOHEAP && n.Name().Stackcopy == nil) {
+ if n.Class_ == ir.PAUTO || (n.Class_ == ir.PAUTOHEAP && n.Name().Stackcopy == nil) {
v = n.Name().Vargen
}
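
The exporter's isNamedTypeSwitch, earlier in this file's hunks, is about guards that declare a variable. For reference:

	func classify(x interface{}) string {
		switch v := x.(type) { // named guard: TypeSwitchGuard.Tag != nil
		case int:
			_ = v
			return "int"
		default:
			return "other"
		}
	}
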
diff --git a/src/cmd/compile/internal/gc/iimport.go b/src/cmd/compile/internal/gc/iimport.go
index 40f76cae7b..4f460d54a2 100644
--- a/src/cmd/compile/internal/gc/iimport.go
+++ b/src/cmd/compile/internal/gc/iimport.go
@@ -329,7 +329,7 @@ func (r *importReader) doDecl(sym *types.Sym) *ir.Name {
fn.SetType(mtyp)
m := newFuncNameAt(mpos, methodSym(recv.Type, msym), fn)
m.SetType(mtyp)
- m.SetClass(ir.PFUNC)
+ m.Class_ = ir.PFUNC
// methodSym already marked m.Sym as a function.
f := types.NewField(mpos, msym, mtyp)
@@ -643,10 +643,10 @@ func (r *importReader) funcExt(n *ir.Name) {
// Inline body.
if u := r.uint64(); u > 0 {
- n.Func().Inl = &ir.Inline{
+ n.Func.Inl = &ir.Inline{
Cost: int32(u - 1),
}
- n.Func().Endlineno = r.pos()
+ n.Func.Endlineno = r.pos()
}
}
@@ -757,7 +757,7 @@ func (r *importReader) stmtList() []ir.Node {
// Inline them into the statement list.
if n.Op() == ir.OBLOCK {
n := n.(*ir.BlockStmt)
- list = append(list, n.List().Slice()...)
+ list = append(list, n.List.Slice()...)
} else {
list = append(list, n)
}
@@ -772,17 +772,17 @@ func (r *importReader) caseList(sw ir.Node) []ir.Node {
cases := make([]ir.Node, r.uint64())
for i := range cases {
cas := ir.NewCaseStmt(r.pos(), nil, nil)
- cas.PtrList().Set(r.stmtList())
+ cas.List.Set(r.stmtList())
if namedTypeSwitch {
// Note: per-case variables will have distinct, dotted
// names after import. That's okay: swt.go only needs
// Sym for diagnostics anyway.
caseVar := ir.NewNameAt(cas.Pos(), r.ident())
declare(caseVar, dclcontext)
- cas.PtrRlist().Set1(caseVar)
- caseVar.Defn = sw.(*ir.SwitchStmt).Left()
+ cas.Vars.Set1(caseVar)
+ caseVar.Defn = sw.(*ir.SwitchStmt).Tag
}
- cas.PtrBody().Set(r.stmtList())
+ cas.Body.Set(r.stmtList())
cases[i] = cas
}
return cases
@@ -867,7 +867,7 @@ func (r *importReader) node() ir.Node {
savedlineno := base.Pos
base.Pos = r.pos()
n := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(r.typ()).(ir.Ntype), nil)
- n.PtrList().Set(r.elemList()) // special handling of field names
+ n.List.Set(r.elemList()) // special handling of field names
base.Pos = savedlineno
return n
@@ -876,7 +876,7 @@ func (r *importReader) node() ir.Node {
case ir.OCOMPLIT:
n := ir.NewCompLitExpr(r.pos(), ir.OCOMPLIT, ir.TypeNode(r.typ()).(ir.Ntype), nil)
- n.PtrList().Set(r.exprList())
+ n.List.Set(r.exprList())
return n
case ir.OKEY:
@@ -931,9 +931,9 @@ func (r *importReader) node() ir.Node {
case ir.OCOPY, ir.OCOMPLEX, ir.OREAL, ir.OIMAG, ir.OAPPEND, ir.OCAP, ir.OCLOSE, ir.ODELETE, ir.OLEN, ir.OMAKE, ir.ONEW, ir.OPANIC, ir.ORECOVER, ir.OPRINT, ir.OPRINTN:
n := builtinCall(r.pos(), op)
- n.PtrList().Set(r.exprList())
+ n.Args.Set(r.exprList())
if op == ir.OAPPEND {
- n.SetIsDDD(r.bool())
+ n.IsDDD = r.bool()
}
return n
@@ -943,15 +943,15 @@ func (r *importReader) node() ir.Node {
case ir.OCALL:
n := ir.NewCallExpr(r.pos(), ir.OCALL, nil, nil)
n.PtrInit().Set(r.stmtList())
- n.SetLeft(r.expr())
- n.PtrList().Set(r.exprList())
- n.SetIsDDD(r.bool())
+ n.X = r.expr()
+ n.Args.Set(r.exprList())
+ n.IsDDD = r.bool()
return n
case ir.OMAKEMAP, ir.OMAKECHAN, ir.OMAKESLICE:
n := builtinCall(r.pos(), ir.OMAKE)
- n.PtrList().Append(ir.TypeNode(r.typ()))
- n.PtrList().Append(r.exprList()...)
+ n.Args.Append(ir.TypeNode(r.typ()))
+ n.Args.Append(r.exprList()...)
return n
// unary expressions
@@ -1006,13 +1006,13 @@ func (r *importReader) node() ir.Node {
case ir.OASOP:
n := ir.NewAssignOpStmt(r.pos(), ir.OXXX, nil, nil)
- n.SetSubOp(r.op())
- n.SetLeft(r.expr())
+ n.AsOp = r.op()
+ n.X = r.expr()
if !r.bool() {
- n.SetRight(nodintconst(1))
- n.SetImplicit(true)
+ n.Y = nodintconst(1)
+ n.IncDec = true
} else {
- n.SetRight(r.expr())
+ n.Y = r.expr()
}
return n
@@ -1021,13 +1021,13 @@ func (r *importReader) node() ir.Node {
case ir.OAS2:
n := ir.NewAssignListStmt(r.pos(), ir.OAS2, nil, nil)
- n.PtrList().Set(r.exprList())
- n.PtrRlist().Set(r.exprList())
+ n.Lhs.Set(r.exprList())
+ n.Rhs.Set(r.exprList())
return n
case ir.ORETURN:
n := ir.NewReturnStmt(r.pos(), nil)
- n.PtrList().Set(r.exprList())
+ n.Results.Set(r.exprList())
return n
// case ORETJMP:
@@ -1039,40 +1039,40 @@ func (r *importReader) node() ir.Node {
case ir.OIF:
n := ir.NewIfStmt(r.pos(), nil, nil, nil)
n.PtrInit().Set(r.stmtList())
- n.SetLeft(r.expr())
- n.PtrBody().Set(r.stmtList())
- n.PtrRlist().Set(r.stmtList())
+ n.Cond = r.expr()
+ n.Body.Set(r.stmtList())
+ n.Else.Set(r.stmtList())
return n
case ir.OFOR:
n := ir.NewForStmt(r.pos(), nil, nil, nil, nil)
n.PtrInit().Set(r.stmtList())
left, right := r.exprsOrNil()
- n.SetLeft(left)
- n.SetRight(right)
- n.PtrBody().Set(r.stmtList())
+ n.Cond = left
+ n.Post = right
+ n.Body.Set(r.stmtList())
return n
case ir.ORANGE:
n := ir.NewRangeStmt(r.pos(), nil, nil, nil)
- n.PtrList().Set(r.stmtList())
- n.SetRight(r.expr())
- n.PtrBody().Set(r.stmtList())
+ n.Vars.Set(r.stmtList())
+ n.X = r.expr()
+ n.Body.Set(r.stmtList())
return n
case ir.OSELECT:
n := ir.NewSelectStmt(r.pos(), nil)
n.PtrInit().Set(r.stmtList())
r.exprsOrNil() // TODO(rsc): Delete (and fix exporter). These are always nil.
- n.PtrList().Set(r.caseList(n))
+ n.Cases.Set(r.caseList(n))
return n
case ir.OSWITCH:
n := ir.NewSwitchStmt(r.pos(), nil, nil)
n.PtrInit().Set(r.stmtList())
left, _ := r.exprsOrNil()
- n.SetLeft(left)
- n.PtrList().Set(r.caseList(n))
+ n.Tag = left
+ n.Cases.Set(r.caseList(n))
return n
// case OCASE:
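
The OASOP decoding above reconstructs increment statements. In source terms:

	func bump(x int) int {
		x++    // imported as OASOP x += 1 with IncDec set (the 1 is implied)
		x += 2 // ordinary OASOP: the right-hand operand is read from the export data
		return x
	}
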
diff --git a/src/cmd/compile/internal/gc/init.go b/src/cmd/compile/internal/gc/init.go
index 1c15ce1318..fbc88411cc 100644
--- a/src/cmd/compile/internal/gc/init.go
+++ b/src/cmd/compile/internal/gc/init.go
@@ -45,7 +45,7 @@ func fninit() *ir.Name {
if n.Op() == ir.ONONAME {
continue
}
- if n.Op() != ir.ONAME || n.(*ir.Name).Class() != ir.PEXTERN {
+ if n.Op() != ir.ONAME || n.(*ir.Name).Class_ != ir.PEXTERN {
base.Fatalf("bad inittask: %v", n)
}
deps = append(deps, n.(*ir.Name).Sym().Linksym())
@@ -62,7 +62,7 @@ func fninit() *ir.Name {
fn.Dcl = append(fn.Dcl, initTodo.Dcl...)
initTodo.Dcl = nil
- fn.PtrBody().Set(nf)
+ fn.Body.Set(nf)
funcbody()
typecheckFunc(fn)
@@ -83,8 +83,8 @@ func fninit() *ir.Name {
// Record user init functions.
for _, fn := range Target.Inits {
// Skip init functions with empty bodies.
- if fn.Body().Len() == 1 {
- if stmt := fn.Body().First(); stmt.Op() == ir.OBLOCK && stmt.(*ir.BlockStmt).List().Len() == 0 {
+ if fn.Body.Len() == 1 {
+ if stmt := fn.Body.First(); stmt.Op() == ir.OBLOCK && stmt.(*ir.BlockStmt).List.Len() == 0 {
continue
}
}
@@ -99,7 +99,7 @@ func fninit() *ir.Name {
sym := lookup(".inittask")
task := NewName(sym)
task.SetType(types.Types[types.TUINT8]) // fake type
- task.SetClass(ir.PEXTERN)
+ task.Class_ = ir.PEXTERN
sym.Def = task
lsym := sym.Linksym()
ot := 0
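
The .inittask built here is what the runtime walks to run package initialization. A reminder of the source-level semantics it implements:

	var a = b + 1 // package-level variables initialize in dependency order

	var b = 2

	func init() { // compiled into the generated init function recorded in .inittask
		println(a) // prints 3
	}
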
diff --git a/src/cmd/compile/internal/gc/initorder.go b/src/cmd/compile/internal/gc/initorder.go
index f99c6dd72c..ec3d7be45f 100644
--- a/src/cmd/compile/internal/gc/initorder.go
+++ b/src/cmd/compile/internal/gc/initorder.go
@@ -139,7 +139,7 @@ func (o *InitOrder) processAssign(n ir.Node) {
defn := dep.Defn
// Skip dependencies on functions (PFUNC) and
// variables already initialized (InitDone).
- if dep.Class() != ir.PEXTERN || o.order[defn] == orderDone {
+ if dep.Class_ != ir.PEXTERN || o.order[defn] == orderDone {
continue
}
o.order[n]++
@@ -203,7 +203,7 @@ func (o *InitOrder) findInitLoopAndExit(n *ir.Name, path *[]*ir.Name) {
*path = append(*path, n)
for _, ref := range refers {
// Short-circuit variables that were initialized.
- if ref.Class() == ir.PEXTERN && o.order[ref.Defn] == orderDone {
+ if ref.Class_ == ir.PEXTERN && o.order[ref.Defn] == orderDone {
continue
}
@@ -220,7 +220,7 @@ func reportInitLoopAndExit(l []*ir.Name) {
// the start.
i := -1
for j, n := range l {
- if n.Class() == ir.PEXTERN && (i == -1 || n.Pos().Before(l[i].Pos())) {
+ if n.Class_ == ir.PEXTERN && (i == -1 || n.Pos().Before(l[i].Pos())) {
i = j
}
}
@@ -255,13 +255,13 @@ func collectDeps(n ir.Node, transitive bool) ir.NameSet {
switch n.Op() {
case ir.OAS:
n := n.(*ir.AssignStmt)
- d.inspect(n.Right())
+ d.inspect(n.Y)
case ir.OAS2DOTTYPE, ir.OAS2FUNC, ir.OAS2MAPR, ir.OAS2RECV:
n := n.(*ir.AssignListStmt)
- d.inspect(n.Rlist().First())
+ d.inspect(n.Rhs.First())
case ir.ODCLFUNC:
n := n.(*ir.Func)
- d.inspectList(n.Body())
+ d.inspectList(n.Body)
default:
base.Fatalf("unexpected Op: %v", n.Op())
}
@@ -294,14 +294,14 @@ func (d *initDeps) visit(n ir.Node) {
case ir.ONAME:
n := n.(*ir.Name)
- switch n.Class() {
+ switch n.Class_ {
case ir.PEXTERN, ir.PFUNC:
d.foundDep(n)
}
case ir.OCLOSURE:
n := n.(*ir.ClosureExpr)
- d.inspectList(n.Func().Body())
+ d.inspectList(n.Func.Body)
case ir.ODOTMETH, ir.OCALLPART:
d.foundDep(methodExprName(n))
@@ -327,8 +327,8 @@ func (d *initDeps) foundDep(n *ir.Name) {
return
}
d.seen.Add(n)
- if d.transitive && n.Class() == ir.PFUNC {
- d.inspectList(n.Defn.(*ir.Func).Body())
+ if d.transitive && n.Class_ == ir.PFUNC {
+ d.inspectList(n.Defn.(*ir.Func).Body)
}
}
@@ -360,10 +360,10 @@ func firstLHS(n ir.Node) *ir.Name {
switch n.Op() {
case ir.OAS:
n := n.(*ir.AssignStmt)
- return n.Left().Name()
+ return n.X.Name()
case ir.OAS2DOTTYPE, ir.OAS2FUNC, ir.OAS2RECV, ir.OAS2MAPR:
n := n.(*ir.AssignListStmt)
- return n.List().First().Name()
+ return n.Lhs.First().Name()
}
base.Fatalf("unexpected Op: %v", n.Op())
diff --git a/src/cmd/compile/internal/gc/inl.go b/src/cmd/compile/internal/gc/inl.go
index 7cb7946806..edb2c5bb42 100644
--- a/src/cmd/compile/internal/gc/inl.go
+++ b/src/cmd/compile/internal/gc/inl.go
@@ -196,7 +196,7 @@ func caninl(fn *ir.Func) {
}
// If fn has no body (is defined outside of Go), cannot inline it.
- if fn.Body().Len() == 0 {
+ if fn.Body.Len() == 0 {
reason = "no function body"
return
}
@@ -206,10 +206,10 @@ func caninl(fn *ir.Func) {
}
n := fn.Nname
- if n.Func().InlinabilityChecked() {
+ if n.Func.InlinabilityChecked() {
return
}
- defer n.Func().SetInlinabilityChecked(true)
+ defer n.Func.SetInlinabilityChecked(true)
cc := int32(inlineExtraCallCost)
if base.Flag.LowerL == 4 {
@@ -235,14 +235,14 @@ func caninl(fn *ir.Func) {
return
}
- n.Func().Inl = &ir.Inline{
+ n.Func.Inl = &ir.Inline{
Cost: inlineMaxBudget - visitor.budget,
- Dcl: pruneUnusedAutos(n.Defn.(*ir.Func).Func().Dcl, &visitor),
- Body: ir.DeepCopyList(src.NoXPos, fn.Body().Slice()),
+ Dcl: pruneUnusedAutos(n.Defn.(*ir.Func).Dcl, &visitor),
+ Body: ir.DeepCopyList(src.NoXPos, fn.Body.Slice()),
}
if base.Flag.LowerM > 1 {
- fmt.Printf("%v: can inline %v with cost %d as: %v { %v }\n", ir.Line(fn), n, inlineMaxBudget-visitor.budget, fn.Type(), ir.AsNodes(n.Func().Inl.Body))
+ fmt.Printf("%v: can inline %v with cost %d as: %v { %v }\n", ir.Line(fn), n, inlineMaxBudget-visitor.budget, fn.Type(), ir.AsNodes(n.Func.Inl.Body))
} else if base.Flag.LowerM != 0 {
fmt.Printf("%v: can inline %v\n", ir.Line(fn), n)
}
@@ -257,10 +257,10 @@ func inlFlood(n *ir.Name, exportsym func(*ir.Name)) {
if n == nil {
return
}
- if n.Op() != ir.ONAME || n.Class() != ir.PFUNC {
- base.Fatalf("inlFlood: unexpected %v, %v, %v", n, n.Op(), n.Class())
+ if n.Op() != ir.ONAME || n.Class_ != ir.PFUNC {
+ base.Fatalf("inlFlood: unexpected %v, %v, %v", n, n.Op(), n.Class_)
}
- fn := n.Func()
+ fn := n.Func
if fn == nil {
base.Fatalf("inlFlood: missing Func on %v", n)
}
@@ -285,7 +285,7 @@ func inlFlood(n *ir.Name, exportsym func(*ir.Name)) {
case ir.ONAME:
n := n.(*ir.Name)
- switch n.Class() {
+ switch n.Class_ {
case ir.PFUNC:
inlFlood(n, exportsym)
exportsym(n)
@@ -348,9 +348,9 @@ func (v *hairyVisitor) doNode(n ir.Node) error {
// because getcaller{pc,sp} expect a pointer to the caller's first argument.
//
// runtime.throw is a "cheap call" like panic in normal code.
- if n.Left().Op() == ir.ONAME {
- name := n.Left().(*ir.Name)
- if name.Class() == ir.PFUNC && isRuntimePkg(name.Sym().Pkg) {
+ if n.X.Op() == ir.ONAME {
+ name := n.X.(*ir.Name)
+ if name.Class_ == ir.PFUNC && isRuntimePkg(name.Sym().Pkg) {
fn := name.Sym().Name
if fn == "getcallerpc" || fn == "getcallersp" {
return errors.New("call to " + fn)
@@ -367,7 +367,7 @@ func (v *hairyVisitor) doNode(n ir.Node) error {
break
}
- if fn := inlCallee(n.Left()); fn != nil && fn.Inl != nil {
+ if fn := inlCallee(n.X); fn != nil && fn.Inl != nil {
v.budget -= fn.Inl.Cost
break
}
@@ -378,12 +378,12 @@ func (v *hairyVisitor) doNode(n ir.Node) error {
// Call is okay if inlinable and we have the budget for the body.
case ir.OCALLMETH:
n := n.(*ir.CallExpr)
- t := n.Left().Type()
+ t := n.X.Type()
if t == nil {
- base.Fatalf("no function type for [%p] %+v\n", n.Left(), n.Left())
+ base.Fatalf("no function type for [%p] %+v\n", n.X, n.X)
}
- if isRuntimePkg(n.Left().Sym().Pkg) {
- fn := n.Left().Sym().Name
+ if isRuntimePkg(n.X.Sym().Pkg) {
+ fn := n.X.Sym().Name
if fn == "heapBits.nextArena" {
// Special case: explicitly allow
// mid-stack inlining of
@@ -393,7 +393,7 @@ func (v *hairyVisitor) doNode(n ir.Node) error {
break
}
}
- if inlfn := methodExprName(n.Left()).Func(); inlfn.Inl != nil {
+ if inlfn := methodExprName(n.X).Func; inlfn.Inl != nil {
v.budget -= inlfn.Inl.Cost
break
}
@@ -431,35 +431,35 @@ func (v *hairyVisitor) doNode(n ir.Node) error {
case ir.OFOR, ir.OFORUNTIL:
n := n.(*ir.ForStmt)
- if n.Sym() != nil {
+ if n.Label != nil {
return errors.New("labeled control")
}
case ir.OSWITCH:
n := n.(*ir.SwitchStmt)
- if n.Sym() != nil {
+ if n.Label != nil {
return errors.New("labeled control")
}
// case ir.ORANGE, ir.OSELECT in "unhandled" above
case ir.OBREAK, ir.OCONTINUE:
n := n.(*ir.BranchStmt)
- if n.Sym() != nil {
+ if n.Label != nil {
// Should have short-circuited due to labeled control error above.
base.Fatalf("unexpected labeled break/continue: %v", n)
}
case ir.OIF:
n := n.(*ir.IfStmt)
- if ir.IsConst(n.Left(), constant.Bool) {
+ if ir.IsConst(n.Cond, constant.Bool) {
// This if and the condition cost nothing.
// TODO(rsc): It seems strange that we visit the dead branch.
if err := ir.DoList(n.Init(), v.do); err != nil {
return err
}
- if err := ir.DoList(n.Body(), v.do); err != nil {
+ if err := ir.DoList(n.Body, v.do); err != nil {
return err
}
- if err := ir.DoList(n.Rlist(), v.do); err != nil {
+ if err := ir.DoList(n.Else, v.do); err != nil {
return err
}
return nil
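
The "labeled control" check above is what keeps functions like this sketch out of the inliner:

	func spin() {
	loop: // labeled for/switch: hairyVisitor returns "labeled control"
		for {
			break loop
		}
	}
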
@@ -467,7 +467,7 @@ func (v *hairyVisitor) doNode(n ir.Node) error {
case ir.ONAME:
n := n.(*ir.Name)
- if n.Class() == ir.PAUTO {
+ if n.Class_ == ir.PAUTO {
v.usedLocals[n] = true
}
@@ -526,8 +526,8 @@ func inlcalls(fn *ir.Func) {
// Turn an OINLCALL into a statement.
func inlconv2stmt(inlcall *ir.InlinedCallExpr) ir.Node {
n := ir.NewBlockStmt(inlcall.Pos(), nil)
- n.SetList(inlcall.Init())
- n.PtrList().AppendNodes(inlcall.PtrBody())
+ n.List = inlcall.Init()
+ n.List.AppendNodes(&inlcall.Body)
return n
}
@@ -535,8 +535,8 @@ func inlconv2stmt(inlcall *ir.InlinedCallExpr) ir.Node {
// The result of inlconv2expr MUST be assigned back to n, e.g.
// n.Left = inlconv2expr(n.Left)
func inlconv2expr(n *ir.InlinedCallExpr) ir.Node {
- r := n.Rlist().First()
- return initExpr(append(n.Init().Slice(), n.Body().Slice()...), r)
+ r := n.ReturnVars.First()
+ return initExpr(append(n.Init().Slice(), n.Body.Slice()...), r)
}
// Turn the rlist (with the return values) of the OINLCALL in
@@ -545,12 +545,12 @@ func inlconv2expr(n *ir.InlinedCallExpr) ir.Node {
// order will be preserved. Used in return, oas2func and call
// statements.
func inlconv2list(n *ir.InlinedCallExpr) []ir.Node {
- if n.Op() != ir.OINLCALL || n.Rlist().Len() == 0 {
+ if n.Op() != ir.OINLCALL || n.ReturnVars.Len() == 0 {
base.Fatalf("inlconv2list %+v\n", n)
}
- s := n.Rlist().Slice()
- s[0] = initExpr(append(n.Init().Slice(), n.Body().Slice()...), s[0])
+ s := n.ReturnVars.Slice()
+ s[0] = initExpr(append(n.Init().Slice(), n.Body.Slice()...), s[0])
return s
}
@@ -575,10 +575,10 @@ func inlnode(n ir.Node, maxCost int32, inlMap map[*ir.Func]bool, edit func(ir.No
switch n.Op() {
case ir.ODEFER, ir.OGO:
n := n.(*ir.GoDeferStmt)
- switch call := n.Left(); call.Op() {
+ switch call := n.Call; call.Op() {
case ir.OCALLFUNC, ir.OCALLMETH:
call := call.(*ir.CallExpr)
- call.SetNoInline(true)
+ call.NoInline = true
}
// TODO do them here (or earlier),
@@ -589,7 +589,7 @@ func inlnode(n ir.Node, maxCost int32, inlMap map[*ir.Func]bool, edit func(ir.No
// Prevent inlining some reflect.Value methods when using checkptr,
// even when package reflect was compiled without it (#35073).
n := n.(*ir.CallExpr)
- if s := n.Left().Sym(); base.Debug.Checkptr != 0 && isReflectPkg(s.Pkg) && (s.Name == "Value.UnsafeAddr" || s.Name == "Value.Pointer") {
+ if s := n.X.Sym(); base.Debug.Checkptr != 0 && isReflectPkg(s.Pkg) && (s.Name == "Value.UnsafeAddr" || s.Name == "Value.Pointer") {
return n
}
}
@@ -600,8 +600,8 @@ func inlnode(n ir.Node, maxCost int32, inlMap map[*ir.Func]bool, edit func(ir.No
if as := n; as.Op() == ir.OAS2FUNC {
as := as.(*ir.AssignListStmt)
- if as.Rlist().First().Op() == ir.OINLCALL {
- as.PtrRlist().Set(inlconv2list(as.Rlist().First().(*ir.InlinedCallExpr)))
+ if as.Rhs.First().Op() == ir.OINLCALL {
+ as.Rhs.Set(inlconv2list(as.Rhs.First().(*ir.InlinedCallExpr)))
as.SetOp(ir.OAS2)
as.SetTypecheck(0)
n = typecheck(as, ctxStmt)
@@ -614,7 +614,7 @@ func inlnode(n ir.Node, maxCost int32, inlMap map[*ir.Func]bool, edit func(ir.No
switch n.Op() {
case ir.OCALLFUNC, ir.OCALLMETH:
n := n.(*ir.CallExpr)
- if n.NoInline() {
+ if n.NoInline {
return n
}
}
@@ -624,27 +624,27 @@ func inlnode(n ir.Node, maxCost int32, inlMap map[*ir.Func]bool, edit func(ir.No
case ir.OCALLFUNC:
call = n.(*ir.CallExpr)
if base.Flag.LowerM > 3 {
- fmt.Printf("%v:call to func %+v\n", ir.Line(n), call.Left())
+ fmt.Printf("%v:call to func %+v\n", ir.Line(n), call.X)
}
if IsIntrinsicCall(call) {
break
}
- if fn := inlCallee(call.Left()); fn != nil && fn.Inl != nil {
+ if fn := inlCallee(call.X); fn != nil && fn.Inl != nil {
n = mkinlcall(call, fn, maxCost, inlMap, edit)
}
case ir.OCALLMETH:
call = n.(*ir.CallExpr)
if base.Flag.LowerM > 3 {
- fmt.Printf("%v:call to meth %v\n", ir.Line(n), call.Left().(*ir.SelectorExpr).Sel)
+ fmt.Printf("%v:call to meth %v\n", ir.Line(n), call.X.(*ir.SelectorExpr).Sel)
}
// typecheck should have resolved ODOTMETH->type, whose nname points to the actual function.
- if call.Left().Type() == nil {
- base.Fatalf("no function type for [%p] %+v\n", call.Left(), call.Left())
+ if call.X.Type() == nil {
+ base.Fatalf("no function type for [%p] %+v\n", call.X, call.X)
}
- n = mkinlcall(call, methodExprName(call.Left()).Func(), maxCost, inlMap, edit)
+ n = mkinlcall(call, methodExprName(call.X).Func, maxCost, inlMap, edit)
}
base.Pos = lno
@@ -681,15 +681,15 @@ func inlCallee(fn ir.Node) *ir.Func {
if n == nil || !types.Identical(n.Type().Recv().Type, fn.T) {
return nil
}
- return n.Func()
+ return n.Func
case ir.ONAME:
fn := fn.(*ir.Name)
- if fn.Class() == ir.PFUNC {
- return fn.Func()
+ if fn.Class_ == ir.PFUNC {
+ return fn.Func
}
case ir.OCLOSURE:
fn := fn.(*ir.ClosureExpr)
- c := fn.Func()
+ c := fn.Func
caninl(c)
return c
}
@@ -699,7 +699,7 @@ func inlCallee(fn ir.Node) *ir.Func {
func staticValue(n ir.Node) ir.Node {
for {
if n.Op() == ir.OCONVNOP {
- n = n.(*ir.ConvExpr).Left()
+ n = n.(*ir.ConvExpr).X
continue
}
@@ -719,7 +719,7 @@ func staticValue1(nn ir.Node) ir.Node {
return nil
}
n := nn.(*ir.Name)
- if n.Class() != ir.PAUTO || n.Name().Addrtaken() {
+ if n.Class_ != ir.PAUTO || n.Name().Addrtaken() {
return nil
}
@@ -733,12 +733,12 @@ FindRHS:
switch defn.Op() {
case ir.OAS:
defn := defn.(*ir.AssignStmt)
- rhs = defn.Right()
+ rhs = defn.Y
case ir.OAS2:
defn := defn.(*ir.AssignListStmt)
- for i, lhs := range defn.List().Slice() {
+ for i, lhs := range defn.Lhs.Slice() {
if lhs == n {
- rhs = defn.Rlist().Index(i)
+ rhs = defn.Rhs.Index(i)
break FindRHS
}
}
@@ -775,12 +775,12 @@ func reassigned(name *ir.Name) bool {
switch n.Op() {
case ir.OAS:
n := n.(*ir.AssignStmt)
- if n.Left() == name && n != name.Defn {
+ if n.X == name && n != name.Defn {
return true
}
case ir.OAS2, ir.OAS2FUNC, ir.OAS2MAPR, ir.OAS2DOTTYPE, ir.OAS2RECV, ir.OSELRECV2:
n := n.(*ir.AssignListStmt)
- for _, p := range n.List().Slice() {
+ for _, p := range n.Lhs.Slice() {
if p == name && n != name.Defn {
return true
}
@@ -887,11 +887,11 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
// inlconv2expr or inlconv2list). Make sure to preserve these,
// if necessary (#42703).
if n.Op() == ir.OCALLFUNC {
- callee := n.Left()
+ callee := n.X
for callee.Op() == ir.OCONVNOP {
conv := callee.(*ir.ConvExpr)
ninit.AppendNodes(conv.PtrInit())
- callee = conv.Left()
+ callee = conv.X
}
if callee.Op() != ir.ONAME && callee.Op() != ir.OCLOSURE && callee.Op() != ir.OMETHEXPR {
base.Fatalf("unexpected callee expression: %v", callee)
@@ -944,7 +944,7 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
if ln.Op() != ir.ONAME {
continue
}
- if ln.Class() == ir.PPARAMOUT { // return values handled below.
+ if ln.Class_ == ir.PPARAMOUT { // return values handled below.
continue
}
if isParamStackCopy(ln) { // ignore the on-stack copy of a parameter that moved to the heap
@@ -957,7 +957,7 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
inlf := typecheck(inlvar(ln), ctxExpr)
inlvars[ln] = inlf
if base.Flag.GenDwarfInl > 0 {
- if ln.Class() == ir.PPARAM {
+ if ln.Class_ == ir.PPARAM {
inlf.Name().SetInlFormal(true)
} else {
inlf.Name().SetInlLocal(true)
@@ -1010,54 +1010,54 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
// Assign arguments to the parameters' temp names.
as := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
- as.SetColas(true)
+ as.Def = true
if n.Op() == ir.OCALLMETH {
- sel := n.Left().(*ir.SelectorExpr)
- if sel.Left() == nil {
+ sel := n.X.(*ir.SelectorExpr)
+ if sel.X == nil {
base.Fatalf("method call without receiver: %+v", n)
}
- as.PtrRlist().Append(sel.Left())
+ as.Rhs.Append(sel.X)
}
- as.PtrRlist().Append(n.List().Slice()...)
+ as.Rhs.Append(n.Args.Slice()...)
// For non-dotted calls to variadic functions, we assign the
// variadic parameter's temp name separately.
var vas *ir.AssignStmt
if recv := fn.Type().Recv(); recv != nil {
- as.PtrList().Append(inlParam(recv, as, inlvars))
+ as.Lhs.Append(inlParam(recv, as, inlvars))
}
for _, param := range fn.Type().Params().Fields().Slice() {
// For ordinary parameters or variadic parameters in
// dotted calls, just add the variable to the
// assignment list, and we're done.
- if !param.IsDDD() || n.IsDDD() {
- as.PtrList().Append(inlParam(param, as, inlvars))
+ if !param.IsDDD() || n.IsDDD {
+ as.Lhs.Append(inlParam(param, as, inlvars))
continue
}
// Otherwise, we need to collect the remaining values
// to pass as a slice.
- x := as.List().Len()
- for as.List().Len() < as.Rlist().Len() {
- as.PtrList().Append(argvar(param.Type, as.List().Len()))
+ x := as.Lhs.Len()
+ for as.Lhs.Len() < as.Rhs.Len() {
+ as.Lhs.Append(argvar(param.Type, as.Lhs.Len()))
}
- varargs := as.List().Slice()[x:]
+ varargs := as.Lhs.Slice()[x:]
vas = ir.NewAssignStmt(base.Pos, nil, nil)
- vas.SetLeft(inlParam(param, vas, inlvars))
+ vas.X = inlParam(param, vas, inlvars)
if len(varargs) == 0 {
- vas.SetRight(nodnil())
- vas.Right().SetType(param.Type)
+ vas.Y = nodnil()
+ vas.Y.SetType(param.Type)
} else {
lit := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(param.Type).(ir.Ntype), nil)
- lit.PtrList().Set(varargs)
- vas.SetRight(lit)
+ lit.List.Set(varargs)
+ vas.Y = lit
}
}
- if as.Rlist().Len() != 0 {
+ if as.Rhs.Len() != 0 {
ninit.Append(typecheck(as, ctxStmt))
}
@@ -1093,7 +1093,7 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
// Note issue 28603.
inlMark := ir.NewInlineMarkStmt(base.Pos, types.BADWIDTH)
inlMark.SetPos(n.Pos().WithIsStmt())
- inlMark.SetOffset(int64(newIndex))
+ inlMark.Index = int64(newIndex)
ninit.Append(inlMark)
if base.Flag.GenDwarfInl > 0 {
@@ -1130,8 +1130,8 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
call := ir.NewInlinedCallExpr(base.Pos, nil, nil)
call.PtrInit().Set(ninit.Slice())
- call.PtrBody().Set(body)
- call.PtrRlist().Set(retvars)
+ call.Body.Set(body)
+ call.ReturnVars.Set(retvars)
call.SetType(n.Type())
call.SetTypecheck(1)
@@ -1160,7 +1160,7 @@ func inlvar(var_ ir.Node) ir.Node {
n := NewName(var_.Sym())
n.SetType(var_.Type())
- n.SetClass(ir.PAUTO)
+ n.Class_ = ir.PAUTO
n.SetUsed(true)
n.Curfn = Curfn // the calling function, not the called one
n.SetAddrtaken(var_.Name().Addrtaken())
@@ -1173,7 +1173,7 @@ func inlvar(var_ ir.Node) ir.Node {
func retvar(t *types.Field, i int) ir.Node {
n := NewName(lookupN("~R", i))
n.SetType(t.Type)
- n.SetClass(ir.PAUTO)
+ n.Class_ = ir.PAUTO
n.SetUsed(true)
n.Curfn = Curfn // the calling function, not the called one
Curfn.Dcl = append(Curfn.Dcl, n)
@@ -1185,7 +1185,7 @@ func retvar(t *types.Field, i int) ir.Node {
func argvar(t *types.Type, i int) ir.Node {
n := NewName(lookupN("~arg", i))
n.SetType(t.Elem())
- n.SetClass(ir.PAUTO)
+ n.Class_ = ir.PAUTO
n.SetUsed(true)
n.Curfn = Curfn // the calling function, not the called one
Curfn.Dcl = append(Curfn.Dcl, n)
@@ -1277,19 +1277,19 @@ func (subst *inlsubst) node(n ir.Node) ir.Node {
// this return is guaranteed to belong to the current inlined function.
n := n.(*ir.ReturnStmt)
init := subst.list(n.Init())
- if len(subst.retvars) != 0 && n.List().Len() != 0 {
+ if len(subst.retvars) != 0 && n.Results.Len() != 0 {
as := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
// Make a shallow copy of retvars.
// Otherwise OINLCALL.Rlist will be the same list,
// and later walk and typecheck may clobber it.
for _, n := range subst.retvars {
- as.PtrList().Append(n)
+ as.Lhs.Append(n)
}
- as.PtrRlist().Set(subst.list(n.List()))
+ as.Rhs.Set(subst.list(n.Results))
if subst.delayretvars {
- for _, n := range as.List().Slice() {
+ for _, n := range as.Lhs.Slice() {
as.PtrInit().Append(ir.NewDecl(base.Pos, ir.ODCL, n))
n.Name().Defn = as
}
@@ -1306,8 +1306,8 @@ func (subst *inlsubst) node(n ir.Node) ir.Node {
m := ir.Copy(n).(*ir.BranchStmt)
m.SetPos(subst.updatedPos(m.Pos()))
m.PtrInit().Set(nil)
- p := fmt.Sprintf("%s·%d", n.Sym().Name, inlgen)
- m.SetSym(lookup(p))
+ p := fmt.Sprintf("%s·%d", n.Label.Name, inlgen)
+ m.Label = lookup(p)
return m
case ir.OLABEL:
@@ -1315,8 +1315,8 @@ func (subst *inlsubst) node(n ir.Node) ir.Node {
m := ir.Copy(n).(*ir.LabelStmt)
m.SetPos(subst.updatedPos(m.Pos()))
m.PtrInit().Set(nil)
- p := fmt.Sprintf("%s·%d", n.Sym().Name, inlgen)
- m.SetSym(lookup(p))
+ p := fmt.Sprintf("%s·%d", n.Label.Name, inlgen)
+ m.Label = lookup(p)
return m
}
@@ -1345,7 +1345,7 @@ func (subst *inlsubst) updatedPos(xpos src.XPos) src.XPos {
func pruneUnusedAutos(ll []*ir.Name, vis *hairyVisitor) []*ir.Name {
s := make([]*ir.Name, 0, len(ll))
for _, n := range ll {
- if n.Class() == ir.PAUTO {
+ if n.Class_ == ir.PAUTO {
if _, found := vis.usedLocals[n]; !found {
continue
}
@@ -1359,7 +1359,7 @@ func pruneUnusedAutos(ll []*ir.Name, vis *hairyVisitor) []*ir.Name {
// concrete-type method calls where applicable.
func devirtualize(fn *ir.Func) {
Curfn = fn
- ir.VisitList(fn.Body(), func(n ir.Node) {
+ ir.VisitList(fn.Body, func(n ir.Node) {
if n.Op() == ir.OCALLINTER {
devirtualizeCall(n.(*ir.CallExpr))
}
@@ -1367,21 +1367,21 @@ func devirtualize(fn *ir.Func) {
}
func devirtualizeCall(call *ir.CallExpr) {
- sel := call.Left().(*ir.SelectorExpr)
- r := staticValue(sel.Left())
+ sel := call.X.(*ir.SelectorExpr)
+ r := staticValue(sel.X)
if r.Op() != ir.OCONVIFACE {
return
}
recv := r.(*ir.ConvExpr)
- typ := recv.Left().Type()
+ typ := recv.X.Type()
if typ.IsInterface() {
return
}
- dt := ir.NewTypeAssertExpr(sel.Pos(), sel.Left(), nil)
+ dt := ir.NewTypeAssertExpr(sel.Pos(), sel.X, nil)
dt.SetType(typ)
- x := typecheck(ir.NewSelectorExpr(sel.Pos(), ir.OXDOT, dt, sel.Sym()), ctxExpr|ctxCallee)
+ x := typecheck(ir.NewSelectorExpr(sel.Pos(), ir.OXDOT, dt, sel.Sel), ctxExpr|ctxCallee)
switch x.Op() {
case ir.ODOTMETH:
x := x.(*ir.SelectorExpr)
@@ -1389,7 +1389,7 @@ func devirtualizeCall(call *ir.CallExpr) {
base.WarnfAt(call.Pos(), "devirtualizing %v to %v", sel, typ)
}
call.SetOp(ir.OCALLMETH)
- call.SetLeft(x)
+ call.X = x
case ir.ODOTINTER:
// Promoted method from embedded interface-typed field (#42279).
x := x.(*ir.SelectorExpr)
@@ -1397,7 +1397,7 @@ func devirtualizeCall(call *ir.CallExpr) {
base.WarnfAt(call.Pos(), "partially devirtualizing %v to %v", sel, typ)
}
call.SetOp(ir.OCALLINTER)
- call.SetLeft(x)
+ call.X = x
default:
// TODO(mdempsky): Turn back into Fatalf after more testing.
if base.Flag.LowerM != 0 {
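
devirtualizeCall applies to code like this sketch, where the receiver's concrete type is statically known:

	type adder struct{}

	func (adder) Add(a, b int) int { return a + b }

	func three() int {
		var i interface{ Add(a, b int) int } = adder{}
		// staticValue sees the OCONVIFACE from adder, so the interface
		// call is rewritten to OCALLMETH; with -m the compiler reports
		// "devirtualizing i.Add to adder".
		return i.Add(1, 2)
	}
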
diff --git a/src/cmd/compile/internal/gc/main.go b/src/cmd/compile/internal/gc/main.go
index 94b4e0e674..c1cc7ed377 100644
--- a/src/cmd/compile/internal/gc/main.go
+++ b/src/cmd/compile/internal/gc/main.go
@@ -272,7 +272,7 @@ func Main(archInit func(*Arch)) {
for _, n := range Target.Decls {
if n.Op() == ir.ODCLFUNC {
n := n.(*ir.Func)
- if n.Func().OClosure != nil {
+ if n.OClosure != nil {
Curfn = n
transformclosure(n)
}
diff --git a/src/cmd/compile/internal/gc/noder.go b/src/cmd/compile/internal/gc/noder.go
index 4b7a22e654..728c4b1316 100644
--- a/src/cmd/compile/internal/gc/noder.go
+++ b/src/cmd/compile/internal/gc/noder.go
@@ -167,7 +167,7 @@ func (p *noder) funcBody(fn *ir.Func, block *syntax.BlockStmt) {
if body == nil {
body = []ir.Node{ir.NewBlockStmt(base.Pos, nil)}
}
- fn.PtrBody().Set(body)
+ fn.Body.Set(body)
base.Pos = p.makeXPos(block.Rbrace)
fn.Endlineno = base.Pos
@@ -650,13 +650,13 @@ func (p *noder) expr(expr syntax.Expr) ir.Node {
case *syntax.CompositeLit:
n := ir.NewCompLitExpr(p.pos(expr), ir.OCOMPLIT, nil, nil)
if expr.Type != nil {
- n.SetRight(p.expr(expr.Type))
+ n.Ntype = ir.Node(p.expr(expr.Type)).(ir.Ntype)
}
l := p.exprs(expr.ElemList)
for i, e := range l {
l[i] = p.wrapname(expr.ElemList[i], e)
}
- n.PtrList().Set(l)
+ n.List.Set(l)
base.Pos = p.makeXPos(expr.Rbrace)
return n
case *syntax.KeyValueExpr:
@@ -719,8 +719,8 @@ func (p *noder) expr(expr syntax.Expr) ir.Node {
return ir.NewBinaryExpr(pos, op, x, y)
case *syntax.CallExpr:
n := ir.NewCallExpr(p.pos(expr), ir.OCALL, p.expr(expr.Fun), nil)
- n.PtrList().Set(p.exprs(expr.ArgList))
- n.SetIsDDD(expr.HasDots)
+ n.Args.Set(p.exprs(expr.ArgList))
+ n.IsDDD = expr.HasDots
return n
case *syntax.ArrayType:
@@ -968,10 +968,10 @@ func (p *noder) stmtsFall(stmts []syntax.Stmt, fallOK bool) []ir.Node {
for i, stmt := range stmts {
s := p.stmtFall(stmt, fallOK && i+1 == len(stmts))
if s == nil {
- } else if s.Op() == ir.OBLOCK && s.(*ir.BlockStmt).List().Len() > 0 {
+ } else if s.Op() == ir.OBLOCK && s.(*ir.BlockStmt).List.Len() > 0 {
// Inline non-empty block.
// Empty blocks must be preserved for checkreturn.
- nodes = append(nodes, s.(*ir.BlockStmt).List().Slice()...)
+ nodes = append(nodes, s.(*ir.BlockStmt).List.Slice()...)
} else {
nodes = append(nodes, s)
}
@@ -1006,23 +1006,23 @@ func (p *noder) stmtFall(stmt syntax.Stmt, fallOK bool) ir.Node {
case *syntax.AssignStmt:
if stmt.Op != 0 && stmt.Op != syntax.Def {
n := ir.NewAssignOpStmt(p.pos(stmt), p.binOp(stmt.Op), p.expr(stmt.Lhs), p.expr(stmt.Rhs))
- n.SetImplicit(stmt.Rhs == syntax.ImplicitOne)
+ n.IncDec = stmt.Rhs == syntax.ImplicitOne
return n
}
rhs := p.exprList(stmt.Rhs)
if list, ok := stmt.Lhs.(*syntax.ListExpr); ok && len(list.ElemList) != 1 || len(rhs) != 1 {
n := ir.NewAssignListStmt(p.pos(stmt), ir.OAS2, nil, nil)
- n.SetColas(stmt.Op == syntax.Def)
- n.PtrList().Set(p.assignList(stmt.Lhs, n, n.Colas()))
- n.PtrRlist().Set(rhs)
+ n.Def = stmt.Op == syntax.Def
+ n.Lhs.Set(p.assignList(stmt.Lhs, n, n.Def))
+ n.Rhs.Set(rhs)
return n
}
n := ir.NewAssignStmt(p.pos(stmt), nil, nil)
- n.SetColas(stmt.Op == syntax.Def)
- n.SetLeft(p.assignList(stmt.Lhs, n, n.Colas())[0])
- n.SetRight(rhs[0])
+ n.Def = stmt.Op == syntax.Def
+ n.X = p.assignList(stmt.Lhs, n, n.Def)[0]
+ n.Y = rhs[0]
return n
case *syntax.BranchStmt:
@@ -1064,13 +1064,13 @@ func (p *noder) stmtFall(stmt syntax.Stmt, fallOK bool) ir.Node {
results = p.exprList(stmt.Results)
}
n := ir.NewReturnStmt(p.pos(stmt), nil)
- n.PtrList().Set(results)
- if n.List().Len() == 0 && Curfn != nil {
+ n.Results.Set(results)
+ if n.Results.Len() == 0 && Curfn != nil {
for _, ln := range Curfn.Dcl {
- if ln.Class() == ir.PPARAM {
+ if ln.Class_ == ir.PPARAM {
continue
}
- if ln.Class() != ir.PPARAMOUT {
+ if ln.Class_ != ir.PPARAMOUT {
break
}
if ln.Sym().Def != ln {
@@ -1163,16 +1163,16 @@ func (p *noder) ifStmt(stmt *syntax.IfStmt) ir.Node {
n.PtrInit().Set1(p.stmt(stmt.Init))
}
if stmt.Cond != nil {
- n.SetLeft(p.expr(stmt.Cond))
+ n.Cond = p.expr(stmt.Cond)
}
- n.PtrBody().Set(p.blockStmt(stmt.Then))
+ n.Body.Set(p.blockStmt(stmt.Then))
if stmt.Else != nil {
e := p.stmt(stmt.Else)
if e.Op() == ir.OBLOCK {
e := e.(*ir.BlockStmt)
- n.PtrRlist().Set(e.List().Slice())
+ n.Else.Set(e.List.Slice())
} else {
- n.PtrRlist().Set1(e)
+ n.Else.Set1(e)
}
}
p.closeAnotherScope()
@@ -1188,10 +1188,10 @@ func (p *noder) forStmt(stmt *syntax.ForStmt) ir.Node {
n := ir.NewRangeStmt(p.pos(r), nil, p.expr(r.X), nil)
if r.Lhs != nil {
- n.SetColas(r.Def)
- n.PtrList().Set(p.assignList(r.Lhs, n, n.Colas()))
+ n.Def = r.Def
+ n.Vars.Set(p.assignList(r.Lhs, n, n.Def))
}
- n.PtrBody().Set(p.blockStmt(stmt.Body))
+ n.Body.Set(p.blockStmt(stmt.Body))
p.closeAnotherScope()
return n
}
@@ -1201,12 +1201,12 @@ func (p *noder) forStmt(stmt *syntax.ForStmt) ir.Node {
n.PtrInit().Set1(p.stmt(stmt.Init))
}
if stmt.Cond != nil {
- n.SetLeft(p.expr(stmt.Cond))
+ n.Cond = p.expr(stmt.Cond)
}
if stmt.Post != nil {
- n.SetRight(p.stmt(stmt.Post))
+ n.Post = p.stmt(stmt.Post)
}
- n.PtrBody().Set(p.blockStmt(stmt.Body))
+ n.Body.Set(p.blockStmt(stmt.Body))
p.closeAnotherScope()
return n
}
@@ -1218,14 +1218,14 @@ func (p *noder) switchStmt(stmt *syntax.SwitchStmt) ir.Node {
n.PtrInit().Set1(p.stmt(stmt.Init))
}
if stmt.Tag != nil {
- n.SetLeft(p.expr(stmt.Tag))
+ n.Tag = p.expr(stmt.Tag)
}
var tswitch *ir.TypeSwitchGuard
- if l := n.Left(); l != nil && l.Op() == ir.OTYPESW {
+ if l := n.Tag; l != nil && l.Op() == ir.OTYPESW {
tswitch = l.(*ir.TypeSwitchGuard)
}
- n.PtrList().Set(p.caseClauses(stmt.Body, tswitch, stmt.Rbrace))
+ n.Cases.Set(p.caseClauses(stmt.Body, tswitch, stmt.Rbrace))
p.closeScope(stmt.Rbrace)
return n
@@ -1242,12 +1242,12 @@ func (p *noder) caseClauses(clauses []*syntax.CaseClause, tswitch *ir.TypeSwitch
n := ir.NewCaseStmt(p.pos(clause), nil, nil)
if clause.Cases != nil {
- n.PtrList().Set(p.exprList(clause.Cases))
+ n.List.Set(p.exprList(clause.Cases))
}
- if tswitch != nil && tswitch.Left() != nil {
- nn := NewName(tswitch.Left().Sym())
+ if tswitch != nil && tswitch.Tag != nil {
+ nn := NewName(tswitch.Tag.Sym())
declare(nn, dclcontext)
- n.PtrRlist().Set1(nn)
+ n.Vars.Set1(nn)
// keep track of the instances for reporting unused
nn.Defn = tswitch
}
@@ -1263,8 +1263,8 @@ func (p *noder) caseClauses(clauses []*syntax.CaseClause, tswitch *ir.TypeSwitch
body = body[:len(body)-1]
}
- n.PtrBody().Set(p.stmtsFall(body, true))
- if l := n.Body().Len(); l > 0 && n.Body().Index(l-1).Op() == ir.OFALL {
+ n.Body.Set(p.stmtsFall(body, true))
+ if l := n.Body.Len(); l > 0 && n.Body.Index(l-1).Op() == ir.OFALL {
if tswitch != nil {
base.Errorf("cannot fallthrough in type switch")
}
@@ -1283,7 +1283,7 @@ func (p *noder) caseClauses(clauses []*syntax.CaseClause, tswitch *ir.TypeSwitch
func (p *noder) selectStmt(stmt *syntax.SelectStmt) ir.Node {
n := ir.NewSelectStmt(p.pos(stmt), nil)
- n.PtrList().Set(p.commClauses(stmt.Body, stmt.Rbrace))
+ n.Cases.Set(p.commClauses(stmt.Body, stmt.Rbrace))
return n
}
@@ -1298,9 +1298,9 @@ func (p *noder) commClauses(clauses []*syntax.CommClause, rbrace syntax.Pos) []i
n := ir.NewCaseStmt(p.pos(clause), nil, nil)
if clause.Comm != nil {
- n.PtrList().Set1(p.stmt(clause.Comm))
+ n.List.Set1(p.stmt(clause.Comm))
}
- n.PtrBody().Set(p.stmts(clause.Body))
+ n.Body.Set(p.stmts(clause.Body))
nodes = append(nodes, n)
}
if len(clauses) > 0 {
@@ -1321,16 +1321,16 @@ func (p *noder) labeledStmt(label *syntax.LabeledStmt, fallOK bool) ir.Node {
switch ls.Op() {
case ir.OFOR:
ls := ls.(*ir.ForStmt)
- ls.SetSym(sym)
+ ls.Label = sym
case ir.ORANGE:
ls := ls.(*ir.RangeStmt)
- ls.SetSym(sym)
+ ls.Label = sym
case ir.OSWITCH:
ls := ls.(*ir.SwitchStmt)
- ls.SetSym(sym)
+ ls.Label = sym
case ir.OSELECT:
ls := ls.(*ir.SelectStmt)
- ls.SetSym(sym)
+ ls.Label = sym
}
}
}
@@ -1339,7 +1339,7 @@ func (p *noder) labeledStmt(label *syntax.LabeledStmt, fallOK bool) ir.Node {
if ls != nil {
if ls.Op() == ir.OBLOCK {
ls := ls.(*ir.BlockStmt)
- l = append(l, ls.List().Slice()...)
+ l = append(l, ls.List.Slice()...)
} else {
l = append(l, ls)
}
diff --git a/src/cmd/compile/internal/gc/obj.go b/src/cmd/compile/internal/gc/obj.go
index c6625da1da..9634cd51ae 100644
--- a/src/cmd/compile/internal/gc/obj.go
+++ b/src/cmd/compile/internal/gc/obj.go
@@ -214,7 +214,7 @@ func addptabs() {
if s.Pkg.Name != "main" {
continue
}
- if n.Type().Kind() == types.TFUNC && n.Class() == ir.PFUNC {
+ if n.Type().Kind() == types.TFUNC && n.Class_ == ir.PFUNC {
// function
ptabs = append(ptabs, ptabEntry{s: s, t: s.Def.Type()})
} else {
@@ -228,7 +228,7 @@ func dumpGlobal(n *ir.Name) {
if n.Type() == nil {
base.Fatalf("external %v nil type\n", n)
}
- if n.Class() == ir.PFUNC {
+ if n.Class_ == ir.PFUNC {
return
}
if n.Sym().Pkg != types.LocalPkg {
@@ -560,8 +560,8 @@ func pfuncsym(n *ir.Name, noff int64, f *ir.Name) {
if n.Sym() == nil {
base.Fatalf("pfuncsym nil n sym")
}
- if f.Class() != ir.PFUNC {
- base.Fatalf("pfuncsym class not PFUNC %d", f.Class())
+ if f.Class_ != ir.PFUNC {
+ base.Fatalf("pfuncsym class not PFUNC %d", f.Class_)
}
s := n.Sym().Linksym()
s.WriteAddr(base.Ctxt, noff, Widthptr, funcsym(f.Sym()).Linksym(), 0)
diff --git a/src/cmd/compile/internal/gc/order.go b/src/cmd/compile/internal/gc/order.go
index 96164d09fd..53d83c0ac8 100644
--- a/src/cmd/compile/internal/gc/order.go
+++ b/src/cmd/compile/internal/gc/order.go
@@ -55,10 +55,10 @@ type Order struct {
func order(fn *ir.Func) {
if base.Flag.W > 1 {
s := fmt.Sprintf("\nbefore order %v", fn.Sym())
- ir.DumpList(s, fn.Body())
+ ir.DumpList(s, fn.Body)
}
- orderBlock(fn.PtrBody(), map[string][]*ir.Name{})
+ orderBlock(&fn.Body, map[string][]*ir.Name{})
}
// append typechecks stmt and appends it to out.
@@ -136,12 +136,12 @@ func (o *Order) cheapExpr(n ir.Node) ir.Node {
return n
case ir.OLEN, ir.OCAP:
n := n.(*ir.UnaryExpr)
- l := o.cheapExpr(n.Left())
- if l == n.Left() {
+ l := o.cheapExpr(n.X)
+ if l == n.X {
return n
}
a := ir.SepCopy(n).(*ir.UnaryExpr)
- a.SetLeft(l)
+ a.X = l
return typecheck(a, ctxExpr)
}
@@ -162,59 +162,59 @@ func (o *Order) safeExpr(n ir.Node) ir.Node {
case ir.OLEN, ir.OCAP:
n := n.(*ir.UnaryExpr)
- l := o.safeExpr(n.Left())
- if l == n.Left() {
+ l := o.safeExpr(n.X)
+ if l == n.X {
return n
}
a := ir.SepCopy(n).(*ir.UnaryExpr)
- a.SetLeft(l)
+ a.X = l
return typecheck(a, ctxExpr)
case ir.ODOT:
n := n.(*ir.SelectorExpr)
- l := o.safeExpr(n.Left())
- if l == n.Left() {
+ l := o.safeExpr(n.X)
+ if l == n.X {
return n
}
a := ir.SepCopy(n).(*ir.SelectorExpr)
- a.SetLeft(l)
+ a.X = l
return typecheck(a, ctxExpr)
case ir.ODOTPTR:
n := n.(*ir.SelectorExpr)
- l := o.cheapExpr(n.Left())
- if l == n.Left() {
+ l := o.cheapExpr(n.X)
+ if l == n.X {
return n
}
a := ir.SepCopy(n).(*ir.SelectorExpr)
- a.SetLeft(l)
+ a.X = l
return typecheck(a, ctxExpr)
case ir.ODEREF:
n := n.(*ir.StarExpr)
- l := o.cheapExpr(n.Left())
- if l == n.Left() {
+ l := o.cheapExpr(n.X)
+ if l == n.X {
return n
}
a := ir.SepCopy(n).(*ir.StarExpr)
- a.SetLeft(l)
+ a.X = l
return typecheck(a, ctxExpr)
case ir.OINDEX, ir.OINDEXMAP:
n := n.(*ir.IndexExpr)
var l ir.Node
- if n.Left().Type().IsArray() {
- l = o.safeExpr(n.Left())
+ if n.X.Type().IsArray() {
+ l = o.safeExpr(n.X)
} else {
- l = o.cheapExpr(n.Left())
+ l = o.cheapExpr(n.X)
}
- r := o.cheapExpr(n.Right())
- if l == n.Left() && r == n.Right() {
+ r := o.cheapExpr(n.Index)
+ if l == n.X && r == n.Index {
return n
}
a := ir.SepCopy(n).(*ir.IndexExpr)
- a.SetLeft(l)
- a.SetRight(r)
+ a.X = l
+ a.Index = r
return typecheck(a, ctxExpr)
default:
@@ -230,7 +230,7 @@ func (o *Order) safeExpr(n ir.Node) ir.Node {
// because we emit explicit VARKILL instructions marking the end of those
// temporaries' lifetimes.
func isaddrokay(n ir.Node) bool {
- return islvalue(n) && (n.Op() != ir.ONAME || n.(*ir.Name).Class() == ir.PEXTERN || ir.IsAutoTmp(n))
+ return islvalue(n) && (n.Op() != ir.ONAME || n.(*ir.Name).Class_ == ir.PEXTERN || ir.IsAutoTmp(n))
}
// addrTemp ensures that n is okay to pass by address to runtime routines.
@@ -292,17 +292,17 @@ func mapKeyReplaceStrConv(n ir.Node) bool {
replaced = true
case ir.OSTRUCTLIT:
n := n.(*ir.CompLitExpr)
- for _, elem := range n.List().Slice() {
+ for _, elem := range n.List.Slice() {
elem := elem.(*ir.StructKeyExpr)
- if mapKeyReplaceStrConv(elem.Left()) {
+ if mapKeyReplaceStrConv(elem.Value) {
replaced = true
}
}
case ir.OARRAYLIT:
n := n.(*ir.CompLitExpr)
- for _, elem := range n.List().Slice() {
+ for _, elem := range n.List.Slice() {
if elem.Op() == ir.OKEY {
- elem = elem.(*ir.KeyExpr).Right()
+ elem = elem.(*ir.KeyExpr).Value
}
if mapKeyReplaceStrConv(elem) {
replaced = true
@@ -371,24 +371,24 @@ func orderMakeSliceCopy(s []ir.Node) {
as := s[0].(*ir.AssignStmt)
cp := s[1].(*ir.BinaryExpr)
- if as.Right() == nil || as.Right().Op() != ir.OMAKESLICE || ir.IsBlank(as.Left()) ||
- as.Left().Op() != ir.ONAME || cp.Left().Op() != ir.ONAME || cp.Right().Op() != ir.ONAME ||
- as.Left().Name() != cp.Left().Name() || cp.Left().Name() == cp.Right().Name() {
+ if as.Y == nil || as.Y.Op() != ir.OMAKESLICE || ir.IsBlank(as.X) ||
+ as.X.Op() != ir.ONAME || cp.X.Op() != ir.ONAME || cp.Y.Op() != ir.ONAME ||
+ as.X.Name() != cp.X.Name() || cp.X.Name() == cp.Y.Name() {
// The line above this one is correct with the differing equality operators:
// we want as.X and cp.X to be the same name,
// but we want the initial data to be coming from a different name.
return
}
- mk := as.Right().(*ir.MakeExpr)
- if mk.Esc() == EscNone || mk.Left() == nil || mk.Right() != nil {
+ mk := as.Y.(*ir.MakeExpr)
+ if mk.Esc() == EscNone || mk.Len == nil || mk.Cap != nil {
return
}
mk.SetOp(ir.OMAKESLICECOPY)
- mk.SetRight(cp.Right())
+ mk.Cap = cp.Y
// Set bounded when m = OMAKESLICE([]T, len(s)); OCOPY(m, s)
- mk.SetBounded(mk.Left().Op() == ir.OLEN && samesafeexpr(mk.Left().(*ir.UnaryExpr).Left(), cp.Right()))
- as.SetRight(typecheck(mk, ctxExpr))
+ mk.SetBounded(mk.Len.Op() == ir.OLEN && samesafeexpr(mk.Len.(*ir.UnaryExpr).X, cp.Y))
+ as.Y = typecheck(mk, ctxExpr)
s[1] = nil // remove separate copy call
}
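A sketch of the source pattern this function fuses (m, s, T are illustrative names, not from the patch):

	m := make([]T, len(s)) // as: OMAKESLICE assigned to m
	copy(m, s)             // cp: OCOPY filling m from a different name

When the checks pass, the make is retagged OMAKESLICECOPY with Cap taken from the copy source, and Bounded is set when the make length is literally len(s).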
@@ -479,25 +479,25 @@ func (o *Order) call(nn ir.Node) {
default:
base.Fatalf("unexpected call: %+v", n)
case *ir.UnaryExpr:
- n.SetLeft(o.expr(n.Left(), nil))
+ n.X = o.expr(n.X, nil)
case *ir.ConvExpr:
- n.SetLeft(o.expr(n.Left(), nil))
+ n.X = o.expr(n.X, nil)
case *ir.BinaryExpr:
- n.SetLeft(o.expr(n.Left(), nil))
- n.SetRight(o.expr(n.Right(), nil))
+ n.X = o.expr(n.X, nil)
+ n.Y = o.expr(n.Y, nil)
case *ir.MakeExpr:
- n.SetLeft(o.expr(n.Left(), nil))
- n.SetRight(o.expr(n.Right(), nil))
+ n.Len = o.expr(n.Len, nil)
+ n.Cap = o.expr(n.Cap, nil)
case *ir.CallExpr:
- o.exprList(n.List())
+ o.exprList(n.Args)
}
return
}
n := nn.(*ir.CallExpr)
fixVariadicCall(n)
- n.SetLeft(o.expr(n.Left(), nil))
- o.exprList(n.List())
+ n.X = o.expr(n.X, nil)
+ o.exprList(n.Args)
if n.Op() == ir.OCALLINTER {
return
@@ -509,21 +509,21 @@ func (o *Order) call(nn ir.Node) {
// still alive when we pop the temp stack.
if arg.Op() == ir.OCONVNOP {
arg := arg.(*ir.ConvExpr)
- if arg.Left().Type().IsUnsafePtr() {
- x := o.copyExpr(arg.Left())
- arg.SetLeft(x)
+ if arg.X.Type().IsUnsafePtr() {
+ x := o.copyExpr(arg.X)
+ arg.X = x
x.Name().SetAddrtaken(true) // ensure SSA keeps the x variable
- n.PtrBody().Append(typecheck(ir.NewUnaryExpr(base.Pos, ir.OVARLIVE, x), ctxStmt))
+ n.Body.Append(typecheck(ir.NewUnaryExpr(base.Pos, ir.OVARLIVE, x), ctxStmt))
}
}
}
// Check for "unsafe-uintptr" tag provided by escape analysis.
- for i, param := range n.Left().Type().Params().FieldSlice() {
+ for i, param := range n.X.Type().Params().FieldSlice() {
if param.Note == unsafeUintptrTag || param.Note == uintptrEscapesTag {
- if arg := n.List().Index(i); arg.Op() == ir.OSLICELIT {
+ if arg := n.Args.Index(i); arg.Op() == ir.OSLICELIT {
arg := arg.(*ir.CompLitExpr)
- for _, elt := range arg.List().Slice() {
+ for _, elt := range arg.List.Slice() {
keepAlive(elt)
}
} else {
@@ -555,34 +555,34 @@ func (o *Order) mapAssign(n ir.Node) {
case ir.OAS:
n := n.(*ir.AssignStmt)
- if n.Left().Op() == ir.OINDEXMAP {
- n.SetRight(o.safeMapRHS(n.Right()))
+ if n.X.Op() == ir.OINDEXMAP {
+ n.Y = o.safeMapRHS(n.Y)
}
o.out = append(o.out, n)
case ir.OASOP:
n := n.(*ir.AssignOpStmt)
- if n.Left().Op() == ir.OINDEXMAP {
- n.SetRight(o.safeMapRHS(n.Right()))
+ if n.X.Op() == ir.OINDEXMAP {
+ n.Y = o.safeMapRHS(n.Y)
}
o.out = append(o.out, n)
case ir.OAS2, ir.OAS2DOTTYPE, ir.OAS2MAPR, ir.OAS2FUNC:
n := n.(*ir.AssignListStmt)
var post []ir.Node
- for i, m := range n.List().Slice() {
+ for i, m := range n.Lhs.Slice() {
switch {
case m.Op() == ir.OINDEXMAP:
m := m.(*ir.IndexExpr)
- if !ir.IsAutoTmp(m.Left()) {
- m.SetLeft(o.copyExpr(m.Left()))
+ if !ir.IsAutoTmp(m.X) {
+ m.X = o.copyExpr(m.X)
}
- if !ir.IsAutoTmp(m.Right()) {
- m.SetRight(o.copyExpr(m.Right()))
+ if !ir.IsAutoTmp(m.Index) {
+ m.Index = o.copyExpr(m.Index)
}
fallthrough
case instrumenting && n.Op() == ir.OAS2FUNC && !ir.IsBlank(m):
t := o.newTemp(m.Type(), false)
- n.List().SetIndex(i, t)
+ n.Lhs.SetIndex(i, t)
a := ir.NewAssignStmt(base.Pos, m, t)
post = append(post, typecheck(a, ctxStmt))
}
@@ -598,7 +598,7 @@ func (o *Order) safeMapRHS(r ir.Node) ir.Node {
// We need to make sure the RHS won't panic. See issue 22881.
if r.Op() == ir.OAPPEND {
r := r.(*ir.CallExpr)
- s := r.List().Slice()[1:]
+ s := r.Args.Slice()[1:]
for i, n := range s {
s[i] = o.cheapExpr(n)
}
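The guarded shape here is roughly (an illustrative sketch; see issue 22881):

	m[k] = append(m[k], x, y, z)

cheapExpr copies the appended arguments into temporaries where needed, so that any panic while evaluating them fires before the map entry is created.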
@@ -628,32 +628,32 @@ func (o *Order) stmt(n ir.Node) {
case ir.OAS:
n := n.(*ir.AssignStmt)
t := o.markTemp()
- n.SetLeft(o.expr(n.Left(), nil))
- n.SetRight(o.expr(n.Right(), n.Left()))
+ n.X = o.expr(n.X, nil)
+ n.Y = o.expr(n.Y, n.X)
o.mapAssign(n)
o.cleanTemp(t)
case ir.OASOP:
n := n.(*ir.AssignOpStmt)
t := o.markTemp()
- n.SetLeft(o.expr(n.Left(), nil))
- n.SetRight(o.expr(n.Right(), nil))
+ n.X = o.expr(n.X, nil)
+ n.Y = o.expr(n.Y, nil)
- if instrumenting || n.Left().Op() == ir.OINDEXMAP && (n.SubOp() == ir.ODIV || n.SubOp() == ir.OMOD) {
+ if instrumenting || n.X.Op() == ir.OINDEXMAP && (n.AsOp == ir.ODIV || n.AsOp == ir.OMOD) {
// Rewrite m[k] op= r into m[k] = m[k] op r so
// that we can ensure that if op panics
// because r is zero, the panic happens before
// the map assignment.
// DeepCopy is a big hammer here, but safeExpr
// makes sure there is nothing too deep being copied.
- l1 := o.safeExpr(n.Left())
+ l1 := o.safeExpr(n.X)
l2 := ir.DeepCopy(src.NoXPos, l1)
if l2.Op() == ir.OINDEXMAP {
l2 := l2.(*ir.IndexExpr)
- l2.SetIndexMapLValue(false)
+ l2.Assigned = false
}
l2 = o.copyExpr(l2)
- r := o.expr(typecheck(ir.NewBinaryExpr(n.Pos(), n.SubOp(), l2, n.Right()), ctxExpr), nil)
+ r := o.expr(typecheck(ir.NewBinaryExpr(n.Pos(), n.AsOp, l2, n.Y), ctxExpr), nil)
as := typecheck(ir.NewAssignStmt(n.Pos(), l1, r), ctxStmt)
o.mapAssign(as)
o.cleanTemp(t)
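Spelled out at the source level, the rewrite in this branch turns

	m[k] /= r

into

	m[k] = m[k] / r

so that a division panic on a zero r happens while evaluating the right-hand side, before the map slot is written.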
@@ -666,8 +666,8 @@ func (o *Order) stmt(n ir.Node) {
case ir.OAS2:
n := n.(*ir.AssignListStmt)
t := o.markTemp()
- o.exprList(n.List())
- o.exprList(n.Rlist())
+ o.exprList(n.Lhs)
+ o.exprList(n.Rhs)
o.mapAssign(n)
o.cleanTemp(t)
@@ -675,9 +675,9 @@ func (o *Order) stmt(n ir.Node) {
case ir.OAS2FUNC:
n := n.(*ir.AssignListStmt)
t := o.markTemp()
- o.exprList(n.List())
- o.init(n.Rlist().First())
- o.call(n.Rlist().First())
+ o.exprList(n.Lhs)
+ o.init(n.Rhs.First())
+ o.call(n.Rhs.First())
o.as2(n)
o.cleanTemp(t)
@@ -690,22 +690,22 @@ func (o *Order) stmt(n ir.Node) {
case ir.OAS2DOTTYPE, ir.OAS2RECV, ir.OAS2MAPR:
n := n.(*ir.AssignListStmt)
t := o.markTemp()
- o.exprList(n.List())
+ o.exprList(n.Lhs)
- switch r := n.Rlist().First(); r.Op() {
+ switch r := n.Rhs.First(); r.Op() {
case ir.ODOTTYPE2:
r := r.(*ir.TypeAssertExpr)
- r.SetLeft(o.expr(r.Left(), nil))
+ r.X = o.expr(r.X, nil)
case ir.ORECV:
r := r.(*ir.UnaryExpr)
- r.SetLeft(o.expr(r.Left(), nil))
+ r.X = o.expr(r.X, nil)
case ir.OINDEXMAP:
r := r.(*ir.IndexExpr)
- r.SetLeft(o.expr(r.Left(), nil))
- r.SetRight(o.expr(r.Right(), nil))
+ r.X = o.expr(r.X, nil)
+ r.Index = o.expr(r.Index, nil)
// See similar conversion for OINDEXMAP below.
- _ = mapKeyReplaceStrConv(r.Right())
- r.SetRight(o.mapKeyTemp(r.Left().Type(), r.Right()))
+ _ = mapKeyReplaceStrConv(r.Index)
+ r.Index = o.mapKeyTemp(r.X.Type(), r.Index)
default:
base.Fatalf("order.stmt: %v", r.Op())
}
@@ -716,7 +716,7 @@ func (o *Order) stmt(n ir.Node) {
// Special: does not save n onto out.
case ir.OBLOCK:
n := n.(*ir.BlockStmt)
- o.stmtList(n.List())
+ o.stmtList(n.List)
// Special: n->left is not an expression; save as is.
case ir.OBREAK,
@@ -741,22 +741,22 @@ func (o *Order) stmt(n ir.Node) {
case ir.OCLOSE, ir.ORECV:
n := n.(*ir.UnaryExpr)
t := o.markTemp()
- n.SetLeft(o.expr(n.Left(), nil))
+ n.X = o.expr(n.X, nil)
o.out = append(o.out, n)
o.cleanTemp(t)
case ir.OCOPY:
n := n.(*ir.BinaryExpr)
t := o.markTemp()
- n.SetLeft(o.expr(n.Left(), nil))
- n.SetRight(o.expr(n.Right(), nil))
+ n.X = o.expr(n.X, nil)
+ n.Y = o.expr(n.Y, nil)
o.out = append(o.out, n)
o.cleanTemp(t)
case ir.OPRINT, ir.OPRINTN, ir.ORECOVER:
n := n.(*ir.CallExpr)
t := o.markTemp()
- o.exprList(n.List())
+ o.exprList(n.Args)
o.out = append(o.out, n)
o.cleanTemp(t)
@@ -764,17 +764,17 @@ func (o *Order) stmt(n ir.Node) {
case ir.ODEFER, ir.OGO:
n := n.(*ir.GoDeferStmt)
t := o.markTemp()
- o.init(n.Left())
- o.call(n.Left())
+ o.init(n.Call)
+ o.call(n.Call)
o.out = append(o.out, n)
o.cleanTemp(t)
case ir.ODELETE:
n := n.(*ir.CallExpr)
t := o.markTemp()
- n.List().SetFirst(o.expr(n.List().First(), nil))
- n.List().SetSecond(o.expr(n.List().Second(), nil))
- n.List().SetSecond(o.mapKeyTemp(n.List().First().Type(), n.List().Second()))
+ n.Args.SetFirst(o.expr(n.Args.First(), nil))
+ n.Args.SetSecond(o.expr(n.Args.Second(), nil))
+ n.Args.SetSecond(o.mapKeyTemp(n.Args.First().Type(), n.Args.Second()))
o.out = append(o.out, n)
o.cleanTemp(t)
@@ -783,10 +783,10 @@ func (o *Order) stmt(n ir.Node) {
case ir.OFOR:
n := n.(*ir.ForStmt)
t := o.markTemp()
- n.SetLeft(o.exprInPlace(n.Left()))
- n.PtrBody().Prepend(o.cleanTempNoPop(t)...)
- orderBlock(n.PtrBody(), o.free)
- n.SetRight(orderStmtInPlace(n.Right(), o.free))
+ n.Cond = o.exprInPlace(n.Cond)
+ n.Body.Prepend(o.cleanTempNoPop(t)...)
+ orderBlock(&n.Body, o.free)
+ n.Post = orderStmtInPlace(n.Post, o.free)
o.out = append(o.out, n)
o.cleanTemp(t)
@@ -795,12 +795,12 @@ func (o *Order) stmt(n ir.Node) {
case ir.OIF:
n := n.(*ir.IfStmt)
t := o.markTemp()
- n.SetLeft(o.exprInPlace(n.Left()))
- n.PtrBody().Prepend(o.cleanTempNoPop(t)...)
- n.PtrRlist().Prepend(o.cleanTempNoPop(t)...)
+ n.Cond = o.exprInPlace(n.Cond)
+ n.Body.Prepend(o.cleanTempNoPop(t)...)
+ n.Else.Prepend(o.cleanTempNoPop(t)...)
o.popTemp(t)
- orderBlock(n.PtrBody(), o.free)
- orderBlock(n.PtrRlist(), o.free)
+ orderBlock(&n.Body, o.free)
+ orderBlock(&n.Else, o.free)
o.out = append(o.out, n)
// Special: argument will be converted to interface using convT2E
@@ -808,9 +808,9 @@ func (o *Order) stmt(n ir.Node) {
case ir.OPANIC:
n := n.(*ir.UnaryExpr)
t := o.markTemp()
- n.SetLeft(o.expr(n.Left(), nil))
- if !n.Left().Type().IsInterface() {
- n.SetLeft(o.addrTemp(n.Left()))
+ n.X = o.expr(n.X, nil)
+ if !n.X.Type().IsInterface() {
+ n.X = o.addrTemp(n.X)
}
o.out = append(o.out, n)
o.cleanTemp(t)
@@ -830,12 +830,12 @@ func (o *Order) stmt(n ir.Node) {
// Mark []byte(str) range expression to reuse string backing storage.
// It is safe because the storage cannot be mutated.
n := n.(*ir.RangeStmt)
- if n.Right().Op() == ir.OSTR2BYTES {
- n.Right().(*ir.ConvExpr).SetOp(ir.OSTR2BYTESTMP)
+ if n.X.Op() == ir.OSTR2BYTES {
+ n.X.(*ir.ConvExpr).SetOp(ir.OSTR2BYTESTMP)
}
t := o.markTemp()
- n.SetRight(o.expr(n.Right(), nil))
+ n.X = o.expr(n.X, nil)
orderBody := true
switch n.Type().Kind() {
@@ -843,7 +843,7 @@ func (o *Order) stmt(n ir.Node) {
base.Fatalf("order.stmt range %v", n.Type())
case types.TARRAY, types.TSLICE:
- if n.List().Len() < 2 || ir.IsBlank(n.List().Second()) {
+ if n.Vars.Len() < 2 || ir.IsBlank(n.Vars.Second()) {
// for i := range x will only use x once, to compute len(x).
// No need to copy it.
break
@@ -853,7 +853,7 @@ func (o *Order) stmt(n ir.Node) {
case types.TCHAN, types.TSTRING:
// chan, string, slice, array ranges use value multiple times.
// make copy.
- r := n.Right()
+ r := n.X
if r.Type().IsString() && r.Type() != types.Types[types.TSTRING] {
r = ir.NewConvExpr(base.Pos, ir.OCONV, nil, r)
@@ -861,7 +861,7 @@ func (o *Order) stmt(n ir.Node) {
r = typecheck(r, ctxExpr)
}
- n.SetRight(o.copyExpr(r))
+ n.X = o.copyExpr(r)
case types.TMAP:
if isMapClear(n) {
@@ -875,23 +875,23 @@ func (o *Order) stmt(n ir.Node) {
// copy the map value in case it is a map literal.
// TODO(rsc): Make tmp = literal expressions reuse tmp.
// For maps tmp is just one word so it hardly matters.
- r := n.Right()
- n.SetRight(o.copyExpr(r))
+ r := n.X
+ n.X = o.copyExpr(r)
// n.Prealloc is the temp for the iterator.
// hiter contains pointers and needs to be zeroed.
n.Prealloc = o.newTemp(hiter(n.Type()), true)
}
- o.exprListInPlace(n.List())
+ o.exprListInPlace(n.Vars)
if orderBody {
- orderBlock(n.PtrBody(), o.free)
+ orderBlock(&n.Body, o.free)
}
o.out = append(o.out, n)
o.cleanTemp(t)
case ir.ORETURN:
n := n.(*ir.ReturnStmt)
- o.exprList(n.List())
+ o.exprList(n.Results)
o.out = append(o.out, n)
// Special: clean case temporaries in each block entry.
@@ -906,9 +906,9 @@ func (o *Order) stmt(n ir.Node) {
case ir.OSELECT:
n := n.(*ir.SelectStmt)
t := o.markTemp()
- for _, ncas := range n.List().Slice() {
+ for _, ncas := range n.Cases.Slice() {
ncas := ncas.(*ir.CaseStmt)
- r := ncas.Left()
+ r := ncas.Comm
setlineno(ncas)
// Append any new body prologue to ninit.
@@ -927,17 +927,17 @@ func (o *Order) stmt(n ir.Node) {
case ir.OSELRECV2:
// case x, ok = <-c
r := r.(*ir.AssignListStmt)
- recv := r.Rlist().First().(*ir.UnaryExpr)
- recv.SetLeft(o.expr(recv.Left(), nil))
- if !ir.IsAutoTmp(recv.Left()) {
- recv.SetLeft(o.copyExpr(recv.Left()))
+ recv := r.Rhs.First().(*ir.UnaryExpr)
+ recv.X = o.expr(recv.X, nil)
+ if !ir.IsAutoTmp(recv.X) {
+ recv.X = o.copyExpr(recv.X)
}
init := r.PtrInit().Slice()
r.PtrInit().Set(nil)
- colas := r.Colas()
+ colas := r.Def
do := func(i int, t *types.Type) {
- n := r.List().Index(i)
+ n := r.Lhs.Index(i)
if ir.IsBlank(n) {
return
}
@@ -946,7 +946,7 @@ func (o *Order) stmt(n ir.Node) {
// declaration (and possible allocation) until inside the case body.
// Delete the ODCL nodes here and recreate them inside the body below.
if colas {
- if len(init) > 0 && init[0].Op() == ir.ODCL && init[0].(*ir.Decl).Left() == n {
+ if len(init) > 0 && init[0].Op() == ir.ODCL && init[0].(*ir.Decl).X == n {
init = init[1:]
}
dcl := typecheck(ir.NewDecl(base.Pos, ir.ODCL, n), ctxStmt)
@@ -955,9 +955,9 @@ func (o *Order) stmt(n ir.Node) {
tmp := o.newTemp(t, t.HasPointers())
as := typecheck(ir.NewAssignStmt(base.Pos, n, conv(tmp, n.Type())), ctxStmt)
ncas.PtrInit().Append(as)
- r.PtrList().SetIndex(i, tmp)
+ (&r.Lhs).SetIndex(i, tmp)
}
- do(0, recv.Left().Type().Elem())
+ do(0, recv.X.Type().Elem())
do(1, types.Types[types.TBOOL])
if len(init) != 0 {
ir.DumpList("ninit", r.Init())
@@ -974,28 +974,28 @@ func (o *Order) stmt(n ir.Node) {
// case c <- x
// r->left is c, r->right is x, both are always evaluated.
- r.SetLeft(o.expr(r.Left(), nil))
+ r.Chan = o.expr(r.Chan, nil)
- if !ir.IsAutoTmp(r.Left()) {
- r.SetLeft(o.copyExpr(r.Left()))
+ if !ir.IsAutoTmp(r.Chan) {
+ r.Chan = o.copyExpr(r.Chan)
}
- r.SetRight(o.expr(r.Right(), nil))
- if !ir.IsAutoTmp(r.Right()) {
- r.SetRight(o.copyExpr(r.Right()))
+ r.Value = o.expr(r.Value, nil)
+ if !ir.IsAutoTmp(r.Value) {
+ r.Value = o.copyExpr(r.Value)
}
}
}
// Now that we have accumulated all the temporaries, clean them.
// Also insert any ninit queued during the previous loop.
// (The temporary cleaning must follow that ninit work.)
- for _, cas := range n.List().Slice() {
+ for _, cas := range n.Cases.Slice() {
cas := cas.(*ir.CaseStmt)
- orderBlock(cas.PtrBody(), o.free)
- cas.PtrBody().Prepend(o.cleanTempNoPop(t)...)
+ orderBlock(&cas.Body, o.free)
+ cas.Body.Prepend(o.cleanTempNoPop(t)...)
// TODO(mdempsky): Is this actually necessary?
// walkselect appears to walk Ninit.
- cas.PtrBody().Prepend(cas.Init().Slice()...)
+ cas.Body.Prepend(cas.Init().Slice()...)
cas.PtrInit().Set(nil)
}
@@ -1006,14 +1006,14 @@ func (o *Order) stmt(n ir.Node) {
case ir.OSEND:
n := n.(*ir.SendStmt)
t := o.markTemp()
- n.SetLeft(o.expr(n.Left(), nil))
- n.SetRight(o.expr(n.Right(), nil))
+ n.Chan = o.expr(n.Chan, nil)
+ n.Value = o.expr(n.Value, nil)
if instrumenting {
// Force copying to the stack so that (chan T)(nil) <- x
// is still instrumented as a read of x.
- n.SetRight(o.copyExpr(n.Right()))
+ n.Value = o.copyExpr(n.Value)
} else {
- n.SetRight(o.addrTemp(n.Right()))
+ n.Value = o.addrTemp(n.Value)
}
o.out = append(o.out, n)
o.cleanTemp(t)
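A sketch of the instrumented form (tmp is a hypothetical temporary):

	tmp := x // explicit stack copy; the race detector sees a read of x
	c <- tmp

Without the copy, a send on a nil channel would block before x is ever read, so the access would go unobserved.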
@@ -1029,15 +1029,15 @@ func (o *Order) stmt(n ir.Node) {
n := n.(*ir.SwitchStmt)
if base.Debug.Libfuzzer != 0 && !hasDefaultCase(n) {
// Add empty "default:" case for instrumentation.
- n.PtrList().Append(ir.NewCaseStmt(base.Pos, nil, nil))
+ n.Cases.Append(ir.NewCaseStmt(base.Pos, nil, nil))
}
t := o.markTemp()
- n.SetLeft(o.expr(n.Left(), nil))
- for _, ncas := range n.List().Slice() {
+ n.Tag = o.expr(n.Tag, nil)
+ for _, ncas := range n.Cases.Slice() {
ncas := ncas.(*ir.CaseStmt)
- o.exprListInPlace(ncas.List())
- orderBlock(ncas.PtrBody(), o.free)
+ o.exprListInPlace(ncas.List)
+ orderBlock(&ncas.Body, o.free)
}
o.out = append(o.out, n)
@@ -1048,9 +1048,9 @@ func (o *Order) stmt(n ir.Node) {
}
func hasDefaultCase(n *ir.SwitchStmt) bool {
- for _, ncas := range n.List().Slice() {
+ for _, ncas := range n.Cases.Slice() {
ncas := ncas.(*ir.CaseStmt)
- if ncas.List().Len() == 0 {
+ if ncas.List.Len() == 0 {
return true
}
}
@@ -1111,10 +1111,10 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
// Fewer than 5 strings use direct runtime helpers.
case ir.OADDSTR:
n := n.(*ir.AddStringExpr)
- o.exprList(n.List())
+ o.exprList(n.List)
- if n.List().Len() > 5 {
- t := types.NewArray(types.Types[types.TSTRING], int64(n.List().Len()))
+ if n.List.Len() > 5 {
+ t := types.NewArray(types.Types[types.TSTRING], int64(n.List.Len()))
n.Prealloc = o.newTemp(t, false)
}
@@ -1128,13 +1128,13 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
hasbyte := false
haslit := false
- for _, n1 := range n.List().Slice() {
+ for _, n1 := range n.List.Slice() {
hasbyte = hasbyte || n1.Op() == ir.OBYTES2STR
haslit = haslit || n1.Op() == ir.OLITERAL && len(ir.StringVal(n1)) != 0
}
if haslit && hasbyte {
- for _, n2 := range n.List().Slice() {
+ for _, n2 := range n.List.Slice() {
if n2.Op() == ir.OBYTES2STR {
n2 := n2.(*ir.ConvExpr)
n2.SetOp(ir.OBYTES2STRTMP)
@@ -1145,16 +1145,16 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
case ir.OINDEXMAP:
n := n.(*ir.IndexExpr)
- n.SetLeft(o.expr(n.Left(), nil))
- n.SetRight(o.expr(n.Right(), nil))
+ n.X = o.expr(n.X, nil)
+ n.Index = o.expr(n.Index, nil)
needCopy := false
- if !n.IndexMapLValue() {
+ if !n.Assigned {
// Enforce that any []byte slices we are not copying
// can not be changed before the map index by forcing
// the map index to happen immediately following the
// conversions. See copyExpr a few lines below.
- needCopy = mapKeyReplaceStrConv(n.Right())
+ needCopy = mapKeyReplaceStrConv(n.Index)
if instrumenting {
// Race detector needs the copy.
@@ -1163,7 +1163,7 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
}
// key must be addressable
- n.SetRight(o.mapKeyTemp(n.Left().Type(), n.Right()))
+ n.Index = o.mapKeyTemp(n.X.Type(), n.Index)
if needCopy {
return o.copyExpr(n)
}
@@ -1173,22 +1173,22 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
// temporary to pass to the runtime conversion routine.
case ir.OCONVIFACE:
n := n.(*ir.ConvExpr)
- n.SetLeft(o.expr(n.Left(), nil))
- if n.Left().Type().IsInterface() {
+ n.X = o.expr(n.X, nil)
+ if n.X.Type().IsInterface() {
return n
}
- if _, needsaddr := convFuncName(n.Left().Type(), n.Type()); needsaddr || isStaticCompositeLiteral(n.Left()) {
+ if _, needsaddr := convFuncName(n.X.Type(), n.Type()); needsaddr || isStaticCompositeLiteral(n.X) {
// Need a temp if we need to pass the address to the conversion function.
// We also process static composite literal node here, making a named static global
// whose address we can put directly in an interface (see OCONVIFACE case in walk).
- n.SetLeft(o.addrTemp(n.Left()))
+ n.X = o.addrTemp(n.X)
}
return n
case ir.OCONVNOP:
n := n.(*ir.ConvExpr)
- if n.Type().IsKind(types.TUNSAFEPTR) && n.Left().Type().IsKind(types.TUINTPTR) && (n.Left().Op() == ir.OCALLFUNC || n.Left().Op() == ir.OCALLINTER || n.Left().Op() == ir.OCALLMETH) {
- call := n.Left().(*ir.CallExpr)
+ if n.Type().IsKind(types.TUNSAFEPTR) && n.X.Type().IsKind(types.TUINTPTR) && (n.X.Op() == ir.OCALLFUNC || n.X.Op() == ir.OCALLINTER || n.X.Op() == ir.OCALLMETH) {
+ call := n.X.(*ir.CallExpr)
// When reordering unsafe.Pointer(f()) into a separate
// statement, the conversion and function call must stay
// together. See golang.org/issue/15329.
@@ -1198,7 +1198,7 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
return o.copyExpr(n)
}
} else {
- n.SetLeft(o.expr(n.Left(), nil))
+ n.X = o.expr(n.X, nil)
}
return n
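The constraint from golang.org/issue/15329, in source form (f and p are illustrative names):

	p := unsafe.Pointer(f()) // f returns uintptr

The conversion must not be reordered into tmp := f(); p := unsafe.Pointer(tmp), because the uintptr held in tmp would not keep the referenced object alive across the statement boundary.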
@@ -1216,7 +1216,7 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
r := o.newTemp(n.Type(), false)
// Evaluate left-hand side.
- lhs := o.expr(n.Left(), nil)
+ lhs := o.expr(n.X, nil)
o.out = append(o.out, typecheck(ir.NewAssignStmt(base.Pos, r, lhs), ctxStmt))
// Evaluate right-hand side, save generated code.
@@ -1224,7 +1224,7 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
o.out = nil
t := o.markTemp()
o.edge()
- rhs := o.expr(n.Right(), nil)
+ rhs := o.expr(n.Y, nil)
o.out = append(o.out, typecheck(ir.NewAssignStmt(base.Pos, r, rhs), ctxStmt))
o.cleanTemp(t)
gen := o.out
@@ -1233,9 +1233,9 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
// If left-hand side doesn't cause a short-circuit, issue right-hand side.
nif := ir.NewIfStmt(base.Pos, r, nil, nil)
if n.Op() == ir.OANDAND {
- nif.PtrBody().Set(gen)
+ nif.Body.Set(gen)
} else {
- nif.PtrRlist().Set(gen)
+ nif.Else.Set(gen)
}
o.out = append(o.out, nif)
return r
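At the source level, this case rewrites (names invented for the sketch):

	v := a() && b()

into, roughly:

	r := a()
	if r {
		r = b()
	}
	v := r

with the saved right-hand-side statements placed in the if body for OANDAND and in the else list for OOROR.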
@@ -1261,8 +1261,8 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
if isRuneCount(n) {
// len([]rune(s)) is rewritten to runtime.countrunes(s) later.
- conv := n.(*ir.UnaryExpr).Left().(*ir.ConvExpr)
- conv.SetLeft(o.expr(conv.Left(), nil))
+ conv := n.(*ir.UnaryExpr).X.(*ir.ConvExpr)
+ conv.X = o.expr(conv.X, nil)
} else {
o.call(n)
}
@@ -1276,21 +1276,21 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
// Check for append(x, make([]T, y)...) .
n := n.(*ir.CallExpr)
if isAppendOfMake(n) {
- n.List().SetFirst(o.expr(n.List().First(), nil)) // order x
- mk := n.List().Second().(*ir.MakeExpr)
- mk.SetLeft(o.expr(mk.Left(), nil)) // order y
+ n.Args.SetFirst(o.expr(n.Args.First(), nil)) // order x
+ mk := n.Args.Second().(*ir.MakeExpr)
+ mk.Len = o.expr(mk.Len, nil) // order y
} else {
- o.exprList(n.List())
+ o.exprList(n.Args)
}
- if lhs == nil || lhs.Op() != ir.ONAME && !samesafeexpr(lhs, n.List().First()) {
+ if lhs == nil || lhs.Op() != ir.ONAME && !samesafeexpr(lhs, n.Args.First()) {
return o.copyExpr(n)
}
return n
case ir.OSLICE, ir.OSLICEARR, ir.OSLICESTR, ir.OSLICE3, ir.OSLICE3ARR:
n := n.(*ir.SliceExpr)
- n.SetLeft(o.expr(n.Left(), nil))
+ n.X = o.expr(n.X, nil)
low, high, max := n.SliceBounds()
low = o.expr(low, nil)
low = o.cheapExpr(low)
@@ -1299,21 +1299,21 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
max = o.expr(max, nil)
max = o.cheapExpr(max)
n.SetSliceBounds(low, high, max)
- if lhs == nil || lhs.Op() != ir.ONAME && !samesafeexpr(lhs, n.Left()) {
+ if lhs == nil || lhs.Op() != ir.ONAME && !samesafeexpr(lhs, n.X) {
return o.copyExpr(n)
}
return n
case ir.OCLOSURE:
n := n.(*ir.ClosureExpr)
- if n.Transient() && len(n.Func().ClosureVars) > 0 {
+ if n.Transient() && len(n.Func.ClosureVars) > 0 {
n.Prealloc = o.newTemp(closureType(n), false)
}
return n
case ir.OCALLPART:
n := n.(*ir.CallPartExpr)
- n.SetLeft(o.expr(n.Left(), nil))
+ n.X = o.expr(n.X, nil)
if n.Transient() {
t := partialCallType(n)
n.Prealloc = o.newTemp(t, false)
@@ -1322,7 +1322,7 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
case ir.OSLICELIT:
n := n.(*ir.CompLitExpr)
- o.exprList(n.List())
+ o.exprList(n.List)
if n.Transient() {
t := types.NewArray(n.Type().Elem(), n.Len)
n.Prealloc = o.newTemp(t, false)
@@ -1331,7 +1331,7 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
case ir.ODOTTYPE, ir.ODOTTYPE2:
n := n.(*ir.TypeAssertExpr)
- n.SetLeft(o.expr(n.Left(), nil))
+ n.X = o.expr(n.X, nil)
if !isdirectiface(n.Type()) || instrumenting {
return o.copyExprClear(n)
}
@@ -1339,32 +1339,32 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
case ir.ORECV:
n := n.(*ir.UnaryExpr)
- n.SetLeft(o.expr(n.Left(), nil))
+ n.X = o.expr(n.X, nil)
return o.copyExprClear(n)
case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
n := n.(*ir.BinaryExpr)
- n.SetLeft(o.expr(n.Left(), nil))
- n.SetRight(o.expr(n.Right(), nil))
+ n.X = o.expr(n.X, nil)
+ n.Y = o.expr(n.Y, nil)
- t := n.Left().Type()
+ t := n.X.Type()
switch {
case t.IsString():
// Mark string(byteSlice) arguments to reuse byteSlice backing
// buffer during conversion. String comparison does not
// memorize the strings for later use, so it is safe.
- if n.Left().Op() == ir.OBYTES2STR {
- n.Left().(*ir.ConvExpr).SetOp(ir.OBYTES2STRTMP)
+ if n.X.Op() == ir.OBYTES2STR {
+ n.X.(*ir.ConvExpr).SetOp(ir.OBYTES2STRTMP)
}
- if n.Right().Op() == ir.OBYTES2STR {
- n.Right().(*ir.ConvExpr).SetOp(ir.OBYTES2STRTMP)
+ if n.Y.Op() == ir.OBYTES2STR {
+ n.Y.(*ir.ConvExpr).SetOp(ir.OBYTES2STRTMP)
}
case t.IsStruct() || t.IsArray():
// for complex comparisons, we need both args to be
// addressable so we can pass them to the runtime.
- n.SetLeft(o.addrTemp(n.Left()))
- n.SetRight(o.addrTemp(n.Right()))
+ n.X = o.addrTemp(n.X)
+ n.Y = o.addrTemp(n.Y)
}
return n
@@ -1385,13 +1385,13 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
// the keys and values before storing any of them to the map.
// See issue 26552.
n := n.(*ir.CompLitExpr)
- entries := n.List().Slice()
+ entries := n.List.Slice()
statics := entries[:0]
var dynamics []*ir.KeyExpr
for _, r := range entries {
r := r.(*ir.KeyExpr)
- if !isStaticCompositeLiteral(r.Left()) || !isStaticCompositeLiteral(r.Right()) {
+ if !isStaticCompositeLiteral(r.Key) || !isStaticCompositeLiteral(r.Value) {
dynamics = append(dynamics, r)
continue
}
@@ -1399,14 +1399,14 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
// Recursively ordering some static entries can change them to dynamic;
// e.g., OCONVIFACE nodes. See #31777.
r = o.expr(r, nil).(*ir.KeyExpr)
- if !isStaticCompositeLiteral(r.Left()) || !isStaticCompositeLiteral(r.Right()) {
+ if !isStaticCompositeLiteral(r.Key) || !isStaticCompositeLiteral(r.Value) {
dynamics = append(dynamics, r)
continue
}
statics = append(statics, r)
}
- n.PtrList().Set(statics)
+ n.List.Set(statics)
if len(dynamics) == 0 {
return n
@@ -1420,7 +1420,7 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
// Emit eval+insert of dynamic entries, one at a time.
for _, r := range dynamics {
- as := ir.NewAssignStmt(base.Pos, ir.NewIndexExpr(base.Pos, m, r.Left()), r.Right())
+ as := ir.NewAssignStmt(base.Pos, ir.NewIndexExpr(base.Pos, m, r.Key), r.Value)
typecheck(as, ctxStmt) // Note: this converts the OINDEX to an OINDEXMAP
o.stmt(as)
}
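For a mixed literal, the split looks roughly like this (illustrative):

	m := map[string]int{"a": 1, "b": f()}

The static entry "a": 1 stays in the composite literal, while the dynamic entry is rewritten to m["b"] = f(), typechecked into an OINDEXMAP assignment, and ordered as an ordinary statement.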
@@ -1441,10 +1441,10 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
func (o *Order) as2(n *ir.AssignListStmt) {
tmplist := []ir.Node{}
left := []ir.Node{}
- for ni, l := range n.List().Slice() {
+ for ni, l := range n.Lhs.Slice() {
if !ir.IsBlank(l) {
tmp := o.newTemp(l.Type(), l.Type().HasPointers())
- n.List().SetIndex(ni, tmp)
+ n.Lhs.SetIndex(ni, tmp)
tmplist = append(tmplist, tmp)
left = append(left, l)
}
@@ -1453,8 +1453,8 @@ func (o *Order) as2(n *ir.AssignListStmt) {
o.out = append(o.out, n)
as := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
- as.PtrList().Set(left)
- as.PtrRlist().Set(tmplist)
+ as.Lhs.Set(left)
+ as.Rhs.Set(tmplist)
o.stmt(typecheck(as, ctxStmt))
}
@@ -1462,25 +1462,25 @@ func (o *Order) as2(n *ir.AssignListStmt) {
// Just like as2, this also adds temporaries to ensure left-to-right assignment.
func (o *Order) okAs2(n *ir.AssignListStmt) {
var tmp1, tmp2 ir.Node
- if !ir.IsBlank(n.List().First()) {
- typ := n.Rlist().First().Type()
+ if !ir.IsBlank(n.Lhs.First()) {
+ typ := n.Rhs.First().Type()
tmp1 = o.newTemp(typ, typ.HasPointers())
}
- if !ir.IsBlank(n.List().Second()) {
+ if !ir.IsBlank(n.Lhs.Second()) {
tmp2 = o.newTemp(types.Types[types.TBOOL], false)
}
o.out = append(o.out, n)
if tmp1 != nil {
- r := ir.NewAssignStmt(base.Pos, n.List().First(), tmp1)
+ r := ir.NewAssignStmt(base.Pos, n.Lhs.First(), tmp1)
o.mapAssign(typecheck(r, ctxStmt))
- n.List().SetFirst(tmp1)
+ n.Lhs.SetFirst(tmp1)
}
if tmp2 != nil {
- r := ir.NewAssignStmt(base.Pos, n.List().Second(), conv(tmp2, n.List().Second().Type()))
+ r := ir.NewAssignStmt(base.Pos, n.Lhs.Second(), conv(tmp2, n.Lhs.Second().Type()))
o.mapAssign(typecheck(r, ctxStmt))
- n.List().SetSecond(tmp2)
+ n.Lhs.SetSecond(tmp2)
}
}
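okAs2 covers the comma-ok forms; sketched with invented names:

	v, ok = m[k]

becomes

	tmp1, tmp2 = m[k]
	v = tmp1
	ok = tmp2

where tmp1 takes its type from the right-hand side and tmp2 is boolean, so the final assignments can be routed through mapAssign.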
diff --git a/src/cmd/compile/internal/gc/pgen.go b/src/cmd/compile/internal/gc/pgen.go
index e43471dbca..32550c8bd4 100644
--- a/src/cmd/compile/internal/gc/pgen.go
+++ b/src/cmd/compile/internal/gc/pgen.go
@@ -68,11 +68,11 @@ func emitptrargsmap(fn *ir.Func) {
// the top of the stack and increasing in size.
// Non-autos sort on offset.
func cmpstackvarlt(a, b *ir.Name) bool {
- if (a.Class() == ir.PAUTO) != (b.Class() == ir.PAUTO) {
- return b.Class() == ir.PAUTO
+ if (a.Class_ == ir.PAUTO) != (b.Class_ == ir.PAUTO) {
+ return b.Class_ == ir.PAUTO
}
- if a.Class() != ir.PAUTO {
+ if a.Class_ != ir.PAUTO {
return a.FrameOffset() < b.FrameOffset()
}
@@ -113,7 +113,7 @@ func (s *ssafn) AllocFrame(f *ssa.Func) {
// Mark the PAUTO's unused.
for _, ln := range fn.Dcl {
- if ln.Class() == ir.PAUTO {
+ if ln.Class_ == ir.PAUTO {
ln.SetUsed(false)
}
}
@@ -128,7 +128,7 @@ func (s *ssafn) AllocFrame(f *ssa.Func) {
for _, b := range f.Blocks {
for _, v := range b.Values {
if n, ok := v.Aux.(*ir.Name); ok {
- switch n.Class() {
+ switch n.Class_ {
case ir.PPARAM, ir.PPARAMOUT:
// Don't modify nodfp; it is a global.
if n != nodfp {
@@ -154,7 +154,7 @@ func (s *ssafn) AllocFrame(f *ssa.Func) {
// Reassign stack offsets of the locals that are used.
lastHasPtr := false
for i, n := range fn.Dcl {
- if n.Op() != ir.ONAME || n.Class() != ir.PAUTO {
+ if n.Op() != ir.ONAME || n.Class_ != ir.PAUTO {
continue
}
if !n.Used() {
@@ -207,7 +207,7 @@ func funccompile(fn *ir.Func) {
// assign parameter offsets
dowidth(fn.Type())
- if fn.Body().Len() == 0 {
+ if fn.Body.Len() == 0 {
// Initialize ABI wrappers if necessary.
initLSym(fn, false)
emitptrargsmap(fn)
@@ -249,7 +249,7 @@ func compile(fn *ir.Func) {
// because symbols must be allocated before the parallel
// phase of the compiler.
for _, n := range fn.Dcl {
- switch n.Class() {
+ switch n.Class_ {
case ir.PPARAM, ir.PPARAMOUT, ir.PAUTO:
if livenessShouldTrack(n) && n.Addrtaken() {
dtypesym(n.Type())
@@ -360,7 +360,7 @@ func compileFunctions() {
// since they're most likely to be the slowest.
// This helps avoid stragglers.
sort.Slice(compilequeue, func(i, j int) bool {
- return compilequeue[i].Body().Len() > compilequeue[j].Body().Len()
+ return compilequeue[i].Body.Len() > compilequeue[j].Body.Len()
})
}
var wg sync.WaitGroup
@@ -440,7 +440,7 @@ func debuginfo(fnsym *obj.LSym, infosym *obj.LSym, curfn interface{}) ([]dwarf.S
if n.Op() != ir.ONAME { // might be OTYPE or OLITERAL
continue
}
- switch n.Class() {
+ switch n.Class_ {
case ir.PAUTO:
if !n.Used() {
// Text == nil -> generating abstract function
@@ -533,7 +533,7 @@ func createSimpleVar(fnsym *obj.LSym, n *ir.Name) *dwarf.Var {
var abbrev int
var offs int64
- switch n.Class() {
+ switch n.Class_ {
case ir.PAUTO:
offs = n.FrameOffset()
abbrev = dwarf.DW_ABRV_AUTO
@@ -549,7 +549,7 @@ func createSimpleVar(fnsym *obj.LSym, n *ir.Name) *dwarf.Var {
abbrev = dwarf.DW_ABRV_PARAM
offs = n.FrameOffset() + base.Ctxt.FixedFrameSize()
default:
- base.Fatalf("createSimpleVar unexpected class %v for node %v", n.Class(), n)
+ base.Fatalf("createSimpleVar unexpected class %v for node %v", n.Class_, n)
}
typename := dwarf.InfoPrefix + typesymname(n.Type())
@@ -566,7 +566,7 @@ func createSimpleVar(fnsym *obj.LSym, n *ir.Name) *dwarf.Var {
declpos := base.Ctxt.InnermostPos(declPos(n))
return &dwarf.Var{
Name: n.Sym().Name,
- IsReturnValue: n.Class() == ir.PPARAMOUT,
+ IsReturnValue: n.Class_ == ir.PPARAMOUT,
IsInlFormal: n.Name().InlFormal(),
Abbrev: abbrev,
StackOffset: int32(offs),
@@ -643,7 +643,7 @@ func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *ir.Func, apDecls []*ir
if c == '.' || n.Type().IsUntyped() {
continue
}
- if n.Class() == ir.PPARAM && !canSSAType(n.Type()) {
+ if n.Class_ == ir.PPARAM && !canSSAType(n.Type()) {
// SSA-able args get location lists, and may move in and
// out of registers, so those are handled elsewhere.
// Autos and named output params seem to get handled
@@ -658,10 +658,10 @@ func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *ir.Func, apDecls []*ir
typename := dwarf.InfoPrefix + typesymname(n.Type())
decls = append(decls, n)
abbrev := dwarf.DW_ABRV_AUTO_LOCLIST
- isReturnValue := (n.Class() == ir.PPARAMOUT)
- if n.Class() == ir.PPARAM || n.Class() == ir.PPARAMOUT {
+ isReturnValue := (n.Class_ == ir.PPARAMOUT)
+ if n.Class_ == ir.PPARAM || n.Class_ == ir.PPARAMOUT {
abbrev = dwarf.DW_ABRV_PARAM_LOCLIST
- } else if n.Class() == ir.PAUTOHEAP {
+ } else if n.Class_ == ir.PAUTOHEAP {
// If dcl in question has been promoted to heap, do a bit
// of extra work to recover original class (auto or param);
// see issue 30908. This insures that we get the proper
@@ -670,9 +670,9 @@ func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *ir.Func, apDecls []*ir
// and not stack).
// TODO(thanm): generate a better location expression
stackcopy := n.Name().Stackcopy
- if stackcopy != nil && (stackcopy.Class() == ir.PPARAM || stackcopy.Class() == ir.PPARAMOUT) {
+ if stackcopy != nil && (stackcopy.Class_ == ir.PPARAM || stackcopy.Class_ == ir.PPARAMOUT) {
abbrev = dwarf.DW_ABRV_PARAM_LOCLIST
- isReturnValue = (stackcopy.Class() == ir.PPARAMOUT)
+ isReturnValue = (stackcopy.Class_ == ir.PPARAMOUT)
}
}
inlIndex := 0
@@ -731,7 +731,7 @@ func preInliningDcls(fnsym *obj.LSym) []*ir.Name {
func stackOffset(slot ssa.LocalSlot) int32 {
n := slot.N
var off int64
- switch n.Class() {
+ switch n.Class_ {
case ir.PAUTO:
off = n.FrameOffset()
if base.Ctxt.FixedFrameSize() == 0 {
@@ -753,7 +753,7 @@ func createComplexVar(fnsym *obj.LSym, fn *ir.Func, varID ssa.VarID) *dwarf.Var
n := debug.Vars[varID]
var abbrev int
- switch n.Class() {
+ switch n.Class_ {
case ir.PAUTO:
abbrev = dwarf.DW_ABRV_AUTO_LOCLIST
case ir.PPARAM, ir.PPARAMOUT:
@@ -777,7 +777,7 @@ func createComplexVar(fnsym *obj.LSym, fn *ir.Func, varID ssa.VarID) *dwarf.Var
declpos := base.Ctxt.InnermostPos(n.Pos())
dvar := &dwarf.Var{
Name: n.Sym().Name,
- IsReturnValue: n.Class() == ir.PPARAMOUT,
+ IsReturnValue: n.Class_ == ir.PPARAMOUT,
IsInlFormal: n.Name().InlFormal(),
Abbrev: abbrev,
Type: base.Ctxt.Lookup(typename),
diff --git a/src/cmd/compile/internal/gc/pgen_test.go b/src/cmd/compile/internal/gc/pgen_test.go
index 3875fb7223..1170db2681 100644
--- a/src/cmd/compile/internal/gc/pgen_test.go
+++ b/src/cmd/compile/internal/gc/pgen_test.go
@@ -44,7 +44,7 @@ func TestCmpstackvar(t *testing.T) {
n := NewName(s)
n.SetType(t)
n.SetFrameOffset(xoffset)
- n.SetClass(cl)
+ n.Class_ = cl
return n
}
testdata := []struct {
@@ -159,7 +159,7 @@ func TestStackvarSort(t *testing.T) {
n := NewName(s)
n.SetType(t)
n.SetFrameOffset(xoffset)
- n.SetClass(cl)
+ n.Class_ = cl
return n
}
inp := []*ir.Name{
diff --git a/src/cmd/compile/internal/gc/plive.go b/src/cmd/compile/internal/gc/plive.go
index 77cd9c5b19..0b796ae7fa 100644
--- a/src/cmd/compile/internal/gc/plive.go
+++ b/src/cmd/compile/internal/gc/plive.go
@@ -211,7 +211,7 @@ func livenessShouldTrack(nn ir.Node) bool {
return false
}
n := nn.(*ir.Name)
- return (n.Class() == ir.PAUTO || n.Class() == ir.PPARAM || n.Class() == ir.PPARAMOUT) && n.Type().HasPointers()
+ return (n.Class_ == ir.PAUTO || n.Class_ == ir.PPARAM || n.Class_ == ir.PPARAMOUT) && n.Type().HasPointers()
}
// getvariables returns the list of on-stack variables that we need to track
@@ -238,7 +238,7 @@ func (lv *Liveness) initcache() {
lv.cache.initialized = true
for i, node := range lv.vars {
- switch node.Class() {
+ switch node.Class_ {
case ir.PPARAM:
// A return instruction with a p.to is a tail return, which brings
// the stack pointer back up (if it ever went down) and then jumps
@@ -494,7 +494,7 @@ func (lv *Liveness) pointerMap(liveout bvec, vars []*ir.Name, args, locals bvec)
break
}
node := vars[i]
- switch node.Class() {
+ switch node.Class_ {
case ir.PAUTO:
onebitwalktype1(node.Type(), node.FrameOffset()+lv.stkptrsize, locals)
@@ -795,7 +795,7 @@ func (lv *Liveness) epilogue() {
// don't need to keep the stack copy live?
if lv.fn.HasDefer() {
for i, n := range lv.vars {
- if n.Class() == ir.PPARAMOUT {
+ if n.Class_ == ir.PPARAMOUT {
if n.Name().IsOutputParamHeapAddr() {
// Just to be paranoid. Heap addresses are PAUTOs.
base.Fatalf("variable %v both output param and heap output param", n)
@@ -893,7 +893,7 @@ func (lv *Liveness) epilogue() {
if !liveout.Get(int32(i)) {
continue
}
- if n.Class() == ir.PPARAM {
+ if n.Class_ == ir.PPARAM {
continue // ok
}
base.Fatalf("bad live variable at entry of %v: %L", lv.fn.Nname, n)
@@ -926,7 +926,7 @@ func (lv *Liveness) epilogue() {
// the only things that can possibly be live are the
// input parameters.
for j, n := range lv.vars {
- if n.Class() != ir.PPARAM && lv.stackMaps[0].Get(int32(j)) {
+ if n.Class_ != ir.PPARAM && lv.stackMaps[0].Get(int32(j)) {
lv.f.Fatalf("%v %L recorded as live on entry", lv.fn.Nname, n)
}
}
@@ -1171,7 +1171,7 @@ func (lv *Liveness) emit() (argsSym, liveSym *obj.LSym) {
// (Nodes without pointers aren't in lv.vars; see livenessShouldTrack.)
var maxArgNode *ir.Name
for _, n := range lv.vars {
- switch n.Class() {
+ switch n.Class_ {
case ir.PPARAM, ir.PPARAMOUT:
if maxArgNode == nil || n.FrameOffset() > maxArgNode.FrameOffset() {
maxArgNode = n
diff --git a/src/cmd/compile/internal/gc/range.go b/src/cmd/compile/internal/gc/range.go
index 4a753328f2..3aa4ff71fa 100644
--- a/src/cmd/compile/internal/gc/range.go
+++ b/src/cmd/compile/internal/gc/range.go
@@ -27,7 +27,7 @@ func typecheckrange(n *ir.RangeStmt) {
// second half of dance, the first half being typecheckrangeExpr
n.SetTypecheck(1)
- ls := n.List().Slice()
+ ls := n.Vars.Slice()
for i1, n1 := range ls {
if n1.Typecheck() == 0 {
ls[i1] = typecheck(ls[i1], ctxExpr|ctxAssign)
@@ -35,19 +35,19 @@ func typecheckrange(n *ir.RangeStmt) {
}
decldepth++
- typecheckslice(n.Body().Slice(), ctxStmt)
+ typecheckslice(n.Body.Slice(), ctxStmt)
decldepth--
}
func typecheckrangeExpr(n *ir.RangeStmt) {
- n.SetRight(typecheck(n.Right(), ctxExpr))
+ n.X = typecheck(n.X, ctxExpr)
- t := n.Right().Type()
+ t := n.X.Type()
if t == nil {
return
}
// delicate little dance. see typecheckas2
- ls := n.List().Slice()
+ ls := n.Vars.Slice()
for i1, n1 := range ls {
if !ir.DeclaredBy(n1, n) {
ls[i1] = typecheck(ls[i1], ctxExpr|ctxAssign)
@@ -63,7 +63,7 @@ func typecheckrangeExpr(n *ir.RangeStmt) {
toomany := false
switch t.Kind() {
default:
- base.ErrorfAt(n.Pos(), "cannot range over %L", n.Right())
+ base.ErrorfAt(n.Pos(), "cannot range over %L", n.X)
return
case types.TARRAY, types.TSLICE:
@@ -76,13 +76,13 @@ func typecheckrangeExpr(n *ir.RangeStmt) {
case types.TCHAN:
if !t.ChanDir().CanRecv() {
- base.ErrorfAt(n.Pos(), "invalid operation: range %v (receive from send-only type %v)", n.Right(), n.Right().Type())
+ base.ErrorfAt(n.Pos(), "invalid operation: range %v (receive from send-only type %v)", n.X, n.X.Type())
return
}
t1 = t.Elem()
t2 = nil
- if n.List().Len() == 2 {
+ if n.Vars.Len() == 2 {
toomany = true
}
@@ -91,16 +91,16 @@ func typecheckrangeExpr(n *ir.RangeStmt) {
t2 = types.RuneType
}
- if n.List().Len() > 2 || toomany {
+ if n.Vars.Len() > 2 || toomany {
base.ErrorfAt(n.Pos(), "too many variables in range")
}
var v1, v2 ir.Node
- if n.List().Len() != 0 {
- v1 = n.List().First()
+ if n.Vars.Len() != 0 {
+ v1 = n.Vars.First()
}
- if n.List().Len() > 1 {
- v2 = n.List().Second()
+ if n.Vars.Len() > 1 {
+ v2 = n.Vars.Second()
}
// this is not only an optimization but also a requirement in the spec.
@@ -109,7 +109,7 @@ func typecheckrangeExpr(n *ir.RangeStmt) {
// present."
if ir.IsBlank(v2) {
if v1 != nil {
- n.PtrList().Set1(v1)
+ n.Vars.Set1(v1)
}
v2 = nil
}
@@ -159,7 +159,7 @@ func cheapComputableIndex(width int64) bool {
// the returned node.
func walkrange(nrange *ir.RangeStmt) ir.Node {
if isMapClear(nrange) {
- m := nrange.Right()
+ m := nrange.X
lno := setlineno(m)
n := mapClear(m)
base.Pos = lno
@@ -168,7 +168,7 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
nfor := ir.NewForStmt(nrange.Pos(), nil, nil, nil, nil)
nfor.SetInit(nrange.Init())
- nfor.SetSym(nrange.Sym())
+ nfor.Label = nrange.Label
// variable name conventions:
// ohv1, hv1, hv2: hidden (old) val 1, 2
@@ -179,17 +179,17 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
t := nrange.Type()
- a := nrange.Right()
+ a := nrange.X
lno := setlineno(a)
var v1, v2 ir.Node
- l := nrange.List().Len()
+ l := nrange.Vars.Len()
if l > 0 {
- v1 = nrange.List().First()
+ v1 = nrange.Vars.First()
}
if l > 1 {
- v2 = nrange.List().Second()
+ v2 = nrange.Vars.Second()
}
if ir.IsBlank(v2) {
@@ -227,8 +227,8 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
init = append(init, ir.NewAssignStmt(base.Pos, hv1, nil))
init = append(init, ir.NewAssignStmt(base.Pos, hn, ir.NewUnaryExpr(base.Pos, ir.OLEN, ha)))
- nfor.SetLeft(ir.NewBinaryExpr(base.Pos, ir.OLT, hv1, hn))
- nfor.SetRight(ir.NewAssignStmt(base.Pos, hv1, ir.NewBinaryExpr(base.Pos, ir.OADD, hv1, nodintconst(1))))
+ nfor.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, hv1, hn)
+ nfor.Post = ir.NewAssignStmt(base.Pos, hv1, ir.NewBinaryExpr(base.Pos, ir.OADD, hv1, nodintconst(1)))
// for range ha { body }
if v1 == nil {
@@ -249,8 +249,8 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
// Use OAS2 to correctly handle assignments
// of the form "v1, a[v1] := range".
a := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
- a.PtrList().Set2(v1, v2)
- a.PtrRlist().Set2(hv1, tmp)
+ a.Lhs.Set2(v1, v2)
+ a.Rhs.Set2(hv1, tmp)
body = []ir.Node{a}
break
}
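Assembled, the slice/array case builds roughly this loop (hv1, hn, ha are the hidden temporaries named in the comments):

	ha := a
	hv1, hn := 0, len(ha)
	for ; hv1 < hn; hv1 = hv1 + 1 {
		v1, v2 = hv1, ha[hv1]
		// body
	}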
@@ -268,7 +268,7 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
// elimination on the index variable (see #20711).
// Enhance the prove pass to understand this.
ifGuard = ir.NewIfStmt(base.Pos, nil, nil, nil)
- ifGuard.SetLeft(ir.NewBinaryExpr(base.Pos, ir.OLT, hv1, hn))
+ ifGuard.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, hv1, hn)
nfor.SetOp(ir.OFORUNTIL)
hp := temp(types.NewPtr(nrange.Type().Elem()))
@@ -279,8 +279,8 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
// Use OAS2 to correctly handle assignments
// of the form "v1, a[v1] := range".
a := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
- a.PtrList().Set2(v1, v2)
- a.PtrRlist().Set2(hv1, ir.NewStarExpr(base.Pos, hp))
+ a.Lhs.Set2(v1, v2)
+ a.Rhs.Set2(hv1, ir.NewStarExpr(base.Pos, hp))
body = append(body, a)
// Advance pointer as part of the late increment.
@@ -289,7 +289,7 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
// advancing the pointer is safe and won't go past the
// end of the allocation.
as := ir.NewAssignStmt(base.Pos, hp, addptr(hp, t.Elem().Width))
- nfor.PtrList().Set1(typecheck(as, ctxStmt))
+ nfor.Late.Set1(typecheck(as, ctxStmt))
case types.TMAP:
// order.stmt allocated the iterator for us.
@@ -305,11 +305,11 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
fn = substArgTypes(fn, t.Key(), t.Elem(), th)
init = append(init, mkcall1(fn, nil, nil, typename(t), ha, nodAddr(hit)))
- nfor.SetLeft(ir.NewBinaryExpr(base.Pos, ir.ONE, ir.NewSelectorExpr(base.Pos, ir.ODOT, hit, keysym), nodnil()))
+ nfor.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, ir.NewSelectorExpr(base.Pos, ir.ODOT, hit, keysym), nodnil())
fn = syslook("mapiternext")
fn = substArgTypes(fn, th)
- nfor.SetRight(mkcall1(fn, nil, nil, nodAddr(hit)))
+ nfor.Post = mkcall1(fn, nil, nil, nodAddr(hit))
key := ir.NewStarExpr(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, hit, keysym))
if v1 == nil {
@@ -319,8 +319,8 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
} else {
elem := ir.NewStarExpr(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, hit, elemsym))
a := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
- a.PtrList().Set2(v1, v2)
- a.PtrRlist().Set2(key, elem)
+ a.Lhs.Set2(v1, v2)
+ a.Rhs.Set2(key, elem)
body = []ir.Node{a}
}
@@ -335,12 +335,12 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
}
hb := temp(types.Types[types.TBOOL])
- nfor.SetLeft(ir.NewBinaryExpr(base.Pos, ir.ONE, hb, nodbool(false)))
+ nfor.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, hb, nodbool(false))
a := ir.NewAssignListStmt(base.Pos, ir.OAS2RECV, nil, nil)
a.SetTypecheck(1)
- a.PtrList().Set2(hv1, hb)
- a.PtrRlist().Set1(ir.NewUnaryExpr(base.Pos, ir.ORECV, ha))
- nfor.Left().PtrInit().Set1(a)
+ a.Lhs.Set2(hv1, hb)
+ a.Rhs.Set1(ir.NewUnaryExpr(base.Pos, ir.ORECV, ha))
+ nfor.Cond.PtrInit().Set1(a)
if v1 == nil {
body = nil
} else {
@@ -378,7 +378,7 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
init = append(init, ir.NewAssignStmt(base.Pos, hv1, nil))
// hv1 < len(ha)
- nfor.SetLeft(ir.NewBinaryExpr(base.Pos, ir.OLT, hv1, ir.NewUnaryExpr(base.Pos, ir.OLEN, ha)))
+ nfor.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, hv1, ir.NewUnaryExpr(base.Pos, ir.OLEN, ha))
if v1 != nil {
// hv1t = hv1
@@ -392,19 +392,19 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
// if hv2 < utf8.RuneSelf
nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
- nif.SetLeft(ir.NewBinaryExpr(base.Pos, ir.OLT, hv2, nodintconst(utf8.RuneSelf)))
+ nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, hv2, nodintconst(utf8.RuneSelf))
// hv1++
- nif.PtrBody().Set1(ir.NewAssignStmt(base.Pos, hv1, ir.NewBinaryExpr(base.Pos, ir.OADD, hv1, nodintconst(1))))
+ nif.Body.Set1(ir.NewAssignStmt(base.Pos, hv1, ir.NewBinaryExpr(base.Pos, ir.OADD, hv1, nodintconst(1))))
// } else {
eif := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
- nif.PtrRlist().Set1(eif)
+ nif.Else.Set1(eif)
// hv2, hv1 = decoderune(ha, hv1)
- eif.PtrList().Set2(hv2, hv1)
+ eif.Lhs.Set2(hv2, hv1)
fn := syslook("decoderune")
- eif.PtrRlist().Set1(mkcall1(fn, fn.Type().Results(), nil, ha, hv1))
+ eif.Rhs.Set1(mkcall1(fn, fn.Type().Results(), nil, ha, hv1))
body = append(body, nif)
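In source form, the string case builds approximately (hv1, hv2, hv1t, ha as in the comments):

	for hv1 < len(ha) {
		hv1t := hv1
		// hv2 holds the byte at ha[hv1] (loaded in lines not shown)
		if hv2 < utf8.RuneSelf {
			hv1++
		} else {
			hv2, hv1 = decoderune(ha, hv1)
		}
		v1, v2 = hv1t, hv2
		// body
	}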
@@ -412,8 +412,8 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
if v2 != nil {
// v1, v2 = hv1t, hv2
a := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
- a.PtrList().Set2(v1, v2)
- a.PtrRlist().Set2(hv1t, hv2)
+ a.Lhs.Set2(v1, v2)
+ a.Rhs.Set2(hv1t, hv2)
body = append(body, a)
} else {
// v1 = hv1t
@@ -431,18 +431,18 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
nfor.PtrInit().Append(init...)
}
- typecheckslice(nfor.Left().Init().Slice(), ctxStmt)
+ typecheckslice(nfor.Cond.Init().Slice(), ctxStmt)
- nfor.SetLeft(typecheck(nfor.Left(), ctxExpr))
- nfor.SetLeft(defaultlit(nfor.Left(), nil))
- nfor.SetRight(typecheck(nfor.Right(), ctxStmt))
+ nfor.Cond = typecheck(nfor.Cond, ctxExpr)
+ nfor.Cond = defaultlit(nfor.Cond, nil)
+ nfor.Post = typecheck(nfor.Post, ctxStmt)
typecheckslice(body, ctxStmt)
- nfor.PtrBody().Append(body...)
- nfor.PtrBody().Append(nrange.Body().Slice()...)
+ nfor.Body.Append(body...)
+ nfor.Body.Append(nrange.Body.Slice()...)
var n ir.Node = nfor
if ifGuard != nil {
- ifGuard.PtrBody().Set1(n)
+ ifGuard.Body.Set1(n)
n = ifGuard
}
@@ -464,11 +464,11 @@ func isMapClear(n *ir.RangeStmt) bool {
return false
}
- if n.Op() != ir.ORANGE || n.Type().Kind() != types.TMAP || n.List().Len() != 1 {
+ if n.Op() != ir.ORANGE || n.Type().Kind() != types.TMAP || n.Vars.Len() != 1 {
return false
}
- k := n.List().First()
+ k := n.Vars.First()
if k == nil || ir.IsBlank(k) {
return false
}
@@ -478,17 +478,17 @@ func isMapClear(n *ir.RangeStmt) bool {
return false
}
- if n.Body().Len() != 1 {
+ if n.Body.Len() != 1 {
return false
}
- stmt := n.Body().First() // only stmt in body
+ stmt := n.Body.First() // only stmt in body
if stmt == nil || stmt.Op() != ir.ODELETE {
return false
}
- m := n.Right()
- if delete := stmt.(*ir.CallExpr); !samesafeexpr(delete.List().First(), m) || !samesafeexpr(delete.List().Second(), k) {
+ m := n.X
+ if delete := stmt.(*ir.CallExpr); !samesafeexpr(delete.Args.First(), m) || !samesafeexpr(delete.Args.Second(), k) {
return false
}
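The idiom being matched, exactly as it appears in user code:

	for k := range m {
		delete(m, k)
	}

When every check passes, walkrange substitutes a single runtime map-clear call for the whole loop.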
@@ -531,26 +531,26 @@ func arrayClear(loop *ir.RangeStmt, v1, v2, a ir.Node) ir.Node {
return nil
}
- if loop.Body().Len() != 1 || loop.Body().First() == nil {
+ if loop.Body.Len() != 1 || loop.Body.First() == nil {
return nil
}
- stmt1 := loop.Body().First() // only stmt in body
+ stmt1 := loop.Body.First() // only stmt in body
if stmt1.Op() != ir.OAS {
return nil
}
stmt := stmt1.(*ir.AssignStmt)
- if stmt.Left().Op() != ir.OINDEX {
+ if stmt.X.Op() != ir.OINDEX {
return nil
}
- lhs := stmt.Left().(*ir.IndexExpr)
+ lhs := stmt.X.(*ir.IndexExpr)
- if !samesafeexpr(lhs.Left(), a) || !samesafeexpr(lhs.Right(), v1) {
+ if !samesafeexpr(lhs.X, a) || !samesafeexpr(lhs.Index, v1) {
return nil
}
elemsize := loop.Type().Elem().Width
- if elemsize <= 0 || !isZero(stmt.Right()) {
+ if elemsize <= 0 || !isZero(stmt.Y) {
return nil
}
@@ -562,8 +562,8 @@ func arrayClear(loop *ir.RangeStmt, v1, v2, a ir.Node) ir.Node {
// i = len(a) - 1
// }
n := ir.NewIfStmt(base.Pos, nil, nil, nil)
- n.PtrBody().Set(nil)
- n.SetLeft(ir.NewBinaryExpr(base.Pos, ir.ONE, ir.NewUnaryExpr(base.Pos, ir.OLEN, a), nodintconst(0)))
+ n.Body.Set(nil)
+ n.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, ir.NewUnaryExpr(base.Pos, ir.OLEN, a), nodintconst(0))
// hp = &a[0]
hp := temp(types.Types[types.TUNSAFEPTR])
@@ -571,12 +571,12 @@ func arrayClear(loop *ir.RangeStmt, v1, v2, a ir.Node) ir.Node {
ix := ir.NewIndexExpr(base.Pos, a, nodintconst(0))
ix.SetBounded(true)
addr := convnop(nodAddr(ix), types.Types[types.TUNSAFEPTR])
- n.PtrBody().Append(ir.NewAssignStmt(base.Pos, hp, addr))
+ n.Body.Append(ir.NewAssignStmt(base.Pos, hp, addr))
// hn = len(a) * sizeof(elem(a))
hn := temp(types.Types[types.TUINTPTR])
mul := conv(ir.NewBinaryExpr(base.Pos, ir.OMUL, ir.NewUnaryExpr(base.Pos, ir.OLEN, a), nodintconst(elemsize)), types.Types[types.TUINTPTR])
- n.PtrBody().Append(ir.NewAssignStmt(base.Pos, hn, mul))
+ n.Body.Append(ir.NewAssignStmt(base.Pos, hn, mul))
var fn ir.Node
if a.Type().Elem().HasPointers() {
@@ -588,16 +588,16 @@ func arrayClear(loop *ir.RangeStmt, v1, v2, a ir.Node) ir.Node {
fn = mkcall("memclrNoHeapPointers", nil, nil, hp, hn)
}
- n.PtrBody().Append(fn)
+ n.Body.Append(fn)
// i = len(a) - 1
v1 = ir.NewAssignStmt(base.Pos, v1, ir.NewBinaryExpr(base.Pos, ir.OSUB, ir.NewUnaryExpr(base.Pos, ir.OLEN, a), nodintconst(1)))
- n.PtrBody().Append(v1)
+ n.Body.Append(v1)
- n.SetLeft(typecheck(n.Left(), ctxExpr))
- n.SetLeft(defaultlit(n.Left(), nil))
- typecheckslice(n.Body().Slice(), ctxStmt)
+ n.Cond = typecheck(n.Cond, ctxExpr)
+ n.Cond = defaultlit(n.Cond, nil)
+ typecheckslice(n.Body.Slice(), ctxStmt)
return walkstmt(n)
}
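arrayClear is the analogous rewrite for slices and arrays. Per the comments above, the recognized idiom

	for i := range a {
		a[i] = zero
	}

is replaced with, roughly:

	if len(a) != 0 {
		hp = &a[0]
		hn = len(a) * sizeof(elem(a))
		memclr(hp, hn) // memclrHasPointers or memclrNoHeapPointers
		i = len(a) - 1
	}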
diff --git a/src/cmd/compile/internal/gc/reflect.go b/src/cmd/compile/internal/gc/reflect.go
index 92b04f20d5..07552e64b4 100644
--- a/src/cmd/compile/internal/gc/reflect.go
+++ b/src/cmd/compile/internal/gc/reflect.go
@@ -994,7 +994,7 @@ func typename(t *types.Type) *ir.AddrExpr {
if s.Def == nil {
n := ir.NewNameAt(src.NoXPos, s)
n.SetType(types.Types[types.TUINT8])
- n.SetClass(ir.PEXTERN)
+ n.Class_ = ir.PEXTERN
n.SetTypecheck(1)
s.Def = n
}
@@ -1013,7 +1013,7 @@ func itabname(t, itype *types.Type) *ir.AddrExpr {
if s.Def == nil {
n := NewName(s)
n.SetType(types.Types[types.TUINT8])
- n.SetClass(ir.PEXTERN)
+ n.Class_ = ir.PEXTERN
n.SetTypecheck(1)
s.Def = n
itabs = append(itabs, itabEntry{t: t, itype: itype, lsym: s.Linksym()})
@@ -1875,7 +1875,7 @@ func zeroaddr(size int64) ir.Node {
if s.Def == nil {
x := NewName(s)
x.SetType(types.Types[types.TUINT8])
- x.SetClass(ir.PEXTERN)
+ x.Class_ = ir.PEXTERN
x.SetTypecheck(1)
s.Def = x
}
diff --git a/src/cmd/compile/internal/gc/scc.go b/src/cmd/compile/internal/gc/scc.go
index f2d089fa4c..a5a6480958 100644
--- a/src/cmd/compile/internal/gc/scc.go
+++ b/src/cmd/compile/internal/gc/scc.go
@@ -58,7 +58,7 @@ func visitBottomUp(list []ir.Node, analyze func(list []*ir.Func, recursive bool)
for _, n := range list {
if n.Op() == ir.ODCLFUNC {
n := n.(*ir.Func)
- if !n.Func().IsHiddenClosure() {
+ if !n.IsHiddenClosure() {
v.visit(n)
}
}
@@ -82,7 +82,7 @@ func (v *bottomUpVisitor) visit(n *ir.Func) uint32 {
switch n.Op() {
case ir.ONAME:
n := n.(*ir.Name)
- if n.Class() == ir.PFUNC {
+ if n.Class_ == ir.PFUNC {
if n != nil && n.Name().Defn != nil {
if m := v.visit(n.Name().Defn.(*ir.Func)); m < min {
min = m
@@ -100,7 +100,7 @@ func (v *bottomUpVisitor) visit(n *ir.Func) uint32 {
case ir.ODOTMETH:
n := n.(*ir.SelectorExpr)
fn := methodExprName(n)
- if fn != nil && fn.Op() == ir.ONAME && fn.Class() == ir.PFUNC && fn.Defn != nil {
+ if fn != nil && fn.Op() == ir.ONAME && fn.Class_ == ir.PFUNC && fn.Defn != nil {
if m := v.visit(fn.Defn.(*ir.Func)); m < min {
min = m
}
@@ -109,7 +109,7 @@ func (v *bottomUpVisitor) visit(n *ir.Func) uint32 {
n := n.(*ir.CallPartExpr)
fn := ir.AsNode(callpartMethod(n).Nname)
if fn != nil && fn.Op() == ir.ONAME {
- if fn := fn.(*ir.Name); fn.Class() == ir.PFUNC && fn.Name().Defn != nil {
+ if fn := fn.(*ir.Name); fn.Class_ == ir.PFUNC && fn.Name().Defn != nil {
if m := v.visit(fn.Name().Defn.(*ir.Func)); m < min {
min = m
}
@@ -117,7 +117,7 @@ func (v *bottomUpVisitor) visit(n *ir.Func) uint32 {
}
case ir.OCLOSURE:
n := n.(*ir.ClosureExpr)
- if m := v.visit(n.Func()); m < min {
+ if m := v.visit(n.Func); m < min {
min = m
}
}
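
Aside: visit() above repeats one idiom for every kind of function reference it finds — recurse into the callee and keep the smallest visit number seen, which is what detects recursion bottom-up. A stripped-down, runnable restatement over a hypothetical call graph (the real code also pops completed components, omitted here):

package main

import "fmt"

func visit(id int, calls map[int][]int, num map[int]int, next *int) int {
	if n, ok := num[id]; ok {
		return n // already visited
	}
	*next++
	num[id] = *next
	min := num[id]
	for _, callee := range calls[id] {
		// The idiom from visit(): take the minimum over all callees.
		if m := visit(callee, calls, num, next); m < min {
			min = m
		}
	}
	return min
}

func main() {
	calls := map[int][]int{1: {2}, 2: {1}} // mutual recursion: one component
	num := map[int]int{}
	next := 0
	fmt.Println(visit(1, calls, num, &next)) // 1: the cycle reaches its root
}
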
diff --git a/src/cmd/compile/internal/gc/scope.go b/src/cmd/compile/internal/gc/scope.go
index 8dd44b1dd4..9ab33583c8 100644
--- a/src/cmd/compile/internal/gc/scope.go
+++ b/src/cmd/compile/internal/gc/scope.go
@@ -30,13 +30,13 @@ func findScope(marks []ir.Mark, pos src.XPos) ir.ScopeID {
func assembleScopes(fnsym *obj.LSym, fn *ir.Func, dwarfVars []*dwarf.Var, varScopes []ir.ScopeID) []dwarf.Scope {
// Initialize the DWARF scope tree based on lexical scopes.
- dwarfScopes := make([]dwarf.Scope, 1+len(fn.Func().Parents))
- for i, parent := range fn.Func().Parents {
+ dwarfScopes := make([]dwarf.Scope, 1+len(fn.Parents))
+ for i, parent := range fn.Parents {
dwarfScopes[i+1].Parent = int32(parent)
}
scopeVariables(dwarfVars, varScopes, dwarfScopes)
- scopePCs(fnsym, fn.Func().Marks, dwarfScopes)
+ scopePCs(fnsym, fn.Marks, dwarfScopes)
return compactScopes(dwarfScopes)
}
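
Aside: assembleScopes stores the lexical scope tree as a flat parent table — slot 0 is the implicit root and slot i+1 holds the parent of scope i, hence the 1+len(fn.Parents) allocation. Restated as a runnable sketch with a hypothetical parent list:

package main

import "fmt"

func main() {
	parents := []int32{0, 0, 1} // hypothetical fn.Parents
	// Mirrors: dwarfScopes := make([]dwarf.Scope, 1+len(fn.Parents))
	scopes := make([]int32, 1+len(parents))
	for i, parent := range parents {
		scopes[i+1] = parent // scope i's parent lives at slot i+1
	}
	fmt.Println(scopes) // [0 0 0 1]
}
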
diff --git a/src/cmd/compile/internal/gc/select.go b/src/cmd/compile/internal/gc/select.go
index 64d3461dca..5c69be7e06 100644
--- a/src/cmd/compile/internal/gc/select.go
+++ b/src/cmd/compile/internal/gc/select.go
@@ -15,30 +15,30 @@ func typecheckselect(sel *ir.SelectStmt) {
var def ir.Node
lno := setlineno(sel)
typecheckslice(sel.Init().Slice(), ctxStmt)
- for _, ncase := range sel.List().Slice() {
+ for _, ncase := range sel.Cases.Slice() {
ncase := ncase.(*ir.CaseStmt)
- if ncase.List().Len() == 0 {
+ if ncase.List.Len() == 0 {
// default
if def != nil {
base.ErrorfAt(ncase.Pos(), "multiple defaults in select (first at %v)", ir.Line(def))
} else {
def = ncase
}
- } else if ncase.List().Len() > 1 {
+ } else if ncase.List.Len() > 1 {
base.ErrorfAt(ncase.Pos(), "select cases cannot be lists")
} else {
- ncase.List().SetFirst(typecheck(ncase.List().First(), ctxStmt))
- n := ncase.List().First()
- ncase.SetLeft(n)
- ncase.PtrList().Set(nil)
+ ncase.List.SetFirst(typecheck(ncase.List.First(), ctxStmt))
+ n := ncase.List.First()
+ ncase.Comm = n
+ ncase.List.Set(nil)
oselrecv2 := func(dst, recv ir.Node, colas bool) {
n := ir.NewAssignListStmt(n.Pos(), ir.OSELRECV2, nil, nil)
- n.PtrList().Set2(dst, ir.BlankNode)
- n.PtrRlist().Set1(recv)
- n.SetColas(colas)
+ n.Lhs.Set2(dst, ir.BlankNode)
+ n.Rhs.Set1(recv)
+ n.Def = colas
n.SetTypecheck(1)
- ncase.SetLeft(n)
+ ncase.Comm = n
}
switch n.Op() {
default:
@@ -57,21 +57,21 @@ func typecheckselect(sel *ir.SelectStmt) {
// remove implicit conversions; the eventual assignment
// will reintroduce them.
n := n.(*ir.AssignStmt)
- if r := n.Right(); r.Op() == ir.OCONVNOP || r.Op() == ir.OCONVIFACE {
+ if r := n.Y; r.Op() == ir.OCONVNOP || r.Op() == ir.OCONVIFACE {
r := r.(*ir.ConvExpr)
if r.Implicit() {
- n.SetRight(r.Left())
+ n.Y = r.X
}
}
- if n.Right().Op() != ir.ORECV {
+ if n.Y.Op() != ir.ORECV {
base.ErrorfAt(n.Pos(), "select assignment must have receive on right hand side")
break
}
- oselrecv2(n.Left(), n.Right(), n.Colas())
+ oselrecv2(n.X, n.Y, n.Def)
case ir.OAS2RECV:
n := n.(*ir.AssignListStmt)
- if n.Rlist().First().Op() != ir.ORECV {
+ if n.Rhs.First().Op() != ir.ORECV {
base.ErrorfAt(n.Pos(), "select assignment must have receive on right hand side")
break
}
@@ -87,7 +87,7 @@ func typecheckselect(sel *ir.SelectStmt) {
}
}
- typecheckslice(ncase.Body().Slice(), ctxStmt)
+ typecheckslice(ncase.Body.Slice(), ctxStmt)
}
base.Pos = lno
@@ -95,18 +95,18 @@ func typecheckselect(sel *ir.SelectStmt) {
func walkselect(sel *ir.SelectStmt) {
lno := setlineno(sel)
- if sel.Body().Len() != 0 {
+ if sel.Compiled.Len() != 0 {
base.Fatalf("double walkselect")
}
init := sel.Init().Slice()
sel.PtrInit().Set(nil)
- init = append(init, walkselectcases(sel.List())...)
- sel.SetList(ir.Nodes{})
+ init = append(init, walkselectcases(sel.Cases)...)
+ sel.Cases = ir.Nodes{}
- sel.PtrBody().Set(init)
- walkstmtlist(sel.Body().Slice())
+ sel.Compiled.Set(init)
+ walkstmtlist(sel.Compiled.Slice())
base.Pos = lno
}
@@ -125,8 +125,8 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
cas := cases.First().(*ir.CaseStmt)
setlineno(cas)
l := cas.Init().Slice()
- if cas.Left() != nil { // not default:
- n := cas.Left()
+ if cas.Comm != nil { // not default:
+ n := cas.Comm
l = append(l, n.Init().Slice()...)
n.PtrInit().Set(nil)
switch n.Op() {
@@ -138,8 +138,8 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
case ir.OSELRECV2:
r := n.(*ir.AssignListStmt)
- if ir.IsBlank(r.List().First()) && ir.IsBlank(r.List().Second()) {
- n = r.Rlist().First()
+ if ir.IsBlank(r.Lhs.First()) && ir.IsBlank(r.Lhs.Second()) {
+ n = r.Rhs.First()
break
}
r.SetOp(ir.OAS2RECV)
@@ -148,7 +148,7 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
l = append(l, n)
}
- l = append(l, cas.Body().Slice()...)
+ l = append(l, cas.Body.Slice()...)
l = append(l, ir.NewBranchStmt(base.Pos, ir.OBREAK, nil))
return l
}
@@ -159,7 +159,7 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
for _, cas := range cases.Slice() {
cas := cas.(*ir.CaseStmt)
setlineno(cas)
- n := cas.Left()
+ n := cas.Comm
if n == nil {
dflt = cas
continue
@@ -167,14 +167,14 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
switch n.Op() {
case ir.OSEND:
n := n.(*ir.SendStmt)
- n.SetRight(nodAddr(n.Right()))
- n.SetRight(typecheck(n.Right(), ctxExpr))
+ n.Value = nodAddr(n.Value)
+ n.Value = typecheck(n.Value, ctxExpr)
case ir.OSELRECV2:
n := n.(*ir.AssignListStmt)
- if !ir.IsBlank(n.List().First()) {
- n.List().SetIndex(0, nodAddr(n.List().First()))
- n.List().SetIndex(0, typecheck(n.List().First(), ctxExpr))
+ if !ir.IsBlank(n.Lhs.First()) {
+ n.Lhs.SetIndex(0, nodAddr(n.Lhs.First()))
+ n.Lhs.SetIndex(0, typecheck(n.Lhs.First(), ctxExpr))
}
}
}
@@ -186,7 +186,7 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
cas = cases.Second().(*ir.CaseStmt)
}
- n := cas.Left()
+ n := cas.Comm
setlineno(n)
r := ir.NewIfStmt(base.Pos, nil, nil, nil)
r.PtrInit().Set(cas.Init().Slice())
@@ -198,31 +198,31 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
case ir.OSEND:
// if selectnbsend(c, v) { body } else { default body }
n := n.(*ir.SendStmt)
- ch := n.Left()
- call = mkcall1(chanfn("selectnbsend", 2, ch.Type()), types.Types[types.TBOOL], r.PtrInit(), ch, n.Right())
+ ch := n.Chan
+ call = mkcall1(chanfn("selectnbsend", 2, ch.Type()), types.Types[types.TBOOL], r.PtrInit(), ch, n.Value)
case ir.OSELRECV2:
n := n.(*ir.AssignListStmt)
- recv := n.Rlist().First().(*ir.UnaryExpr)
- ch := recv.Left()
- elem := n.List().First()
+ recv := n.Rhs.First().(*ir.UnaryExpr)
+ ch := recv.X
+ elem := n.Lhs.First()
if ir.IsBlank(elem) {
elem = nodnil()
}
- if ir.IsBlank(n.List().Second()) {
+ if ir.IsBlank(n.Lhs.Second()) {
// if selectnbrecv(&v, c) { body } else { default body }
call = mkcall1(chanfn("selectnbrecv", 2, ch.Type()), types.Types[types.TBOOL], r.PtrInit(), elem, ch)
} else {
// TODO(cuonglm): make this use selectnbrecv()
// if selectnbrecv2(&v, &received, c) { body } else { default body }
- receivedp := typecheck(nodAddr(n.List().Second()), ctxExpr)
+ receivedp := typecheck(nodAddr(n.Lhs.Second()), ctxExpr)
call = mkcall1(chanfn("selectnbrecv2", 2, ch.Type()), types.Types[types.TBOOL], r.PtrInit(), elem, receivedp, ch)
}
}
- r.SetLeft(typecheck(call, ctxExpr))
- r.PtrBody().Set(cas.Body().Slice())
- r.PtrRlist().Set(append(dflt.Init().Slice(), dflt.Body().Slice()...))
+ r.Cond = typecheck(call, ctxExpr)
+ r.Body.Set(cas.Body.Slice())
+ r.Else.Set(append(dflt.Init().Slice(), dflt.Body.Slice()...))
return []ir.Node{r, ir.NewBranchStmt(base.Pos, ir.OBREAK, nil)}
}
@@ -258,7 +258,7 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
init = append(init, cas.Init().Slice()...)
cas.PtrInit().Set(nil)
- n := cas.Left()
+ n := cas.Comm
if n == nil { // default:
continue
}
@@ -272,15 +272,15 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
n := n.(*ir.SendStmt)
i = nsends
nsends++
- c = n.Left()
- elem = n.Right()
+ c = n.Chan
+ elem = n.Value
case ir.OSELRECV2:
n := n.(*ir.AssignListStmt)
nrecvs++
i = ncas - nrecvs
- recv := n.Rlist().First().(*ir.UnaryExpr)
- c = recv.Left()
- elem = n.List().First()
+ recv := n.Rhs.First().(*ir.UnaryExpr)
+ c = recv.X
+ elem = n.Lhs.First()
}
casorder[i] = cas
@@ -313,9 +313,9 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
chosen := temp(types.Types[types.TINT])
recvOK := temp(types.Types[types.TBOOL])
r := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
- r.PtrList().Set2(chosen, recvOK)
+ r.Lhs.Set2(chosen, recvOK)
fn := syslook("selectgo")
- r.PtrRlist().Set1(mkcall1(fn, fn.Type().Results(), nil, bytePtrToIndex(selv, 0), bytePtrToIndex(order, 0), pc0, nodintconst(int64(nsends)), nodintconst(int64(nrecvs)), nodbool(dflt == nil)))
+ r.Rhs.Set1(mkcall1(fn, fn.Type().Results(), nil, bytePtrToIndex(selv, 0), bytePtrToIndex(order, 0), pc0, nodintconst(int64(nsends)), nodintconst(int64(nrecvs)), nodbool(dflt == nil)))
init = append(init, typecheck(r, ctxStmt))
// selv and order are no longer alive after selectgo.
@@ -332,16 +332,16 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
r := ir.NewIfStmt(base.Pos, cond, nil, nil)
- if n := cas.Left(); n != nil && n.Op() == ir.OSELRECV2 {
+ if n := cas.Comm; n != nil && n.Op() == ir.OSELRECV2 {
n := n.(*ir.AssignListStmt)
- if !ir.IsBlank(n.List().Second()) {
- x := ir.NewAssignStmt(base.Pos, n.List().Second(), recvOK)
- r.PtrBody().Append(typecheck(x, ctxStmt))
+ if !ir.IsBlank(n.Lhs.Second()) {
+ x := ir.NewAssignStmt(base.Pos, n.Lhs.Second(), recvOK)
+ r.Body.Append(typecheck(x, ctxStmt))
}
}
- r.PtrBody().AppendNodes(cas.PtrBody())
- r.PtrBody().Append(ir.NewBranchStmt(base.Pos, ir.OBREAK, nil))
+ r.Body.AppendNodes(&cas.Body)
+ r.Body.Append(ir.NewBranchStmt(base.Pos, ir.OBREAK, nil))
init = append(init, r)
}
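
Aside: the comments in walkselectcases describe the two-case lowering — a select with one real case plus a default becomes "if selectnbsend(c, v) { body } else { default body }", with the selectnbrecv variants for receives. The user-visible equivalent is an ordinary select with a default clause; a runnable sketch:

package main

import "fmt"

// trySend is the source-level shape of the rewrite: it reports whether
// the send happened, which is what selectnbsend returns to the emitted if.
func trySend(c chan int, v int) bool {
	select {
	case c <- v:
		return true
	default:
		return false
	}
}

func main() {
	c := make(chan int, 1)
	fmt.Println(trySend(c, 1)) // true: buffer has room
	fmt.Println(trySend(c, 2)) // false: the send would block
}
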
diff --git a/src/cmd/compile/internal/gc/sinit.go b/src/cmd/compile/internal/gc/sinit.go
index f4988df9ac..0fc19a6989 100644
--- a/src/cmd/compile/internal/gc/sinit.go
+++ b/src/cmd/compile/internal/gc/sinit.go
@@ -61,25 +61,25 @@ func (s *InitSchedule) tryStaticInit(nn ir.Node) bool {
return false
}
n := nn.(*ir.AssignStmt)
- if ir.IsBlank(n.Left()) && !anySideEffects(n.Right()) {
+ if ir.IsBlank(n.X) && !anySideEffects(n.Y) {
// Discard.
return true
}
lno := setlineno(n)
defer func() { base.Pos = lno }()
- nam := n.Left().(*ir.Name)
- return s.staticassign(nam, 0, n.Right(), nam.Type())
+ nam := n.X.(*ir.Name)
+ return s.staticassign(nam, 0, n.Y, nam.Type())
}
// like staticassign but we are copying an already
// initialized value r.
func (s *InitSchedule) staticcopy(l *ir.Name, loff int64, rn *ir.Name, typ *types.Type) bool {
- if rn.Class() == ir.PFUNC {
+ if rn.Class_ == ir.PFUNC {
// TODO if roff != 0 { panic }
pfuncsym(l, loff, rn)
return true
}
- if rn.Class() != ir.PEXTERN || rn.Sym().Pkg != types.LocalPkg {
+ if rn.Class_ != ir.PEXTERN || rn.Sym().Pkg != types.LocalPkg {
return false
}
if rn.Defn == nil { // probably zeroed but perhaps supplied externally and of unknown value
@@ -92,10 +92,10 @@ func (s *InitSchedule) staticcopy(l *ir.Name, loff int64, rn *ir.Name, typ *type
return false
}
orig := rn
- r := rn.Defn.(*ir.AssignStmt).Right()
+ r := rn.Defn.(*ir.AssignStmt).Y
for r.Op() == ir.OCONVNOP && !types.Identical(r.Type(), typ) {
- r = r.(*ir.ConvExpr).Left()
+ r = r.(*ir.ConvExpr).X
}
switch r.Op() {
@@ -128,7 +128,7 @@ func (s *InitSchedule) staticcopy(l *ir.Name, loff int64, rn *ir.Name, typ *type
case ir.OADDR:
r := r.(*ir.AddrExpr)
- if a := r.Left(); a.Op() == ir.ONAME {
+ if a := r.X; a.Op() == ir.ONAME {
a := a.(*ir.Name)
addrsym(l, loff, a, 0)
return true
@@ -136,7 +136,7 @@ func (s *InitSchedule) staticcopy(l *ir.Name, loff int64, rn *ir.Name, typ *type
case ir.OPTRLIT:
r := r.(*ir.AddrExpr)
- switch r.Left().Op() {
+ switch r.X.Op() {
case ir.OARRAYLIT, ir.OSLICELIT, ir.OSTRUCTLIT, ir.OMAPLIT:
// copy pointer
addrsym(l, loff, s.inittemps[r], 0)
@@ -182,7 +182,7 @@ func (s *InitSchedule) staticcopy(l *ir.Name, loff int64, rn *ir.Name, typ *type
func (s *InitSchedule) staticassign(l *ir.Name, loff int64, r ir.Node, typ *types.Type) bool {
for r.Op() == ir.OCONVNOP {
- r = r.(*ir.ConvExpr).Left()
+ r = r.(*ir.ConvExpr).X
}
switch r.Op() {
@@ -206,7 +206,7 @@ func (s *InitSchedule) staticassign(l *ir.Name, loff int64, r ir.Node, typ *type
case ir.OADDR:
r := r.(*ir.AddrExpr)
- if name, offset, ok := stataddr(r.Left()); ok {
+ if name, offset, ok := stataddr(r.X); ok {
addrsym(l, loff, name, offset)
return true
}
@@ -214,17 +214,17 @@ func (s *InitSchedule) staticassign(l *ir.Name, loff int64, r ir.Node, typ *type
case ir.OPTRLIT:
r := r.(*ir.AddrExpr)
- switch r.Left().Op() {
+ switch r.X.Op() {
case ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT, ir.OSTRUCTLIT:
// Init pointer.
- a := staticname(r.Left().Type())
+ a := staticname(r.X.Type())
s.inittemps[r] = a
addrsym(l, loff, a, 0)
// Init underlying literal.
- if !s.staticassign(a, 0, r.Left(), a.Type()) {
- s.append(ir.NewAssignStmt(base.Pos, a, r.Left()))
+ if !s.staticassign(a, 0, r.X, a.Type()) {
+ s.append(ir.NewAssignStmt(base.Pos, a, r.X))
}
return true
}
@@ -232,8 +232,8 @@ func (s *InitSchedule) staticassign(l *ir.Name, loff int64, r ir.Node, typ *type
case ir.OSTR2BYTES:
r := r.(*ir.ConvExpr)
- if l.Class() == ir.PEXTERN && r.Left().Op() == ir.OLITERAL {
- sval := ir.StringVal(r.Left())
+ if l.Class_ == ir.PEXTERN && r.X.Op() == ir.OLITERAL {
+ sval := ir.StringVal(r.X)
slicebytes(l, loff, sval)
return true
}
@@ -284,7 +284,7 @@ func (s *InitSchedule) staticassign(l *ir.Name, loff int64, r ir.Node, typ *type
// Closures with no captured variables are globals,
// so the assignment can be done at link time.
// TODO if roff != 0 { panic }
- pfuncsym(l, loff, r.Func().Nname)
+ pfuncsym(l, loff, r.Func.Nname)
return true
}
closuredebugruntimecheck(r)
@@ -297,7 +297,7 @@ func (s *InitSchedule) staticassign(l *ir.Name, loff int64, r ir.Node, typ *type
r := r.(*ir.ConvExpr)
val := ir.Node(r)
for val.Op() == ir.OCONVIFACE {
- val = val.(*ir.ConvExpr).Left()
+ val = val.(*ir.ConvExpr).X
}
if val.Type().IsInterface() {
@@ -321,7 +321,7 @@ func (s *InitSchedule) staticassign(l *ir.Name, loff int64, r ir.Node, typ *type
// Create a copy of l to modify while we emit data.
// Emit itab, advance offset.
- addrsym(l, loff, itab.Left().(*ir.Name), 0)
+ addrsym(l, loff, itab.X.(*ir.Name), 0)
// Emit data.
if isdirectiface(val.Type()) {
@@ -409,7 +409,7 @@ func isSimpleName(nn ir.Node) bool {
return false
}
n := nn.(*ir.Name)
- return n.Class() != ir.PAUTOHEAP && n.Class() != ir.PEXTERN
+ return n.Class_ != ir.PAUTOHEAP && n.Class_ != ir.PEXTERN
}
func litas(l ir.Node, r ir.Node, init *ir.Nodes) {
@@ -439,7 +439,7 @@ func getdyn(n ir.Node, top bool) initGenType {
if !top {
return initDynamic
}
- if n.Len/4 > int64(n.List().Len()) {
+ if n.Len/4 > int64(n.List.Len()) {
// <25% of entries have explicit values.
// Very rough estimation, it takes 4 bytes of instructions
// to initialize 1 byte of result. So don't use a static
@@ -454,12 +454,12 @@ func getdyn(n ir.Node, top bool) initGenType {
lit := n.(*ir.CompLitExpr)
var mode initGenType
- for _, n1 := range lit.List().Slice() {
+ for _, n1 := range lit.List.Slice() {
switch n1.Op() {
case ir.OKEY:
- n1 = n1.(*ir.KeyExpr).Right()
+ n1 = n1.(*ir.KeyExpr).Value
case ir.OSTRUCTKEY:
- n1 = n1.(*ir.StructKeyExpr).Left()
+ n1 = n1.(*ir.StructKeyExpr).Value
}
mode |= getdyn(n1, false)
if mode == initDynamic|initConst {
@@ -476,9 +476,9 @@ func isStaticCompositeLiteral(n ir.Node) bool {
return false
case ir.OARRAYLIT:
n := n.(*ir.CompLitExpr)
- for _, r := range n.List().Slice() {
+ for _, r := range n.List.Slice() {
if r.Op() == ir.OKEY {
- r = r.(*ir.KeyExpr).Right()
+ r = r.(*ir.KeyExpr).Value
}
if !isStaticCompositeLiteral(r) {
return false
@@ -487,9 +487,9 @@ func isStaticCompositeLiteral(n ir.Node) bool {
return true
case ir.OSTRUCTLIT:
n := n.(*ir.CompLitExpr)
- for _, r := range n.List().Slice() {
+ for _, r := range n.List.Slice() {
r := r.(*ir.StructKeyExpr)
- if !isStaticCompositeLiteral(r.Left()) {
+ if !isStaticCompositeLiteral(r.Value) {
return false
}
}
@@ -501,7 +501,7 @@ func isStaticCompositeLiteral(n ir.Node) bool {
n := n.(*ir.ConvExpr)
val := ir.Node(n)
for val.Op() == ir.OCONVIFACE {
- val = val.(*ir.ConvExpr).Left()
+ val = val.(*ir.ConvExpr).X
}
if val.Type().IsInterface() {
return val.Op() == ir.ONIL
@@ -542,11 +542,11 @@ func fixedlit(ctxt initContext, kind initKind, n *ir.CompLitExpr, var_ ir.Node,
splitnode = func(r ir.Node) (ir.Node, ir.Node) {
if r.Op() == ir.OKEY {
kv := r.(*ir.KeyExpr)
- k = indexconst(kv.Left())
+ k = indexconst(kv.Key)
if k < 0 {
- base.Fatalf("fixedlit: invalid index %v", kv.Left())
+ base.Fatalf("fixedlit: invalid index %v", kv.Key)
}
- r = kv.Right()
+ r = kv.Value
}
a := ir.NewIndexExpr(base.Pos, var_, nodintconst(k))
k++
@@ -558,17 +558,17 @@ func fixedlit(ctxt initContext, kind initKind, n *ir.CompLitExpr, var_ ir.Node,
case ir.OSTRUCTLIT:
splitnode = func(rn ir.Node) (ir.Node, ir.Node) {
r := rn.(*ir.StructKeyExpr)
- if r.Sym().IsBlank() || isBlank {
- return ir.BlankNode, r.Left()
+ if r.Field.IsBlank() || isBlank {
+ return ir.BlankNode, r.Value
}
setlineno(r)
- return ir.NewSelectorExpr(base.Pos, ir.ODOT, var_, r.Sym()), r.Left()
+ return ir.NewSelectorExpr(base.Pos, ir.ODOT, var_, r.Field), r.Value
}
default:
base.Fatalf("fixedlit bad op: %v", n.Op())
}
- for _, r := range n.List().Slice() {
+ for _, r := range n.List.Slice() {
a, value := splitnode(r)
if a == ir.BlankNode && !anySideEffects(value) {
// Discard.
@@ -635,7 +635,7 @@ func slicelit(ctxt initContext, n *ir.CompLitExpr, var_ ir.Node, init *ir.Nodes)
// copy static to slice
var_ = typecheck(var_, ctxExpr|ctxAssign)
name, offset, ok := stataddr(var_)
- if !ok || name.Class() != ir.PEXTERN {
+ if !ok || name.Class_ != ir.PEXTERN {
base.Fatalf("slicelit: %v", var_)
}
slicesym(name, offset, vstat, t.NumElem())
@@ -703,7 +703,7 @@ func slicelit(ctxt initContext, n *ir.CompLitExpr, var_ ir.Node, init *ir.Nodes)
a = ir.NewAssignStmt(base.Pos, temp(t), nil)
a = typecheck(a, ctxStmt)
init.Append(a) // zero new temp
- a = a.(*ir.AssignStmt).Left()
+ a = a.(*ir.AssignStmt).X
} else {
init.Append(ir.NewUnaryExpr(base.Pos, ir.OVARDEF, a))
}
@@ -722,14 +722,14 @@ func slicelit(ctxt initContext, n *ir.CompLitExpr, var_ ir.Node, init *ir.Nodes)
// put dynamics into array (5)
var index int64
- for _, value := range n.List().Slice() {
+ for _, value := range n.List.Slice() {
if value.Op() == ir.OKEY {
kv := value.(*ir.KeyExpr)
- index = indexconst(kv.Left())
+ index = indexconst(kv.Key)
if index < 0 {
- base.Fatalf("slicelit: invalid index %v", kv.Left())
+ base.Fatalf("slicelit: invalid index %v", kv.Key)
}
- value = kv.Right()
+ value = kv.Value
}
a := ir.NewIndexExpr(base.Pos, vauto, nodintconst(index))
a.SetBounded(true)
@@ -778,16 +778,16 @@ func maplit(n *ir.CompLitExpr, m ir.Node, init *ir.Nodes) {
// make the map var
a := ir.NewCallExpr(base.Pos, ir.OMAKE, nil, nil)
a.SetEsc(n.Esc())
- a.PtrList().Set2(ir.TypeNode(n.Type()), nodintconst(int64(n.List().Len())))
+ a.Args.Set2(ir.TypeNode(n.Type()), nodintconst(int64(n.List.Len())))
litas(m, a, init)
- entries := n.List().Slice()
+ entries := n.List.Slice()
// The order pass already removed any dynamic (runtime-computed) entries.
// All remaining entries are static. Double-check that.
for _, r := range entries {
r := r.(*ir.KeyExpr)
- if !isStaticCompositeLiteral(r.Left()) || !isStaticCompositeLiteral(r.Right()) {
+ if !isStaticCompositeLiteral(r.Key) || !isStaticCompositeLiteral(r.Value) {
base.Fatalf("maplit: entry is not a literal: %v", r)
}
}
@@ -813,8 +813,8 @@ func maplit(n *ir.CompLitExpr, m ir.Node, init *ir.Nodes) {
datae := ir.NewCompLitExpr(base.Pos, ir.OARRAYLIT, nil, nil)
for _, r := range entries {
r := r.(*ir.KeyExpr)
- datak.PtrList().Append(r.Left())
- datae.PtrList().Append(r.Right())
+ datak.List.Append(r.Key)
+ datae.List.Append(r.Value)
}
fixedlit(inInitFunction, initKindStatic, datak, vstatk, init)
fixedlit(inInitFunction, initKindStatic, datae, vstate, init)
@@ -837,7 +837,7 @@ func maplit(n *ir.CompLitExpr, m ir.Node, init *ir.Nodes) {
body := ir.NewAssignStmt(base.Pos, lhs, rhs)
loop := ir.NewForStmt(base.Pos, nil, cond, incr, nil)
- loop.PtrBody().Set1(body)
+ loop.Body.Set1(body)
loop.PtrInit().Set1(zero)
appendWalkStmt(init, loop)
@@ -853,7 +853,7 @@ func maplit(n *ir.CompLitExpr, m ir.Node, init *ir.Nodes) {
for _, r := range entries {
r := r.(*ir.KeyExpr)
- index, elem := r.Left(), r.Right()
+ index, elem := r.Key, r.Value
setlineno(index)
appendWalkStmt(init, ir.NewAssignStmt(base.Pos, tmpkey, index))
@@ -890,19 +890,19 @@ func anylit(n ir.Node, var_ ir.Node, init *ir.Nodes) {
}
var r ir.Node
- if n.Right() != nil {
+ if n.Alloc != nil {
- // n.Right is stack temporary used as backing store.
+ // n.Alloc is a stack temporary used as backing store.
- appendWalkStmt(init, ir.NewAssignStmt(base.Pos, n.Right(), nil)) // zero backing store, just in case (#18410)
- r = nodAddr(n.Right())
+ appendWalkStmt(init, ir.NewAssignStmt(base.Pos, n.Alloc, nil)) // zero backing store, just in case (#18410)
+ r = nodAddr(n.Alloc)
} else {
- r = ir.NewUnaryExpr(base.Pos, ir.ONEW, ir.TypeNode(n.Left().Type()))
+ r = ir.NewUnaryExpr(base.Pos, ir.ONEW, ir.TypeNode(n.X.Type()))
r.SetEsc(n.Esc())
}
appendWalkStmt(init, ir.NewAssignStmt(base.Pos, var_, r))
var_ = ir.NewStarExpr(base.Pos, var_)
var_ = typecheck(var_, ctxExpr|ctxAssign)
- anylit(n.Left(), var_, init)
+ anylit(n.X, var_, init)
case ir.OSTRUCTLIT, ir.OARRAYLIT:
n := n.(*ir.CompLitExpr)
@@ -910,7 +910,7 @@ func anylit(n ir.Node, var_ ir.Node, init *ir.Nodes) {
base.Fatalf("anylit: not struct/array")
}
- if isSimpleName(var_) && n.List().Len() > 4 {
+ if isSimpleName(var_) && n.List.Len() > 4 {
// lay out static data
vstat := readonlystaticname(t)
@@ -935,7 +935,7 @@ func anylit(n ir.Node, var_ ir.Node, init *ir.Nodes) {
components = int64(t.NumFields())
}
// initialization of an array or struct with unspecified components (missing fields or arrays)
- if isSimpleName(var_) || int64(n.List().Len()) < components {
+ if isSimpleName(var_) || int64(n.List.Len()) < components {
appendWalkStmt(init, ir.NewAssignStmt(base.Pos, var_, nil))
}
@@ -958,34 +958,34 @@ func anylit(n ir.Node, var_ ir.Node, init *ir.Nodes) {
// It returns true if n's effects have been added to init,
// in which case n should be dropped from the program by the caller.
func oaslit(n *ir.AssignStmt, init *ir.Nodes) bool {
- if n.Left() == nil || n.Right() == nil {
+ if n.X == nil || n.Y == nil {
// not a special composite literal assignment
return false
}
- if n.Left().Type() == nil || n.Right().Type() == nil {
+ if n.X.Type() == nil || n.Y.Type() == nil {
// not a special composite literal assignment
return false
}
- if !isSimpleName(n.Left()) {
+ if !isSimpleName(n.X) {
// not a special composite literal assignment
return false
}
- if !types.Identical(n.Left().Type(), n.Right().Type()) {
+ if !types.Identical(n.X.Type(), n.Y.Type()) {
// not a special composite literal assignment
return false
}
- switch n.Right().Op() {
+ switch n.Y.Op() {
default:
// not a special composite literal assignment
return false
case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT:
- if refersToCommonName(n.Left(), n.Right()) {
+ if refersToCommonName(n.X, n.Y) {
// not a special composite literal assignment
return false
}
- anylit(n.Right(), n.Left(), init)
+ anylit(n.Y, n.X, init)
}
return true
@@ -1015,21 +1015,21 @@ func stataddr(n ir.Node) (name *ir.Name, offset int64, ok bool) {
case ir.ODOT:
n := n.(*ir.SelectorExpr)
- if name, offset, ok = stataddr(n.Left()); !ok {
+ if name, offset, ok = stataddr(n.X); !ok {
break
}
- offset += n.Offset()
+ offset += n.Offset
return name, offset, true
case ir.OINDEX:
n := n.(*ir.IndexExpr)
- if n.Left().Type().IsSlice() {
+ if n.X.Type().IsSlice() {
break
}
- if name, offset, ok = stataddr(n.Left()); !ok {
+ if name, offset, ok = stataddr(n.X); !ok {
break
}
- l := getlit(n.Right())
+ l := getlit(n.Index)
if l < 0 {
break
}
@@ -1058,14 +1058,14 @@ func (s *InitSchedule) initplan(n ir.Node) {
case ir.OARRAYLIT, ir.OSLICELIT:
n := n.(*ir.CompLitExpr)
var k int64
- for _, a := range n.List().Slice() {
+ for _, a := range n.List.Slice() {
if a.Op() == ir.OKEY {
kv := a.(*ir.KeyExpr)
- k = indexconst(kv.Left())
+ k = indexconst(kv.Key)
if k < 0 {
- base.Fatalf("initplan arraylit: invalid index %v", kv.Left())
+ base.Fatalf("initplan arraylit: invalid index %v", kv.Key)
}
- a = kv.Right()
+ a = kv.Value
}
s.addvalue(p, k*n.Type().Elem().Width, a)
k++
@@ -1073,25 +1073,25 @@ func (s *InitSchedule) initplan(n ir.Node) {
case ir.OSTRUCTLIT:
n := n.(*ir.CompLitExpr)
- for _, a := range n.List().Slice() {
+ for _, a := range n.List.Slice() {
if a.Op() != ir.OSTRUCTKEY {
base.Fatalf("initplan structlit")
}
a := a.(*ir.StructKeyExpr)
- if a.Sym().IsBlank() {
+ if a.Field.IsBlank() {
continue
}
- s.addvalue(p, a.Offset(), a.Left())
+ s.addvalue(p, a.Offset, a.Value)
}
case ir.OMAPLIT:
n := n.(*ir.CompLitExpr)
- for _, a := range n.List().Slice() {
+ for _, a := range n.List.Slice() {
if a.Op() != ir.OKEY {
base.Fatalf("initplan maplit")
}
a := a.(*ir.KeyExpr)
- s.addvalue(p, -1, a.Right())
+ s.addvalue(p, -1, a.Value)
}
}
}
@@ -1135,9 +1135,9 @@ func isZero(n ir.Node) bool {
case ir.OARRAYLIT:
n := n.(*ir.CompLitExpr)
- for _, n1 := range n.List().Slice() {
+ for _, n1 := range n.List.Slice() {
if n1.Op() == ir.OKEY {
- n1 = n1.(*ir.KeyExpr).Right()
+ n1 = n1.(*ir.KeyExpr).Value
}
if !isZero(n1) {
return false
@@ -1147,9 +1147,9 @@ func isZero(n ir.Node) bool {
case ir.OSTRUCTLIT:
n := n.(*ir.CompLitExpr)
- for _, n1 := range n.List().Slice() {
+ for _, n1 := range n.List.Slice() {
n1 := n1.(*ir.StructKeyExpr)
- if !isZero(n1.Left()) {
+ if !isZero(n1.Value) {
return false
}
}
@@ -1164,16 +1164,16 @@ func isvaluelit(n ir.Node) bool {
}
func genAsStatic(as *ir.AssignStmt) {
- if as.Left().Type() == nil {
+ if as.X.Type() == nil {
base.Fatalf("genAsStatic as.Left not typechecked")
}
- name, offset, ok := stataddr(as.Left())
- if !ok || (name.Class() != ir.PEXTERN && as.Left() != ir.BlankNode) {
- base.Fatalf("genAsStatic: lhs %v", as.Left())
+ name, offset, ok := stataddr(as.X)
+ if !ok || (name.Class_ != ir.PEXTERN && as.X != ir.BlankNode) {
+ base.Fatalf("genAsStatic: lhs %v", as.X)
}
- switch r := as.Right(); r.Op() {
+ switch r := as.Y; r.Op() {
case ir.OLITERAL:
litsym(name, offset, r, int(r.Type().Width))
return
@@ -1183,13 +1183,13 @@ func genAsStatic(as *ir.AssignStmt) {
return
case ir.ONAME:
r := r.(*ir.Name)
- if r.Offset() != 0 {
+ if r.Offset_ != 0 {
base.Fatalf("genAsStatic %+v", as)
}
- if r.Class() == ir.PFUNC {
+ if r.Class_ == ir.PFUNC {
pfuncsym(name, offset, r)
return
}
}
- base.Fatalf("genAsStatic: rhs %v", as.Right())
+ base.Fatalf("genAsStatic: rhs %v", as.Y)
}
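
Aside: one heuristic worth calling out from this file — getdyn treats a slice literal as dynamic when fewer than a quarter of its entries are explicit, since (per its comment) roughly 4 bytes of instructions initialize 1 byte of result. The test n.Len/4 > int64(n.List.Len()), restated and runnable:

package main

import "fmt"

// preferDynamic mirrors the check n.Len/4 > int64(n.List.Len()):
// true means static data would be a poor trade for this literal.
func preferDynamic(length, explicitEntries int64) bool {
	return length/4 > explicitEntries
}

func main() {
	fmt.Println(preferDynamic(100, 10)) // true: <25% of entries explicit
	fmt.Println(preferDynamic(100, 30)) // false: dense enough for static data
}
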
diff --git a/src/cmd/compile/internal/gc/ssa.go b/src/cmd/compile/internal/gc/ssa.go
index dc3ea4be9e..4660da0456 100644
--- a/src/cmd/compile/internal/gc/ssa.go
+++ b/src/cmd/compile/internal/gc/ssa.go
@@ -211,7 +211,7 @@ func initssaconfig() {
// considered as the 0th parameter. This does not include the receiver of an
// interface call.
func getParam(n *ir.CallExpr, i int) *types.Field {
- t := n.Left().Type()
+ t := n.X.Type()
if n.Op() == ir.OCALLMETH {
if i == 0 {
return t.Recv()
@@ -275,7 +275,7 @@ func (s *state) emitOpenDeferInfo() {
var maxargsize int64
for i := len(s.openDefers) - 1; i >= 0; i-- {
r := s.openDefers[i]
- argsize := r.n.Left().Type().ArgWidth()
+ argsize := r.n.X.Type().ArgWidth()
if argsize > maxargsize {
maxargsize = argsize
}
@@ -287,7 +287,7 @@ func (s *state) emitOpenDeferInfo() {
// Write in reverse-order, for ease of running in that order at runtime
for i := len(s.openDefers) - 1; i >= 0; i-- {
r := s.openDefers[i]
- off = dvarint(x, off, r.n.Left().Type().ArgWidth())
+ off = dvarint(x, off, r.n.X.Type().ArgWidth())
off = dvarint(x, off, -r.closureNode.FrameOffset())
numArgs := len(r.argNodes)
if r.rcvrNode != nil {
@@ -323,7 +323,7 @@ func buildssa(fn *ir.Func, worker int) *ssa.Func {
if printssa {
astBuf = &bytes.Buffer{}
ir.FDumpList(astBuf, "buildssa-enter", fn.Enter)
- ir.FDumpList(astBuf, "buildssa-body", fn.Body())
+ ir.FDumpList(astBuf, "buildssa-body", fn.Body)
ir.FDumpList(astBuf, "buildssa-exit", fn.Exit)
if ssaDumpStdout {
fmt.Println("generating SSA for", name)
@@ -438,7 +438,7 @@ func buildssa(fn *ir.Func, worker int) *ssa.Func {
var args []ssa.Param
var results []ssa.Param
for _, n := range fn.Dcl {
- switch n.Class() {
+ switch n.Class_ {
case ir.PPARAM:
s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
args = append(args, ssa.Param{Type: n.Type(), Offset: int32(n.FrameOffset())})
@@ -459,13 +459,13 @@ func buildssa(fn *ir.Func, worker int) *ssa.Func {
case ir.PFUNC:
// local function - already handled by frontend
default:
- s.Fatalf("local variable with class %v unimplemented", n.Class())
+ s.Fatalf("local variable with class %v unimplemented", n.Class_)
}
}
// Populate SSAable arguments.
for _, n := range fn.Dcl {
- if n.Class() == ir.PPARAM && s.canSSA(n) {
+ if n.Class_ == ir.PPARAM && s.canSSA(n) {
v := s.newValue0A(ssa.OpArg, n.Type(), n)
s.vars[n] = v
s.addNamedValue(n, v) // This helps with debugging information, not needed for compilation itself.
@@ -474,7 +474,7 @@ func buildssa(fn *ir.Func, worker int) *ssa.Func {
// Convert the AST-based IR to the SSA-based IR
s.stmtList(fn.Enter)
- s.stmtList(fn.Body())
+ s.stmtList(fn.Body)
// fallthrough to exit
if s.curBlock != nil {
@@ -1028,7 +1028,7 @@ func (s *state) instrumentMove(t *types.Type, dst, src *ssa.Value) {
}
func (s *state) instrument2(t *types.Type, addr, addr2 *ssa.Value, kind instrumentKind) {
- if !s.curfn.Func().InstrumentBody() {
+ if !s.curfn.InstrumentBody() {
return
}
@@ -1151,7 +1151,7 @@ func (s *state) stmt(n ir.Node) {
case ir.OBLOCK:
n := n.(*ir.BlockStmt)
- s.stmtList(n.List())
+ s.stmtList(n.List)
// No-ops
case ir.ODCLCONST, ir.ODCLTYPE, ir.OFALL:
@@ -1168,9 +1168,9 @@ func (s *state) stmt(n ir.Node) {
case ir.OCALLMETH, ir.OCALLINTER:
n := n.(*ir.CallExpr)
s.callResult(n, callNormal)
- if n.Op() == ir.OCALLFUNC && n.Left().Op() == ir.ONAME && n.Left().(*ir.Name).Class() == ir.PFUNC {
- if fn := n.Left().Sym().Name; base.Flag.CompilingRuntime && fn == "throw" ||
- n.Left().Sym().Pkg == Runtimepkg && (fn == "throwinit" || fn == "gopanic" || fn == "panicwrap" || fn == "block" || fn == "panicmakeslicelen" || fn == "panicmakeslicecap") {
+ if n.Op() == ir.OCALLFUNC && n.X.Op() == ir.ONAME && n.X.(*ir.Name).Class_ == ir.PFUNC {
+ if fn := n.X.Sym().Name; base.Flag.CompilingRuntime && fn == "throw" ||
+ n.X.Sym().Pkg == Runtimepkg && (fn == "throwinit" || fn == "gopanic" || fn == "panicwrap" || fn == "block" || fn == "panicmakeslicelen" || fn == "panicmakeslicecap") {
m := s.mem()
b := s.endBlock()
b.Kind = ssa.BlockExit
@@ -1194,23 +1194,23 @@ func (s *state) stmt(n ir.Node) {
base.WarnfAt(n.Pos(), "%s defer", defertype)
}
if s.hasOpenDefers {
- s.openDeferRecord(n.Left().(*ir.CallExpr))
+ s.openDeferRecord(n.Call.(*ir.CallExpr))
} else {
d := callDefer
if n.Esc() == EscNever {
d = callDeferStack
}
- s.callResult(n.Left().(*ir.CallExpr), d)
+ s.callResult(n.Call.(*ir.CallExpr), d)
}
case ir.OGO:
n := n.(*ir.GoDeferStmt)
- s.callResult(n.Left().(*ir.CallExpr), callGo)
+ s.callResult(n.Call.(*ir.CallExpr), callGo)
case ir.OAS2DOTTYPE:
n := n.(*ir.AssignListStmt)
- res, resok := s.dottype(n.Rlist().First().(*ir.TypeAssertExpr), true)
+ res, resok := s.dottype(n.Rhs.First().(*ir.TypeAssertExpr), true)
deref := false
- if !canSSAType(n.Rlist().First().Type()) {
+ if !canSSAType(n.Rhs.First().Type()) {
if res.Op != ssa.OpLoad {
s.Fatalf("dottype of non-load")
}
@@ -1224,33 +1224,33 @@ func (s *state) stmt(n ir.Node) {
deref = true
res = res.Args[0]
}
- s.assign(n.List().First(), res, deref, 0)
- s.assign(n.List().Second(), resok, false, 0)
+ s.assign(n.Lhs.First(), res, deref, 0)
+ s.assign(n.Lhs.Second(), resok, false, 0)
return
case ir.OAS2FUNC:
// We come here only when it is an intrinsic call returning two values.
n := n.(*ir.AssignListStmt)
- call := n.Rlist().First().(*ir.CallExpr)
+ call := n.Rhs.First().(*ir.CallExpr)
if !IsIntrinsicCall(call) {
s.Fatalf("non-intrinsic AS2FUNC not expanded %v", call)
}
v := s.intrinsicCall(call)
- v1 := s.newValue1(ssa.OpSelect0, n.List().First().Type(), v)
- v2 := s.newValue1(ssa.OpSelect1, n.List().Second().Type(), v)
- s.assign(n.List().First(), v1, false, 0)
- s.assign(n.List().Second(), v2, false, 0)
+ v1 := s.newValue1(ssa.OpSelect0, n.Lhs.First().Type(), v)
+ v2 := s.newValue1(ssa.OpSelect1, n.Lhs.Second().Type(), v)
+ s.assign(n.Lhs.First(), v1, false, 0)
+ s.assign(n.Lhs.Second(), v2, false, 0)
return
case ir.ODCL:
n := n.(*ir.Decl)
- if n.Left().(*ir.Name).Class() == ir.PAUTOHEAP {
+ if n.X.(*ir.Name).Class_ == ir.PAUTOHEAP {
s.Fatalf("DCL %v", n)
}
case ir.OLABEL:
n := n.(*ir.LabelStmt)
- sym := n.Sym()
+ sym := n.Label
lab := s.label(sym)
// The label might already have a target block via a goto.
@@ -1268,7 +1268,7 @@ func (s *state) stmt(n ir.Node) {
case ir.OGOTO:
n := n.(*ir.BranchStmt)
- sym := n.Sym()
+ sym := n.Label
lab := s.label(sym)
if lab.target == nil {
@@ -1281,7 +1281,7 @@ func (s *state) stmt(n ir.Node) {
case ir.OAS:
n := n.(*ir.AssignStmt)
- if n.Left() == n.Right() && n.Left().Op() == ir.ONAME {
+ if n.X == n.Y && n.X.Op() == ir.ONAME {
// An x=x assignment. No point in doing anything
// here. In addition, skipping this assignment
// prevents generating:
@@ -1293,7 +1293,7 @@ func (s *state) stmt(n ir.Node) {
}
// Evaluate RHS.
- rhs := n.Right()
+ rhs := n.Y
if rhs != nil {
switch rhs.Op() {
case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT:
@@ -1309,13 +1309,13 @@ func (s *state) stmt(n ir.Node) {
// Check whether we're writing the result of an append back to the same slice.
// If so, we handle it specially to avoid write barriers on the fast
// (non-growth) path.
- if !samesafeexpr(n.Left(), rhs.List().First()) || base.Flag.N != 0 {
+ if !samesafeexpr(n.X, rhs.Args.First()) || base.Flag.N != 0 {
break
}
// If the slice can be SSA'd, it'll be on the stack,
// so there will be no write barriers,
// so there's no need to attempt to prevent them.
- if s.canSSA(n.Left()) {
+ if s.canSSA(n.X) {
if base.Debug.Append > 0 { // replicating old diagnostic message
base.WarnfAt(n.Pos(), "append: len-only update (in local slice)")
}
@@ -1329,7 +1329,7 @@ func (s *state) stmt(n ir.Node) {
}
}
- if ir.IsBlank(n.Left()) {
+ if ir.IsBlank(n.X) {
// _ = rhs
// Just evaluate rhs for side-effects.
if rhs != nil {
@@ -1339,10 +1339,10 @@ func (s *state) stmt(n ir.Node) {
}
var t *types.Type
- if n.Right() != nil {
- t = n.Right().Type()
+ if n.Y != nil {
+ t = n.Y.Type()
} else {
- t = n.Left().Type()
+ t = n.X.Type()
}
var r *ssa.Value
@@ -1362,7 +1362,7 @@ func (s *state) stmt(n ir.Node) {
}
var skip skipMask
- if rhs != nil && (rhs.Op() == ir.OSLICE || rhs.Op() == ir.OSLICE3 || rhs.Op() == ir.OSLICESTR) && samesafeexpr(rhs.(*ir.SliceExpr).Left(), n.Left()) {
+ if rhs != nil && (rhs.Op() == ir.OSLICE || rhs.Op() == ir.OSLICE3 || rhs.Op() == ir.OSLICESTR) && samesafeexpr(rhs.(*ir.SliceExpr).X, n.X) {
// We're assigning a slicing operation back to its source.
// Don't write back fields we aren't changing. See issue #14855.
rhs := rhs.(*ir.SliceExpr)
@@ -1392,49 +1392,49 @@ func (s *state) stmt(n ir.Node) {
}
}
- s.assign(n.Left(), r, deref, skip)
+ s.assign(n.X, r, deref, skip)
case ir.OIF:
n := n.(*ir.IfStmt)
- if ir.IsConst(n.Left(), constant.Bool) {
- s.stmtList(n.Left().Init())
- if ir.BoolVal(n.Left()) {
- s.stmtList(n.Body())
+ if ir.IsConst(n.Cond, constant.Bool) {
+ s.stmtList(n.Cond.Init())
+ if ir.BoolVal(n.Cond) {
+ s.stmtList(n.Body)
} else {
- s.stmtList(n.Rlist())
+ s.stmtList(n.Else)
}
break
}
bEnd := s.f.NewBlock(ssa.BlockPlain)
var likely int8
- if n.Likely() {
+ if n.Likely {
likely = 1
}
var bThen *ssa.Block
- if n.Body().Len() != 0 {
+ if n.Body.Len() != 0 {
bThen = s.f.NewBlock(ssa.BlockPlain)
} else {
bThen = bEnd
}
var bElse *ssa.Block
- if n.Rlist().Len() != 0 {
+ if n.Else.Len() != 0 {
bElse = s.f.NewBlock(ssa.BlockPlain)
} else {
bElse = bEnd
}
- s.condBranch(n.Left(), bThen, bElse, likely)
+ s.condBranch(n.Cond, bThen, bElse, likely)
- if n.Body().Len() != 0 {
+ if n.Body.Len() != 0 {
s.startBlock(bThen)
- s.stmtList(n.Body())
+ s.stmtList(n.Body)
if b := s.endBlock(); b != nil {
b.AddEdgeTo(bEnd)
}
}
- if n.Rlist().Len() != 0 {
+ if n.Else.Len() != 0 {
s.startBlock(bElse)
- s.stmtList(n.Rlist())
+ s.stmtList(n.Else)
if b := s.endBlock(); b != nil {
b.AddEdgeTo(bEnd)
}
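
Aside: the OIF case above starts with a constant-condition fast path — when n.Cond is a boolean constant, only the taken branch's statements are lowered and no blocks are wired up. At the source level, that is exactly how constant-guarded code disappears:

package main

import "fmt"

const debug = false // a constant condition, as ir.IsConst would report

func main() {
	if debug {
		// Only the taken branch is lowered; this one never is.
		fmt.Println("unreachable at compile time")
	}
	fmt.Println("only the taken path remains")
}
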
@@ -1443,7 +1443,7 @@ func (s *state) stmt(n ir.Node) {
case ir.ORETURN:
n := n.(*ir.ReturnStmt)
- s.stmtList(n.List())
+ s.stmtList(n.Results)
b := s.exit()
b.Pos = s.lastPos.WithIsStmt()
@@ -1451,12 +1451,12 @@ func (s *state) stmt(n ir.Node) {
n := n.(*ir.BranchStmt)
b := s.exit()
b.Kind = ssa.BlockRetJmp // override BlockRet
- b.Aux = callTargetLSym(n.Sym(), s.curfn.LSym)
+ b.Aux = callTargetLSym(n.Label, s.curfn.LSym)
case ir.OCONTINUE, ir.OBREAK:
n := n.(*ir.BranchStmt)
var to *ssa.Block
- if n.Sym() == nil {
+ if n.Label == nil {
// plain break/continue
switch n.Op() {
case ir.OCONTINUE:
@@ -1466,7 +1466,7 @@ func (s *state) stmt(n ir.Node) {
}
} else {
// labeled break/continue; look up the target
- sym := n.Sym()
+ sym := n.Label
lab := s.label(sym)
switch n.Op() {
case ir.OCONTINUE:
@@ -1501,8 +1501,8 @@ func (s *state) stmt(n ir.Node) {
b.AddEdgeTo(bCond)
// generate code to test condition
s.startBlock(bCond)
- if n.Left() != nil {
- s.condBranch(n.Left(), bBody, bEnd, 1)
+ if n.Cond != nil {
+ s.condBranch(n.Cond, bBody, bEnd, 1)
} else {
b := s.endBlock()
b.Kind = ssa.BlockPlain
@@ -1519,7 +1519,7 @@ func (s *state) stmt(n ir.Node) {
s.continueTo = bIncr
s.breakTo = bEnd
var lab *ssaLabel
- if sym := n.Sym(); sym != nil {
+ if sym := n.Label; sym != nil {
// labeled for loop
lab = s.label(sym)
lab.continueTarget = bIncr
@@ -1528,7 +1528,7 @@ func (s *state) stmt(n ir.Node) {
// generate body
s.startBlock(bBody)
- s.stmtList(n.Body())
+ s.stmtList(n.Body)
// tear down continue/break
s.continueTo = prevContinue
@@ -1545,8 +1545,8 @@ func (s *state) stmt(n ir.Node) {
// generate incr (and, for OFORUNTIL, condition)
s.startBlock(bIncr)
- if n.Right() != nil {
- s.stmt(n.Right())
+ if n.Post != nil {
+ s.stmt(n.Post)
}
if n.Op() == ir.OFOR {
if b := s.endBlock(); b != nil {
@@ -1561,10 +1561,10 @@ func (s *state) stmt(n ir.Node) {
// bCond is unused in OFORUNTIL, so repurpose it.
bLateIncr := bCond
// test condition
- s.condBranch(n.Left(), bLateIncr, bEnd, 1)
+ s.condBranch(n.Cond, bLateIncr, bEnd, 1)
// generate late increment
s.startBlock(bLateIncr)
- s.stmtList(n.List())
+ s.stmtList(n.Late)
s.endBlock().AddEdgeTo(bBody)
}
@@ -1581,12 +1581,12 @@ func (s *state) stmt(n ir.Node) {
var body ir.Nodes
if n.Op() == ir.OSWITCH {
n := n.(*ir.SwitchStmt)
- sym = n.Sym()
- body = n.Body()
+ sym = n.Label
+ body = n.Compiled
} else {
n := n.(*ir.SelectStmt)
- sym = n.Sym()
- body = n.Body()
+ sym = n.Label
+ body = n.Compiled
}
var lab *ssaLabel
@@ -1616,8 +1616,8 @@ func (s *state) stmt(n ir.Node) {
case ir.OVARDEF:
n := n.(*ir.UnaryExpr)
- if !s.canSSA(n.Left()) {
- s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, n.Left().(*ir.Name), s.mem(), false)
+ if !s.canSSA(n.X) {
+ s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, n.X.(*ir.Name), s.mem(), false)
}
case ir.OVARKILL:
// Insert a varkill op to record that a variable is no longer live.
@@ -1625,18 +1625,18 @@ func (s *state) stmt(n ir.Node) {
// varkill in the store chain is enough to keep it correctly ordered
// with respect to call ops.
n := n.(*ir.UnaryExpr)
- if !s.canSSA(n.Left()) {
- s.vars[memVar] = s.newValue1Apos(ssa.OpVarKill, types.TypeMem, n.Left().(*ir.Name), s.mem(), false)
+ if !s.canSSA(n.X) {
+ s.vars[memVar] = s.newValue1Apos(ssa.OpVarKill, types.TypeMem, n.X.(*ir.Name), s.mem(), false)
}
case ir.OVARLIVE:
// Insert a varlive op to record that a variable is still live.
n := n.(*ir.UnaryExpr)
- v := n.Left().(*ir.Name)
+ v := n.X.(*ir.Name)
if !v.Addrtaken() {
s.Fatalf("VARLIVE variable %v must have Addrtaken set", v)
}
- switch v.Class() {
+ switch v.Class_ {
case ir.PAUTO, ir.PPARAM, ir.PPARAMOUT:
default:
s.Fatalf("VARLIVE variable %v must be Auto or Arg", v)
@@ -1645,12 +1645,12 @@ func (s *state) stmt(n ir.Node) {
case ir.OCHECKNIL:
n := n.(*ir.UnaryExpr)
- p := s.expr(n.Left())
+ p := s.expr(n.X)
s.nilCheck(p)
case ir.OINLMARK:
n := n.(*ir.InlineMarkStmt)
- s.newValue1I(ssa.OpInlMark, types.TypeVoid, n.Offset(), s.mem())
+ s.newValue1I(ssa.OpInlMark, types.TypeVoid, n.Index, s.mem())
default:
s.Fatalf("unhandled stmt %v", n.Op())
@@ -2118,19 +2118,19 @@ func (s *state) expr(n ir.Node) *ssa.Value {
switch n.Op() {
case ir.OBYTES2STRTMP:
n := n.(*ir.ConvExpr)
- slice := s.expr(n.Left())
+ slice := s.expr(n.X)
ptr := s.newValue1(ssa.OpSlicePtr, s.f.Config.Types.BytePtr, slice)
len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
return s.newValue2(ssa.OpStringMake, n.Type(), ptr, len)
case ir.OSTR2BYTESTMP:
n := n.(*ir.ConvExpr)
- str := s.expr(n.Left())
+ str := s.expr(n.X)
ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, str)
len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], str)
return s.newValue3(ssa.OpSliceMake, n.Type(), ptr, len, len)
case ir.OCFUNC:
n := n.(*ir.UnaryExpr)
- aux := n.Left().Sym().Linksym()
+ aux := n.X.Sym().Linksym()
return s.entryNewValue1A(ssa.OpAddr, n.Type(), aux, s.sb)
case ir.OMETHEXPR:
n := n.(*ir.MethodExpr)
@@ -2138,7 +2138,7 @@ func (s *state) expr(n ir.Node) *ssa.Value {
return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(n.Type()), sym, s.sb)
case ir.ONAME:
n := n.(*ir.Name)
- if n.Class() == ir.PFUNC {
+ if n.Class_ == ir.PFUNC {
// "value" of a function is the address of the function's closure
sym := funcsym(n.Sym()).Linksym()
return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(n.Type()), sym, s.sb)
@@ -2230,11 +2230,11 @@ func (s *state) expr(n ir.Node) *ssa.Value {
case ir.OCONVNOP:
n := n.(*ir.ConvExpr)
to := n.Type()
- from := n.Left().Type()
+ from := n.X.Type()
// Assume everything will work out, so set up our return value.
// Anything interesting that happens from here is a fatal.
- x := s.expr(n.Left())
+ x := s.expr(n.X)
if to == from {
return x
}
@@ -2298,9 +2298,9 @@ func (s *state) expr(n ir.Node) *ssa.Value {
case ir.OCONV:
n := n.(*ir.ConvExpr)
- x := s.expr(n.Left())
- ft := n.Left().Type() // from type
- tt := n.Type() // to type
+ x := s.expr(n.X)
+ ft := n.X.Type() // from type
+ tt := n.Type() // to type
if ft.IsBoolean() && tt.IsKind(types.TUINT8) {
// Bool -> uint8 is generated internally when indexing into runtime.staticbyte.
return s.newValue1(ssa.OpCopy, n.Type(), x)
@@ -2465,7 +2465,7 @@ func (s *state) expr(n ir.Node) *ssa.Value {
s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexImag, ftp, x)))
}
- s.Fatalf("unhandled OCONV %s -> %s", n.Left().Type().Kind(), n.Type().Kind())
+ s.Fatalf("unhandled OCONV %s -> %s", n.X.Type().Kind(), n.Type().Kind())
return nil
case ir.ODOTTYPE:
@@ -2476,10 +2476,10 @@ func (s *state) expr(n ir.Node) *ssa.Value {
// binary ops
case ir.OLT, ir.OEQ, ir.ONE, ir.OLE, ir.OGE, ir.OGT:
n := n.(*ir.BinaryExpr)
- a := s.expr(n.Left())
- b := s.expr(n.Right())
- if n.Left().Type().IsComplex() {
- pt := floatForComplex(n.Left().Type())
+ a := s.expr(n.X)
+ b := s.expr(n.Y)
+ if n.X.Type().IsComplex() {
+ pt := floatForComplex(n.X.Type())
op := s.ssaOp(ir.OEQ, pt)
r := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b))
i := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b))
@@ -2502,16 +2502,16 @@ func (s *state) expr(n ir.Node) *ssa.Value {
case ir.OGT:
op, a, b = ir.OLT, b, a
}
- if n.Left().Type().IsFloat() {
+ if n.X.Type().IsFloat() {
// float comparison
- return s.newValueOrSfCall2(s.ssaOp(op, n.Left().Type()), types.Types[types.TBOOL], a, b)
+ return s.newValueOrSfCall2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
}
// integer comparison
- return s.newValue2(s.ssaOp(op, n.Left().Type()), types.Types[types.TBOOL], a, b)
+ return s.newValue2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
case ir.OMUL:
n := n.(*ir.BinaryExpr)
- a := s.expr(n.Left())
- b := s.expr(n.Right())
+ a := s.expr(n.X)
+ b := s.expr(n.Y)
if n.Type().IsComplex() {
mulop := ssa.OpMul64F
addop := ssa.OpAdd64F
@@ -2550,8 +2550,8 @@ func (s *state) expr(n ir.Node) *ssa.Value {
case ir.ODIV:
n := n.(*ir.BinaryExpr)
- a := s.expr(n.Left())
- b := s.expr(n.Right())
+ a := s.expr(n.X)
+ b := s.expr(n.Y)
if n.Type().IsComplex() {
// TODO this is not executed because the front-end substitutes a runtime call.
// That probably ought to change; with modest optimization the widen/narrow
@@ -2598,13 +2598,13 @@ func (s *state) expr(n ir.Node) *ssa.Value {
return s.intDivide(n, a, b)
case ir.OMOD:
n := n.(*ir.BinaryExpr)
- a := s.expr(n.Left())
- b := s.expr(n.Right())
+ a := s.expr(n.X)
+ b := s.expr(n.Y)
return s.intDivide(n, a, b)
case ir.OADD, ir.OSUB:
n := n.(*ir.BinaryExpr)
- a := s.expr(n.Left())
- b := s.expr(n.Right())
+ a := s.expr(n.X)
+ b := s.expr(n.Y)
if n.Type().IsComplex() {
pt := floatForComplex(n.Type())
op := s.ssaOp(n.Op(), pt)
@@ -2618,19 +2618,19 @@ func (s *state) expr(n ir.Node) *ssa.Value {
return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
case ir.OAND, ir.OOR, ir.OXOR:
n := n.(*ir.BinaryExpr)
- a := s.expr(n.Left())
- b := s.expr(n.Right())
+ a := s.expr(n.X)
+ b := s.expr(n.Y)
return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
case ir.OANDNOT:
n := n.(*ir.BinaryExpr)
- a := s.expr(n.Left())
- b := s.expr(n.Right())
+ a := s.expr(n.X)
+ b := s.expr(n.Y)
b = s.newValue1(s.ssaOp(ir.OBITNOT, b.Type), b.Type, b)
return s.newValue2(s.ssaOp(ir.OAND, n.Type()), a.Type, a, b)
case ir.OLSH, ir.ORSH:
n := n.(*ir.BinaryExpr)
- a := s.expr(n.Left())
- b := s.expr(n.Right())
+ a := s.expr(n.X)
+ b := s.expr(n.Y)
bt := b.Type
if bt.IsSigned() {
cmp := s.newValue2(s.ssaOp(ir.OLE, bt), types.Types[types.TBOOL], s.zeroVal(bt), b)
@@ -2653,7 +2653,7 @@ func (s *state) expr(n ir.Node) *ssa.Value {
// Using var in the subsequent block introduces the
// necessary phi variable.
n := n.(*ir.LogicalExpr)
- el := s.expr(n.Left())
+ el := s.expr(n.X)
s.vars[n] = el
b := s.endBlock()
@@ -2675,7 +2675,7 @@ func (s *state) expr(n ir.Node) *ssa.Value {
}
s.startBlock(bRight)
- er := s.expr(n.Right())
+ er := s.expr(n.Y)
s.vars[n] = er
b = s.endBlock()
@@ -2685,14 +2685,14 @@ func (s *state) expr(n ir.Node) *ssa.Value {
return s.variable(n, types.Types[types.TBOOL])
case ir.OCOMPLEX:
n := n.(*ir.BinaryExpr)
- r := s.expr(n.Left())
- i := s.expr(n.Right())
+ r := s.expr(n.X)
+ i := s.expr(n.Y)
return s.newValue2(ssa.OpComplexMake, n.Type(), r, i)
// unary ops
case ir.ONEG:
n := n.(*ir.UnaryExpr)
- a := s.expr(n.Left())
+ a := s.expr(n.X)
if n.Type().IsComplex() {
tp := floatForComplex(n.Type())
negop := s.ssaOp(n.Op(), tp)
@@ -2703,31 +2703,31 @@ func (s *state) expr(n ir.Node) *ssa.Value {
return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
case ir.ONOT, ir.OBITNOT:
n := n.(*ir.UnaryExpr)
- a := s.expr(n.Left())
+ a := s.expr(n.X)
return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
case ir.OIMAG, ir.OREAL:
n := n.(*ir.UnaryExpr)
- a := s.expr(n.Left())
- return s.newValue1(s.ssaOp(n.Op(), n.Left().Type()), n.Type(), a)
+ a := s.expr(n.X)
+ return s.newValue1(s.ssaOp(n.Op(), n.X.Type()), n.Type(), a)
case ir.OPLUS:
n := n.(*ir.UnaryExpr)
- return s.expr(n.Left())
+ return s.expr(n.X)
case ir.OADDR:
n := n.(*ir.AddrExpr)
- return s.addr(n.Left())
+ return s.addr(n.X)
case ir.ORESULT:
n := n.(*ir.ResultExpr)
if s.prevCall == nil || s.prevCall.Op != ssa.OpStaticLECall && s.prevCall.Op != ssa.OpInterLECall && s.prevCall.Op != ssa.OpClosureLECall {
// Do the old thing
- addr := s.constOffPtrSP(types.NewPtr(n.Type()), n.Offset())
+ addr := s.constOffPtrSP(types.NewPtr(n.Type()), n.Offset)
return s.rawLoad(n.Type(), addr)
}
- which := s.prevCall.Aux.(*ssa.AuxCall).ResultForOffset(n.Offset())
+ which := s.prevCall.Aux.(*ssa.AuxCall).ResultForOffset(n.Offset)
if which == -1 {
// Do the old thing // TODO: Panic instead.
- addr := s.constOffPtrSP(types.NewPtr(n.Type()), n.Offset())
+ addr := s.constOffPtrSP(types.NewPtr(n.Type()), n.Offset)
return s.rawLoad(n.Type(), addr)
}
if canSSAType(n.Type()) {
@@ -2739,17 +2739,17 @@ func (s *state) expr(n ir.Node) *ssa.Value {
case ir.ODEREF:
n := n.(*ir.StarExpr)
- p := s.exprPtr(n.Left(), n.Bounded(), n.Pos())
+ p := s.exprPtr(n.X, n.Bounded(), n.Pos())
return s.load(n.Type(), p)
case ir.ODOT:
n := n.(*ir.SelectorExpr)
- if n.Left().Op() == ir.OSTRUCTLIT {
+ if n.X.Op() == ir.OSTRUCTLIT {
// All literals with nonzero fields have already been
// rewritten during walk. Any that remain are just T{}
// or equivalents. Use the zero value.
- if !isZero(n.Left()) {
- s.Fatalf("literal with nonzero value in SSA: %v", n.Left())
+ if !isZero(n.X) {
+ s.Fatalf("literal with nonzero value in SSA: %v", n.X)
}
return s.zeroVal(n.Type())
}
@@ -2761,46 +2761,46 @@ func (s *state) expr(n ir.Node) *ssa.Value {
p := s.addr(n)
return s.load(n.Type(), p)
}
- v := s.expr(n.Left())
+ v := s.expr(n.X)
return s.newValue1I(ssa.OpStructSelect, n.Type(), int64(fieldIdx(n)), v)
case ir.ODOTPTR:
n := n.(*ir.SelectorExpr)
- p := s.exprPtr(n.Left(), n.Bounded(), n.Pos())
- p = s.newValue1I(ssa.OpOffPtr, types.NewPtr(n.Type()), n.Offset(), p)
+ p := s.exprPtr(n.X, n.Bounded(), n.Pos())
+ p = s.newValue1I(ssa.OpOffPtr, types.NewPtr(n.Type()), n.Offset, p)
return s.load(n.Type(), p)
case ir.OINDEX:
n := n.(*ir.IndexExpr)
switch {
- case n.Left().Type().IsString():
- if n.Bounded() && ir.IsConst(n.Left(), constant.String) && ir.IsConst(n.Right(), constant.Int) {
+ case n.X.Type().IsString():
+ if n.Bounded() && ir.IsConst(n.X, constant.String) && ir.IsConst(n.Index, constant.Int) {
// Replace "abc"[1] with 'b'.
// Delayed until now because "abc"[1] is not an ideal constant.
// See test/fixedbugs/issue11370.go.
- return s.newValue0I(ssa.OpConst8, types.Types[types.TUINT8], int64(int8(ir.StringVal(n.Left())[ir.Int64Val(n.Right())])))
+ return s.newValue0I(ssa.OpConst8, types.Types[types.TUINT8], int64(int8(ir.StringVal(n.X)[ir.Int64Val(n.Index)])))
}
- a := s.expr(n.Left())
- i := s.expr(n.Right())
+ a := s.expr(n.X)
+ i := s.expr(n.Index)
len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], a)
i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
ptrtyp := s.f.Config.Types.BytePtr
ptr := s.newValue1(ssa.OpStringPtr, ptrtyp, a)
- if ir.IsConst(n.Right(), constant.Int) {
- ptr = s.newValue1I(ssa.OpOffPtr, ptrtyp, ir.Int64Val(n.Right()), ptr)
+ if ir.IsConst(n.Index, constant.Int) {
+ ptr = s.newValue1I(ssa.OpOffPtr, ptrtyp, ir.Int64Val(n.Index), ptr)
} else {
ptr = s.newValue2(ssa.OpAddPtr, ptrtyp, ptr, i)
}
return s.load(types.Types[types.TUINT8], ptr)
- case n.Left().Type().IsSlice():
+ case n.X.Type().IsSlice():
p := s.addr(n)
- return s.load(n.Left().Type().Elem(), p)
- case n.Left().Type().IsArray():
- if canSSAType(n.Left().Type()) {
+ return s.load(n.X.Type().Elem(), p)
+ case n.X.Type().IsArray():
+ if canSSAType(n.X.Type()) {
// SSA can handle arrays of length at most 1.
- bound := n.Left().Type().NumElem()
- a := s.expr(n.Left())
- i := s.expr(n.Right())
+ bound := n.X.Type().NumElem()
+ a := s.expr(n.X)
+ i := s.expr(n.Index)
if bound == 0 {
// Bounds check will never succeed. Might as well
// use constants for the bounds check.
@@ -2814,33 +2814,33 @@ func (s *state) expr(n ir.Node) *ssa.Value {
return s.newValue1I(ssa.OpArraySelect, n.Type(), 0, a)
}
p := s.addr(n)
- return s.load(n.Left().Type().Elem(), p)
+ return s.load(n.X.Type().Elem(), p)
default:
- s.Fatalf("bad type for index %v", n.Left().Type())
+ s.Fatalf("bad type for index %v", n.X.Type())
return nil
}
case ir.OLEN, ir.OCAP:
n := n.(*ir.UnaryExpr)
switch {
- case n.Left().Type().IsSlice():
+ case n.X.Type().IsSlice():
op := ssa.OpSliceLen
if n.Op() == ir.OCAP {
op = ssa.OpSliceCap
}
- return s.newValue1(op, types.Types[types.TINT], s.expr(n.Left()))
- case n.Left().Type().IsString(): // string; not reachable for OCAP
- return s.newValue1(ssa.OpStringLen, types.Types[types.TINT], s.expr(n.Left()))
- case n.Left().Type().IsMap(), n.Left().Type().IsChan():
- return s.referenceTypeBuiltin(n, s.expr(n.Left()))
+ return s.newValue1(op, types.Types[types.TINT], s.expr(n.X))
+ case n.X.Type().IsString(): // string; not reachable for OCAP
+ return s.newValue1(ssa.OpStringLen, types.Types[types.TINT], s.expr(n.X))
+ case n.X.Type().IsMap(), n.X.Type().IsChan():
+ return s.referenceTypeBuiltin(n, s.expr(n.X))
default: // array
- return s.constInt(types.Types[types.TINT], n.Left().Type().NumElem())
+ return s.constInt(types.Types[types.TINT], n.X.Type().NumElem())
}
case ir.OSPTR:
n := n.(*ir.UnaryExpr)
- a := s.expr(n.Left())
- if n.Left().Type().IsSlice() {
+ a := s.expr(n.X)
+ if n.X.Type().IsSlice() {
return s.newValue1(ssa.OpSlicePtr, n.Type(), a)
} else {
return s.newValue1(ssa.OpStringPtr, n.Type(), a)
@@ -2848,30 +2848,30 @@ func (s *state) expr(n ir.Node) *ssa.Value {
case ir.OITAB:
n := n.(*ir.UnaryExpr)
- a := s.expr(n.Left())
+ a := s.expr(n.X)
return s.newValue1(ssa.OpITab, n.Type(), a)
case ir.OIDATA:
n := n.(*ir.UnaryExpr)
- a := s.expr(n.Left())
+ a := s.expr(n.X)
return s.newValue1(ssa.OpIData, n.Type(), a)
case ir.OEFACE:
n := n.(*ir.BinaryExpr)
- tab := s.expr(n.Left())
- data := s.expr(n.Right())
+ tab := s.expr(n.X)
+ data := s.expr(n.Y)
return s.newValue2(ssa.OpIMake, n.Type(), tab, data)
case ir.OSLICEHEADER:
n := n.(*ir.SliceHeaderExpr)
- p := s.expr(n.Left())
- l := s.expr(n.List().First())
- c := s.expr(n.List().Second())
+ p := s.expr(n.Ptr)
+ l := s.expr(n.LenCap.First())
+ c := s.expr(n.LenCap.Second())
return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
case ir.OSLICE, ir.OSLICEARR, ir.OSLICE3, ir.OSLICE3ARR:
n := n.(*ir.SliceExpr)
- v := s.expr(n.Left())
+ v := s.expr(n.X)
var i, j, k *ssa.Value
low, high, max := n.SliceBounds()
if low != nil {
@@ -2888,7 +2888,7 @@ func (s *state) expr(n ir.Node) *ssa.Value {
case ir.OSLICESTR:
n := n.(*ir.SliceExpr)
- v := s.expr(n.Left())
+ v := s.expr(n.X)
var i, j *ssa.Value
low, high, _ := n.SliceBounds()
if low != nil {
@@ -2933,7 +2933,7 @@ func (s *state) expr(n ir.Node) *ssa.Value {
if n.Type().Elem().Size() == 0 {
return s.newValue1A(ssa.OpAddr, n.Type(), zerobaseSym, s.sb)
}
- typ := s.expr(n.Left())
+ typ := s.expr(n.X)
vv := s.rtcall(newobject, true, []*types.Type{n.Type()}, typ)
return vv[0]
@@ -2987,7 +2987,7 @@ func (s *state) append(n *ir.CallExpr, inplace bool) *ssa.Value {
pt := types.NewPtr(et)
// Evaluate slice
- sn := n.List().First() // the slice node is the first in the list
+ sn := n.Args.First() // the slice node is the first in the list
var slice, addr *ssa.Value
if inplace {
@@ -3002,7 +3002,7 @@ func (s *state) append(n *ir.CallExpr, inplace bool) *ssa.Value {
assign := s.f.NewBlock(ssa.BlockPlain)
// Decide if we need to grow
- nargs := int64(n.List().Len() - 1)
+ nargs := int64(n.Args.Len() - 1)
p := s.newValue1(ssa.OpSlicePtr, pt, slice)
l := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
c := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], slice)
@@ -3027,13 +3027,13 @@ func (s *state) append(n *ir.CallExpr, inplace bool) *ssa.Value {
// Call growslice
s.startBlock(grow)
- taddr := s.expr(n.Left())
+ taddr := s.expr(n.X)
r := s.rtcall(growslice, true, []*types.Type{pt, types.Types[types.TINT], types.Types[types.TINT]}, taddr, p, l, c, nl)
if inplace {
if sn.Op() == ir.ONAME {
sn := sn.(*ir.Name)
- if sn.Class() != ir.PEXTERN {
+ if sn.Class_ != ir.PEXTERN {
// Tell liveness we're about to build a new slice
s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, sn, s.mem())
}
@@ -3071,7 +3071,7 @@ func (s *state) append(n *ir.CallExpr, inplace bool) *ssa.Value {
store bool
}
args := make([]argRec, 0, nargs)
- for _, n := range n.List().Slice()[1:] {
+ for _, n := range n.Args.Slice()[1:] {
if canSSAType(n.Type()) {
args = append(args, argRec{v: s.expr(n), store: true})
} else {
@@ -3116,9 +3116,9 @@ func (s *state) condBranch(cond ir.Node, yes, no *ssa.Block, likely int8) {
cond := cond.(*ir.LogicalExpr)
mid := s.f.NewBlock(ssa.BlockPlain)
s.stmtList(cond.Init())
- s.condBranch(cond.Left(), mid, no, max8(likely, 0))
+ s.condBranch(cond.X, mid, no, max8(likely, 0))
s.startBlock(mid)
- s.condBranch(cond.Right(), yes, no, likely)
+ s.condBranch(cond.Y, yes, no, likely)
return
// Note: if likely==1, then both recursive calls pass 1.
// If likely==-1, then we don't have enough information to decide
@@ -3130,9 +3130,9 @@ func (s *state) condBranch(cond ir.Node, yes, no *ssa.Block, likely int8) {
cond := cond.(*ir.LogicalExpr)
mid := s.f.NewBlock(ssa.BlockPlain)
s.stmtList(cond.Init())
- s.condBranch(cond.Left(), yes, mid, min8(likely, 0))
+ s.condBranch(cond.X, yes, mid, min8(likely, 0))
s.startBlock(mid)
- s.condBranch(cond.Right(), yes, no, likely)
+ s.condBranch(cond.Y, yes, no, likely)
return
// Note: if likely==-1, then both recursive calls pass -1.
// If likely==1, then we don't have enough info to decide
@@ -3140,12 +3140,12 @@ func (s *state) condBranch(cond ir.Node, yes, no *ssa.Block, likely int8) {
case ir.ONOT:
cond := cond.(*ir.UnaryExpr)
s.stmtList(cond.Init())
- s.condBranch(cond.Left(), no, yes, -likely)
+ s.condBranch(cond.X, no, yes, -likely)
return
case ir.OCONVNOP:
cond := cond.(*ir.ConvExpr)
s.stmtList(cond.Init())
- s.condBranch(cond.Left(), yes, no, likely)
+ s.condBranch(cond.X, yes, no, likely)
return
}
c := s.expr(cond)
@@ -3192,12 +3192,12 @@ func (s *state) assign(left ir.Node, right *ssa.Value, deref bool, skip skipMask
// Grab information about the structure type.
left := left.(*ir.SelectorExpr)
- t := left.Left().Type()
+ t := left.X.Type()
nf := t.NumFields()
idx := fieldIdx(left)
// Grab old value of structure.
- old := s.expr(left.Left())
+ old := s.expr(left.X)
// Make new structure.
new := s.newValue0(ssa.StructMakeOp(t.NumFields()), t)
@@ -3212,20 +3212,20 @@ func (s *state) assign(left ir.Node, right *ssa.Value, deref bool, skip skipMask
}
// Recursively assign the new value we've made to the base of the dot op.
- s.assign(left.Left(), new, false, 0)
+ s.assign(left.X, new, false, 0)
// TODO: do we need to update named values here?
return
}
- if left.Op() == ir.OINDEX && left.(*ir.IndexExpr).Left().Type().IsArray() {
+ if left.Op() == ir.OINDEX && left.(*ir.IndexExpr).X.Type().IsArray() {
left := left.(*ir.IndexExpr)
s.pushLine(left.Pos())
defer s.popLine()
// We're assigning to an element of an ssa-able array.
// a[i] = v
- t := left.Left().Type()
+ t := left.X.Type()
n := t.NumElem()
- i := s.expr(left.Right()) // index
+ i := s.expr(left.Index) // index
if n == 0 {
// The bounds check must fail. Might as well
// ignore the actual index and just use zeros.
@@ -3240,7 +3240,7 @@ func (s *state) assign(left ir.Node, right *ssa.Value, deref bool, skip skipMask
len := s.constInt(types.Types[types.TINT], 1)
s.boundsCheck(i, len, ssa.BoundsIndex, false) // checks i == 0
v := s.newValue1(ssa.OpArrayMake1, t, right)
- s.assign(left.Left(), v, false, 0)
+ s.assign(left.X, v, false, 0)
return
}
left := left.(*ir.Name)
@@ -3252,7 +3252,7 @@ func (s *state) assign(left ir.Node, right *ssa.Value, deref bool, skip skipMask
// If this assignment clobbers an entire local variable, then emit
// OpVarDef so liveness analysis knows the variable is redefined.
- if base := clobberBase(left); base.Op() == ir.ONAME && base.(*ir.Name).Class() != ir.PEXTERN && skip == 0 {
+ if base := clobberBase(left); base.Op() == ir.ONAME && base.(*ir.Name).Class_ != ir.PEXTERN && skip == 0 {
s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, base.(*ir.Name), s.mem(), !ir.IsAutoTmp(base))
}
@@ -4333,7 +4333,7 @@ func isIntrinsicCall(n *ir.CallExpr) bool {
if n == nil {
return false
}
- name, ok := n.Left().(*ir.Name)
+ name, ok := n.X.(*ir.Name)
if !ok {
return false
}
@@ -4342,7 +4342,7 @@ func isIntrinsicCall(n *ir.CallExpr) bool {
// intrinsicCall converts a call to a recognized intrinsic function into the intrinsic SSA operation.
func (s *state) intrinsicCall(n *ir.CallExpr) *ssa.Value {
- v := findIntrinsic(n.Left().Sym())(s, n, s.intrinsicArgs(n))
+ v := findIntrinsic(n.X.Sym())(s, n, s.intrinsicArgs(n))
if ssa.IntrinsicsDebug > 0 {
x := v
if x == nil {
@@ -4351,7 +4351,7 @@ func (s *state) intrinsicCall(n *ir.CallExpr) *ssa.Value {
if x.Op == ssa.OpSelect0 || x.Op == ssa.OpSelect1 {
x = x.Args[0]
}
- base.WarnfAt(n.Pos(), "intrinsic substitution for %v with %s", n.Left().Sym().Name, x.LongString())
+ base.WarnfAt(n.Pos(), "intrinsic substitution for %v with %s", n.X.Sym().Name, x.LongString())
}
return v
}
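Worth pausing on the intrinsics hunks: isIntrinsicCall and intrinsicCall dispatch on the callee's symbol, which after this change is spelled n.X rather than n.Left(). A minimal sketch of that symbol-keyed dispatch, with toy stand-ins for findIntrinsic and the builder signature (the real builders take the SSA state and emit ops; nothing below is the compiler's API):

package main

import "fmt"

// builder is a stand-in for the compiler's intrinsicBuilder: given
// the already-evaluated arguments, produce the result value directly
// instead of emitting a call.
type builder func(args []int64) int64

var intrinsics = map[string]builder{
	"math/bits.TrailingZeros64": func(a []int64) int64 {
		n := int64(0) // toy: ignores the x == 0 case
		for x := a[0]; x != 0 && x&1 == 0; x >>= 1 {
			n++
		}
		return n
	},
}

func intrinsicCall(sym string, args []int64) (int64, bool) {
	b, ok := intrinsics[sym] // findIntrinsic(n.X.Sym()) in ssa.go
	if !ok {
		return 0, false // not an intrinsic: fall back to a normal call
	}
	return b(args), true
}

func main() {
	v, ok := intrinsicCall("math/bits.TrailingZeros64", []int64{8})
	fmt.Println(v, ok) // 3 true
}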
@@ -4360,12 +4360,12 @@ func (s *state) intrinsicCall(n *ir.CallExpr) *ssa.Value {
func (s *state) intrinsicArgs(n *ir.CallExpr) []*ssa.Value {
// Construct map of temps; see comments in s.call about the structure of n.
temps := map[ir.Node]*ssa.Value{}
- for _, a := range n.List().Slice() {
+ for _, a := range n.Args.Slice() {
if a.Op() != ir.OAS {
s.Fatalf("non-assignment as a temp function argument %v", a.Op())
}
a := a.(*ir.AssignStmt)
- l, r := a.Left(), a.Right()
+ l, r := a.X, a.Y
if l.Op() != ir.ONAME {
s.Fatalf("non-ONAME temp function argument %v", a.Op())
}
@@ -4373,8 +4373,8 @@ func (s *state) intrinsicArgs(n *ir.CallExpr) []*ssa.Value {
// Walk ensures these temporaries are dead outside of n.
temps[l] = s.expr(r)
}
- args := make([]*ssa.Value, n.Rlist().Len())
- for i, n := range n.Rlist().Slice() {
+ args := make([]*ssa.Value, n.Rargs.Len())
+ for i, n := range n.Rargs.Slice() {
// Store a value to an argument slot.
if x, ok := temps[n]; ok {
// This is a previously computed temporary.
@@ -4399,7 +4399,7 @@ func (s *state) openDeferRecord(n *ir.CallExpr) {
// once.mutex'. Such a statement will create a mapping in s.vars[] from
// the autotmp name to the evaluated SSA arg value, but won't do any
// stores to the stack.
- s.stmtList(n.List())
+ s.stmtList(n.Args)
var args []*ssa.Value
var argNodes []*ir.Name
@@ -4407,7 +4407,7 @@ func (s *state) openDeferRecord(n *ir.CallExpr) {
opendefer := &openDeferInfo{
n: n,
}
- fn := n.Left()
+ fn := n.X
if n.Op() == ir.OCALLFUNC {
// We must always store the function value in a stack slot for the
// runtime panic code to use. But in the defer exit code, we will
@@ -4415,7 +4415,7 @@ func (s *state) openDeferRecord(n *ir.CallExpr) {
closureVal := s.expr(fn)
closure := s.openDeferSave(nil, fn.Type(), closureVal)
opendefer.closureNode = closure.Aux.(*ir.Name)
- if !(fn.Op() == ir.ONAME && fn.(*ir.Name).Class() == ir.PFUNC) {
+ if !(fn.Op() == ir.ONAME && fn.(*ir.Name).Class_ == ir.PFUNC) {
opendefer.closure = closure
}
} else if n.Op() == ir.OCALLMETH {
@@ -4442,7 +4442,7 @@ func (s *state) openDeferRecord(n *ir.CallExpr) {
opendefer.closureNode = opendefer.closure.Aux.(*ir.Name)
opendefer.rcvrNode = opendefer.rcvr.Aux.(*ir.Name)
}
- for _, argn := range n.Rlist().Slice() {
+ for _, argn := range n.Rargs.Slice() {
var v *ssa.Value
if canSSAType(argn.Type()) {
v = s.openDeferSave(nil, argn.Type(), s.expr(argn))
@@ -4565,7 +4565,7 @@ func (s *state) openDeferExit() {
// closure/receiver/args that were stored in argtmps at the point
// of the defer statement.
argStart := base.Ctxt.FixedFrameSize()
- fn := r.n.Left()
+ fn := r.n.X
stksize := fn.Type().ArgWidth()
var ACArgs []ssa.Param
var ACResults []ssa.Param
@@ -4672,11 +4672,11 @@ func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool) *ssa.Val
var closure *ssa.Value // ptr to closure to run (if dynamic)
var codeptr *ssa.Value // ptr to target code (if dynamic)
var rcvr *ssa.Value // receiver to set
- fn := n.Left()
+ fn := n.X
var ACArgs []ssa.Param
var ACResults []ssa.Param
var callArgs []*ssa.Value
- res := n.Left().Type().Results()
+ res := n.X.Type().Results()
if k == callNormal {
nf := res.NumFields()
for i := 0; i < nf; i++ {
@@ -4690,7 +4690,7 @@ func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool) *ssa.Val
switch n.Op() {
case ir.OCALLFUNC:
testLateExpansion = k != callDeferStack && ssa.LateCallExpansionEnabledWithin(s.f)
- if k == callNormal && fn.Op() == ir.ONAME && fn.(*ir.Name).Class() == ir.PFUNC {
+ if k == callNormal && fn.Op() == ir.ONAME && fn.(*ir.Name).Class_ == ir.PFUNC {
fn := fn.(*ir.Name)
sym = fn.Sym()
break
@@ -4708,7 +4708,7 @@ func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool) *ssa.Val
fn := fn.(*ir.SelectorExpr)
testLateExpansion = k != callDeferStack && ssa.LateCallExpansionEnabledWithin(s.f)
if k == callNormal {
- sym = fn.Sym()
+ sym = fn.Sel
break
}
closure = s.getMethodClosure(fn)
@@ -4734,7 +4734,7 @@ func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool) *ssa.Val
// Run all assignments of temps.
// The temps are introduced to avoid overwriting argument
// slots when arguments themselves require function calls.
- s.stmtList(n.List())
+ s.stmtList(n.Args)
var call *ssa.Value
if k == callDeferStack {
@@ -4769,7 +4769,7 @@ func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool) *ssa.Val
// Then, store all the arguments of the defer call.
ft := fn.Type()
off := t.FieldOff(12)
- args := n.Rlist().Slice()
+ args := n.Rargs.Slice()
// Set receiver (for interface calls). Always a pointer.
if rcvr != nil {
@@ -4845,8 +4845,8 @@ func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool) *ssa.Val
}
// Write args.
- t := n.Left().Type()
- args := n.Rlist().Slice()
+ t := n.X.Type()
+ args := n.Rargs.Slice()
if n.Op() == ir.OCALLMETH {
f := t.Recv()
ACArg, arg := s.putArg(args[0], f.Type, argStart+f.Offset, testLateExpansion)
@@ -4923,7 +4923,7 @@ func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool) *ssa.Val
s.vars[memVar] = call
}
// Insert OVARLIVE nodes
- s.stmtList(n.Body())
+ s.stmtList(n.Body)
// Finish block for defers
if k == callDefer || k == callDeferStack {
@@ -4977,9 +4977,9 @@ func (s *state) getMethodClosure(fn *ir.SelectorExpr) *ssa.Value {
// Make a PFUNC node out of that, then evaluate it.
// We get back an SSA value representing &sync.(*Mutex).Unlock·f.
// We can then pass that to defer or go.
- n2 := ir.NewNameAt(fn.Pos(), fn.Sym())
+ n2 := ir.NewNameAt(fn.Pos(), fn.Sel)
n2.Curfn = s.curfn
- n2.SetClass(ir.PFUNC)
+ n2.Class_ = ir.PFUNC
// n2.Sym already existed, so it's already marked as a function.
n2.SetPos(fn.Pos())
n2.SetType(types.Types[types.TUINT8]) // fake type for a static closure. Could use runtime.funcval if we had it.
@@ -4989,10 +4989,10 @@ func (s *state) getMethodClosure(fn *ir.SelectorExpr) *ssa.Value {
// getClosureAndRcvr returns values for the appropriate closure and receiver of an
// interface call
func (s *state) getClosureAndRcvr(fn *ir.SelectorExpr) (*ssa.Value, *ssa.Value) {
- i := s.expr(fn.Left())
+ i := s.expr(fn.X)
itab := s.newValue1(ssa.OpITab, types.Types[types.TUINTPTR], i)
s.nilCheck(itab)
- itabidx := fn.Offset() + 2*int64(Widthptr) + 8 // offset of fun field in runtime.itab
+ itabidx := fn.Offset + 2*int64(Widthptr) + 8 // offset of fun field in runtime.itab
closure := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.UintptrPtr, itabidx, itab)
rcvr := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, i)
return closure, rcvr
@@ -5028,7 +5028,7 @@ func (s *state) addr(n ir.Node) *ssa.Value {
fallthrough
case ir.ONAME:
n := n.(*ir.Name)
- switch n.Class() {
+ switch n.Class_ {
case ir.PEXTERN:
// global variable
v := s.entryNewValue1A(ssa.OpAddr, t, n.Sym().Linksym(), s.sb)
@@ -5057,60 +5057,60 @@ func (s *state) addr(n ir.Node) *ssa.Value {
// that cse works on their addresses
return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), true)
default:
- s.Fatalf("variable address class %v not implemented", n.Class())
+ s.Fatalf("variable address class %v not implemented", n.Class_)
return nil
}
case ir.ORESULT:
// load return from callee
n := n.(*ir.ResultExpr)
if s.prevCall == nil || s.prevCall.Op != ssa.OpStaticLECall && s.prevCall.Op != ssa.OpInterLECall && s.prevCall.Op != ssa.OpClosureLECall {
- return s.constOffPtrSP(t, n.Offset())
+ return s.constOffPtrSP(t, n.Offset)
}
- which := s.prevCall.Aux.(*ssa.AuxCall).ResultForOffset(n.Offset())
+ which := s.prevCall.Aux.(*ssa.AuxCall).ResultForOffset(n.Offset)
if which == -1 {
// Do the old thing // TODO: Panic instead.
- return s.constOffPtrSP(t, n.Offset())
+ return s.constOffPtrSP(t, n.Offset)
}
x := s.newValue1I(ssa.OpSelectNAddr, t, which, s.prevCall)
return x
case ir.OINDEX:
n := n.(*ir.IndexExpr)
- if n.Left().Type().IsSlice() {
- a := s.expr(n.Left())
- i := s.expr(n.Right())
+ if n.X.Type().IsSlice() {
+ a := s.expr(n.X)
+ i := s.expr(n.Index)
len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], a)
i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
p := s.newValue1(ssa.OpSlicePtr, t, a)
return s.newValue2(ssa.OpPtrIndex, t, p, i)
} else { // array
- a := s.addr(n.Left())
- i := s.expr(n.Right())
- len := s.constInt(types.Types[types.TINT], n.Left().Type().NumElem())
+ a := s.addr(n.X)
+ i := s.expr(n.Index)
+ len := s.constInt(types.Types[types.TINT], n.X.Type().NumElem())
i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
- return s.newValue2(ssa.OpPtrIndex, types.NewPtr(n.Left().Type().Elem()), a, i)
+ return s.newValue2(ssa.OpPtrIndex, types.NewPtr(n.X.Type().Elem()), a, i)
}
case ir.ODEREF:
n := n.(*ir.StarExpr)
- return s.exprPtr(n.Left(), n.Bounded(), n.Pos())
+ return s.exprPtr(n.X, n.Bounded(), n.Pos())
case ir.ODOT:
n := n.(*ir.SelectorExpr)
- p := s.addr(n.Left())
- return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
+ p := s.addr(n.X)
+ return s.newValue1I(ssa.OpOffPtr, t, n.Offset, p)
case ir.ODOTPTR:
n := n.(*ir.SelectorExpr)
- p := s.exprPtr(n.Left(), n.Bounded(), n.Pos())
- return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
+ p := s.exprPtr(n.X, n.Bounded(), n.Pos())
+ return s.newValue1I(ssa.OpOffPtr, t, n.Offset, p)
case ir.OCLOSUREREAD:
n := n.(*ir.ClosureReadExpr)
- return s.newValue1I(ssa.OpOffPtr, t, n.Offset(),
+ return s.newValue1I(ssa.OpOffPtr, t, n.Offset,
s.entryNewValue0(ssa.OpGetClosurePtr, s.f.Config.Types.BytePtr))
case ir.OCONVNOP:
n := n.(*ir.ConvExpr)
- if n.Type() == n.Left().Type() {
- return s.addr(n.Left())
+ if n.Type() == n.X.Type() {
+ return s.addr(n.X)
}
- addr := s.addr(n.Left())
+ addr := s.addr(n.X)
return s.newValue1(ssa.OpCopy, t, addr) // ensure that addr has the right type
case ir.OCALLFUNC, ir.OCALLINTER, ir.OCALLMETH:
n := n.(*ir.CallExpr)
@@ -5141,13 +5141,13 @@ func (s *state) canSSA(n ir.Node) bool {
nn := n
if nn.Op() == ir.ODOT {
nn := nn.(*ir.SelectorExpr)
- n = nn.Left()
+ n = nn.X
continue
}
if nn.Op() == ir.OINDEX {
nn := nn.(*ir.IndexExpr)
- if nn.Left().Type().IsArray() {
- n = nn.Left()
+ if nn.X.Type().IsArray() {
+ n = nn.X
continue
}
}
@@ -5166,10 +5166,10 @@ func (s *state) canSSAName(name *ir.Name) bool {
if isParamHeapCopy(name) {
return false
}
- if name.Class() == ir.PAUTOHEAP {
+ if name.Class_ == ir.PAUTOHEAP {
s.Fatalf("canSSA of PAUTOHEAP %v", name)
}
- switch name.Class() {
+ switch name.Class_ {
case ir.PEXTERN:
return false
case ir.PPARAMOUT:
@@ -5187,7 +5187,7 @@ func (s *state) canSSAName(name *ir.Name) bool {
return false
}
}
- if name.Class() == ir.PPARAM && name.Sym() != nil && name.Sym().Name == ".this" {
+ if name.Class_ == ir.PPARAM && name.Sym() != nil && name.Sym().Name == ".this" {
// wrappers generated by genwrapper need to update
// the .this pointer in place.
// TODO: treat as a PPARAMOUT?
@@ -5893,7 +5893,7 @@ func (s *state) uint32Tofloat(cvttab *u322fcvtTab, n ir.Node, x *ssa.Value, ft,
// referenceTypeBuiltin generates code for the len/cap builtins for maps and channels.
func (s *state) referenceTypeBuiltin(n *ir.UnaryExpr, x *ssa.Value) *ssa.Value {
- if !n.Left().Type().IsMap() && !n.Left().Type().IsChan() {
+ if !n.X.Type().IsMap() && !n.X.Type().IsChan() {
s.Fatalf("node must be a map or a channel")
}
// if n == nil {
@@ -6050,8 +6050,8 @@ func (s *state) floatToUint(cvttab *f2uCvtTab, n ir.Node, x *ssa.Value, ft, tt *
// commaok indicates whether to panic or return a bool.
// If commaok is false, resok will be nil.
func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
- iface := s.expr(n.Left()) // input interface
- target := s.expr(n.Right()) // target type
+ iface := s.expr(n.X) // input interface
+ target := s.expr(n.Ntype) // target type
byteptr := s.f.Config.Types.BytePtr
if n.Type().IsInterface() {
@@ -6067,7 +6067,7 @@ func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Val
// Conversion succeeds iff that field is not nil.
cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
- if n.Left().Type().IsEmptyInterface() && commaok {
+ if n.X.Type().IsEmptyInterface() && commaok {
// Converting empty interface to empty interface with ,ok is just a nil check.
return iface, cond
}
@@ -6089,7 +6089,7 @@ func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Val
// On success, return (perhaps modified) input interface.
s.startBlock(bOk)
- if n.Left().Type().IsEmptyInterface() {
+ if n.X.Type().IsEmptyInterface() {
res = iface // Use input interface unchanged.
return
}
@@ -6128,7 +6128,7 @@ func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Val
if base.Debug.TypeAssert > 0 {
base.WarnfAt(n.Pos(), "type assertion not inlined")
}
- if n.Left().Type().IsEmptyInterface() {
+ if n.X.Type().IsEmptyInterface() {
if commaok {
call := s.rtcall(assertE2I2, true, []*types.Type{n.Type(), types.Types[types.TBOOL]}, target, iface)
return call[0], call[1]
@@ -6153,12 +6153,12 @@ func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Val
base.WarnfAt(n.Pos(), "type assertion inlined")
}
var targetITab *ssa.Value
- if n.Left().Type().IsEmptyInterface() {
+ if n.X.Type().IsEmptyInterface() {
// Looking for pointer to target type.
targetITab = target
} else {
// Looking for pointer to itab for target type and source interface.
- targetITab = s.expr(n.List().First())
+ targetITab = s.expr(n.Itab.First())
}
var tmp ir.Node // temporary for use with large types
@@ -6185,8 +6185,8 @@ func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Val
if !commaok {
// on failure, panic by calling panicdottype
s.startBlock(bFail)
- taddr := s.expr(n.Right().(*ir.AddrExpr).Right())
- if n.Left().Type().IsEmptyInterface() {
+ taddr := s.expr(n.Ntype.(*ir.AddrExpr).Alloc)
+ if n.X.Type().IsEmptyInterface() {
s.rtcall(panicdottypeE, false, nil, itab, target, taddr)
} else {
s.rtcall(panicdottypeI, false, nil, itab, target, taddr)
@@ -6280,7 +6280,7 @@ func (s *state) mem() *ssa.Value {
}
func (s *state) addNamedValue(n *ir.Name, v *ssa.Value) {
- if n.Class() == ir.Pxxx {
+ if n.Class_ == ir.Pxxx {
// Don't track our marker nodes (memVar etc.).
return
}
@@ -6288,7 +6288,7 @@ func (s *state) addNamedValue(n *ir.Name, v *ssa.Value) {
// Don't track temporary variables.
return
}
- if n.Class() == ir.PPARAMOUT {
+ if n.Class_ == ir.PPARAMOUT {
// Don't track named output values. This prevents return values
// from being assigned too early. See #14591 and #14762. TODO: allow this.
return
@@ -6811,11 +6811,11 @@ func defframe(s *SSAGenState, e *ssafn) {
if !n.Needzero() {
continue
}
- if n.Class() != ir.PAUTO {
- e.Fatalf(n.Pos(), "needzero class %d", n.Class())
+ if n.Class_ != ir.PAUTO {
+ e.Fatalf(n.Pos(), "needzero class %d", n.Class_)
}
if n.Type().Size()%int64(Widthptr) != 0 || n.FrameOffset()%int64(Widthptr) != 0 || n.Type().Size() == 0 {
- e.Fatalf(n.Pos(), "var %L has size %d offset %d", n, n.Type().Size(), n.Offset())
+ e.Fatalf(n.Pos(), "var %L has size %d offset %d", n, n.Type().Size(), n.Offset_)
}
if lo != hi && n.FrameOffset()+n.Type().Size() >= lo-int64(2*Widthreg) {
@@ -6896,7 +6896,7 @@ func AddAux2(a *obj.Addr, v *ssa.Value, offset int64) {
a.Name = obj.NAME_EXTERN
a.Sym = n
case *ir.Name:
- if n.Class() == ir.PPARAM || n.Class() == ir.PPARAMOUT {
+ if n.Class_ == ir.PPARAM || n.Class_ == ir.PPARAMOUT {
a.Name = obj.NAME_PARAM
a.Sym = ir.Orig(n).Sym().Linksym()
a.Offset += n.FrameOffset()
@@ -7048,7 +7048,7 @@ func AddrAuto(a *obj.Addr, v *ssa.Value) {
a.Sym = n.Sym().Linksym()
a.Reg = int16(thearch.REGSP)
a.Offset = n.FrameOffset() + off
- if n.Class() == ir.PPARAM || n.Class() == ir.PPARAMOUT {
+ if n.Class_ == ir.PPARAM || n.Class_ == ir.PPARAMOUT {
a.Name = obj.NAME_PARAM
} else {
a.Name = obj.NAME_AUTO
@@ -7063,7 +7063,7 @@ func (s *SSAGenState) AddrScratch(a *obj.Addr) {
a.Name = obj.NAME_AUTO
a.Sym = s.ScratchFpMem.Sym().Linksym()
a.Reg = int16(thearch.REGSP)
- a.Offset = s.ScratchFpMem.Offset()
+ a.Offset = s.ScratchFpMem.Offset_
}
// Call returns a new CALL instruction for the SSA value v.
@@ -7146,8 +7146,8 @@ func (s *SSAGenState) UseArgs(n int64) {
// fieldIdx finds the index of the field referred to by the ODOT node n.
func fieldIdx(n *ir.SelectorExpr) int {
- t := n.Left().Type()
- f := n.Sym()
+ t := n.X.Type()
+ f := n.Sel
if !t.IsStruct() {
panic("ODOT's LHS is not a struct")
}
@@ -7158,7 +7158,7 @@ func fieldIdx(n *ir.SelectorExpr) int {
i++
continue
}
- if t1.Offset != n.Offset() {
+ if t1.Offset != n.Offset {
panic("field offset doesn't match")
}
return i
@@ -7282,7 +7282,7 @@ func (e *ssafn) DerefItab(it *obj.LSym, offset int64) *obj.LSym {
func (e *ssafn) SplitSlot(parent *ssa.LocalSlot, suffix string, offset int64, t *types.Type) ssa.LocalSlot {
node := parent.N
- if node.Class() != ir.PAUTO || node.Name().Addrtaken() {
+ if node.Class_ != ir.PAUTO || node.Name().Addrtaken() {
// addressed things and non-autos retain their parents (i.e., cannot truly be split)
return ssa.LocalSlot{N: node, Type: t, Off: parent.Off + offset}
}
@@ -7292,7 +7292,7 @@ func (e *ssafn) SplitSlot(parent *ssa.LocalSlot, suffix string, offset int64, t
s.Def = n
ir.AsNode(s.Def).Name().SetUsed(true)
n.SetType(t)
- n.SetClass(ir.PAUTO)
+ n.Class_ = ir.PAUTO
n.SetEsc(EscNever)
n.Curfn = e.curfn
e.curfn.Dcl = append(e.curfn.Dcl, n)
@@ -7368,14 +7368,14 @@ func (e *ssafn) MyImportPath() string {
func clobberBase(n ir.Node) ir.Node {
if n.Op() == ir.ODOT {
n := n.(*ir.SelectorExpr)
- if n.Left().Type().NumFields() == 1 {
- return clobberBase(n.Left())
+ if n.X.Type().NumFields() == 1 {
+ return clobberBase(n.X)
}
}
if n.Op() == ir.OINDEX {
n := n.(*ir.IndexExpr)
- if n.Left().Type().IsArray() && n.Left().Type().NumElem() == 1 {
- return clobberBase(n.Left())
+ if n.X.Type().IsArray() && n.X.Type().NumElem() == 1 {
+ return clobberBase(n.X)
}
}
return n
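The clobberBase function that closes out the ssa.go changes is small enough to model on its own: if an assignment writes the only field of a struct (or the only element of a one-element array), it clobbers the entire base, so liveness can treat the store as redefining the base variable. A self-contained sketch under stand-in node types (Selector and Index here are hypothetical mirrors of ir.SelectorExpr and ir.IndexExpr, with the type queries folded into plain counts):

package main

import "fmt"

type Node interface{ String() string }

type Name struct{ name string }
type Selector struct {
	X       Node
	XFields int // NumFields of X's struct type
}
type Index struct {
	X      Node
	XElems int // NumElem of X's array type
}

func (n *Name) String() string     { return n.name }
func (n *Selector) String() string { return n.X.String() + ".f" }
func (n *Index) String() string    { return n.X.String() + "[0]" }

// clobberBase mirrors the function above: writing the sole field or
// element overwrites everything the base holds, so recurse toward
// the underlying variable.
func clobberBase(n Node) Node {
	if s, ok := n.(*Selector); ok && s.XFields == 1 {
		return clobberBase(s.X)
	}
	if ix, ok := n.(*Index); ok && ix.XElems == 1 {
		return clobberBase(ix.X)
	}
	return n
}

func main() {
	v := &Name{"v"}
	fmt.Println(clobberBase(&Selector{X: &Index{X: v, XElems: 1}, XFields: 1})) // v
	fmt.Println(clobberBase(&Selector{X: v, XFields: 2}))                       // v.f
}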
diff --git a/src/cmd/compile/internal/gc/subr.go b/src/cmd/compile/internal/gc/subr.go
index 5aebae0b18..450b20e000 100644
--- a/src/cmd/compile/internal/gc/subr.go
+++ b/src/cmd/compile/internal/gc/subr.go
@@ -616,7 +616,7 @@ func calcHasCall(n ir.Node) bool {
if instrumenting {
return true
}
- return n.Left().HasCall() || n.Right().HasCall()
+ return n.X.HasCall() || n.Y.HasCall()
case ir.OINDEX, ir.OSLICE, ir.OSLICEARR, ir.OSLICE3, ir.OSLICE3ARR, ir.OSLICESTR,
ir.ODEREF, ir.ODOTPTR, ir.ODOTTYPE, ir.ODIV, ir.OMOD:
// These ops might panic, make sure they are done
@@ -630,49 +630,49 @@ func calcHasCall(n ir.Node) bool {
if thearch.SoftFloat && (isFloat[n.Type().Kind()] || isComplex[n.Type().Kind()]) {
return true
}
- return n.Left().HasCall() || n.Right().HasCall()
+ return n.X.HasCall() || n.Y.HasCall()
case ir.ONEG:
n := n.(*ir.UnaryExpr)
if thearch.SoftFloat && (isFloat[n.Type().Kind()] || isComplex[n.Type().Kind()]) {
return true
}
- return n.Left().HasCall()
+ return n.X.HasCall()
case ir.OLT, ir.OEQ, ir.ONE, ir.OLE, ir.OGE, ir.OGT:
n := n.(*ir.BinaryExpr)
- if thearch.SoftFloat && (isFloat[n.Left().Type().Kind()] || isComplex[n.Left().Type().Kind()]) {
+ if thearch.SoftFloat && (isFloat[n.X.Type().Kind()] || isComplex[n.X.Type().Kind()]) {
return true
}
- return n.Left().HasCall() || n.Right().HasCall()
+ return n.X.HasCall() || n.Y.HasCall()
case ir.OCONV:
n := n.(*ir.ConvExpr)
- if thearch.SoftFloat && ((isFloat[n.Type().Kind()] || isComplex[n.Type().Kind()]) || (isFloat[n.Left().Type().Kind()] || isComplex[n.Left().Type().Kind()])) {
+ if thearch.SoftFloat && ((isFloat[n.Type().Kind()] || isComplex[n.Type().Kind()]) || (isFloat[n.X.Type().Kind()] || isComplex[n.X.Type().Kind()])) {
return true
}
- return n.Left().HasCall()
+ return n.X.HasCall()
case ir.OAND, ir.OANDNOT, ir.OLSH, ir.OOR, ir.ORSH, ir.OXOR, ir.OCOPY, ir.OCOMPLEX, ir.OEFACE:
n := n.(*ir.BinaryExpr)
- return n.Left().HasCall() || n.Right().HasCall()
+ return n.X.HasCall() || n.Y.HasCall()
case ir.OAS:
n := n.(*ir.AssignStmt)
- return n.Left().HasCall() || n.Right() != nil && n.Right().HasCall()
+ return n.X.HasCall() || n.Y != nil && n.Y.HasCall()
case ir.OADDR:
n := n.(*ir.AddrExpr)
- return n.Left().HasCall()
+ return n.X.HasCall()
case ir.OPAREN:
n := n.(*ir.ParenExpr)
- return n.Left().HasCall()
+ return n.X.HasCall()
case ir.OBITNOT, ir.ONOT, ir.OPLUS, ir.ORECV,
ir.OALIGNOF, ir.OCAP, ir.OCLOSE, ir.OIMAG, ir.OLEN, ir.ONEW,
ir.OOFFSETOF, ir.OPANIC, ir.OREAL, ir.OSIZEOF,
ir.OCHECKNIL, ir.OCFUNC, ir.OIDATA, ir.OITAB, ir.ONEWOBJ, ir.OSPTR, ir.OVARDEF, ir.OVARKILL, ir.OVARLIVE:
n := n.(*ir.UnaryExpr)
- return n.Left().HasCall()
+ return n.X.HasCall()
case ir.ODOT, ir.ODOTMETH, ir.ODOTINTER:
n := n.(*ir.SelectorExpr)
- return n.Left().HasCall()
+ return n.X.HasCall()
case ir.OGETG, ir.OCLOSUREREAD, ir.OMETHEXPR:
return false
@@ -687,15 +687,15 @@ func calcHasCall(n ir.Node) bool {
case ir.OCONVIFACE, ir.OCONVNOP, ir.OBYTES2STR, ir.OBYTES2STRTMP, ir.ORUNES2STR, ir.OSTR2BYTES, ir.OSTR2BYTESTMP, ir.OSTR2RUNES, ir.ORUNESTR:
// TODO(rsc): Some conversions are themselves calls, no?
n := n.(*ir.ConvExpr)
- return n.Left().HasCall()
+ return n.X.HasCall()
case ir.ODOTTYPE2:
// TODO(rsc): Shouldn't this be up with ODOTTYPE above?
n := n.(*ir.TypeAssertExpr)
- return n.Left().HasCall()
+ return n.X.HasCall()
case ir.OSLICEHEADER:
// TODO(rsc): What about len and cap?
n := n.(*ir.SliceHeaderExpr)
- return n.Left().HasCall()
+ return n.Ptr.HasCall()
case ir.OAS2DOTTYPE, ir.OAS2FUNC:
// TODO(rsc): Surely we need to check List and Rlist.
return false
@@ -783,44 +783,44 @@ func safeexpr(n ir.Node, init *ir.Nodes) ir.Node {
case ir.OLEN, ir.OCAP:
n := n.(*ir.UnaryExpr)
- l := safeexpr(n.Left(), init)
- if l == n.Left() {
+ l := safeexpr(n.X, init)
+ if l == n.X {
return n
}
a := ir.Copy(n).(*ir.UnaryExpr)
- a.SetLeft(l)
+ a.X = l
return walkexpr(typecheck(a, ctxExpr), init)
case ir.ODOT, ir.ODOTPTR:
n := n.(*ir.SelectorExpr)
- l := safeexpr(n.Left(), init)
- if l == n.Left() {
+ l := safeexpr(n.X, init)
+ if l == n.X {
return n
}
a := ir.Copy(n).(*ir.SelectorExpr)
- a.SetLeft(l)
+ a.X = l
return walkexpr(typecheck(a, ctxExpr), init)
case ir.ODEREF:
n := n.(*ir.StarExpr)
- l := safeexpr(n.Left(), init)
- if l == n.Left() {
+ l := safeexpr(n.X, init)
+ if l == n.X {
return n
}
a := ir.Copy(n).(*ir.StarExpr)
- a.SetLeft(l)
+ a.X = l
return walkexpr(typecheck(a, ctxExpr), init)
case ir.OINDEX, ir.OINDEXMAP:
n := n.(*ir.IndexExpr)
- l := safeexpr(n.Left(), init)
- r := safeexpr(n.Right(), init)
- if l == n.Left() && r == n.Right() {
+ l := safeexpr(n.X, init)
+ r := safeexpr(n.Index, init)
+ if l == n.X && r == n.Index {
return n
}
a := ir.Copy(n).(*ir.IndexExpr)
- a.SetLeft(l)
- a.SetRight(r)
+ a.X = l
+ a.Index = r
return walkexpr(typecheck(a, ctxExpr), init)
case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT:
@@ -992,20 +992,20 @@ func dotpath(s *types.Sym, t *types.Type, save **types.Field, ignorecase bool) (
// will give shortest unique addressing.
// modify the tree with missing type names.
func adddot(n *ir.SelectorExpr) *ir.SelectorExpr {
- n.SetLeft(typecheck(n.Left(), ctxType|ctxExpr))
- if n.Left().Diag() {
+ n.X = typecheck(n.X, ctxType|ctxExpr)
+ if n.X.Diag() {
n.SetDiag(true)
}
- t := n.Left().Type()
+ t := n.X.Type()
if t == nil {
return n
}
- if n.Left().Op() == ir.OTYPE {
+ if n.X.Op() == ir.OTYPE {
return n
}
- s := n.Sym()
+ s := n.Sel
if s == nil {
return n
}
@@ -1014,14 +1014,14 @@ func adddot(n *ir.SelectorExpr) *ir.SelectorExpr {
case path != nil:
// rebuild elided dots
for c := len(path) - 1; c >= 0; c-- {
- dot := ir.NewSelectorExpr(base.Pos, ir.ODOT, n.Left(), path[c].field.Sym)
+ dot := ir.NewSelectorExpr(base.Pos, ir.ODOT, n.X, path[c].field.Sym)
dot.SetImplicit(true)
dot.SetType(path[c].field.Type)
- n.SetLeft(dot)
+ n.X = dot
}
case ambig:
base.Errorf("ambiguous selector %v", n)
- n.SetLeft(nil)
+ n.X = nil
}
return n
@@ -1228,10 +1228,10 @@ func genwrapper(rcvr *types.Type, method *types.Field, newnam *types.Sym) {
if rcvr.IsPtr() && rcvr.Elem() == methodrcvr {
// generating wrapper from *T to T.
n := ir.NewIfStmt(base.Pos, nil, nil, nil)
- n.SetLeft(ir.NewBinaryExpr(base.Pos, ir.OEQ, nthis, nodnil()))
+ n.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, nthis, nodnil())
call := ir.NewCallExpr(base.Pos, ir.OCALL, syslook("panicwrap"), nil)
- n.PtrBody().Set1(call)
- fn.PtrBody().Append(n)
+ n.Body.Set1(call)
+ fn.Body.Append(n)
}
dot := adddot(ir.NewSelectorExpr(base.Pos, ir.OXDOT, nthis, method.Sym))
@@ -1245,29 +1245,29 @@ func genwrapper(rcvr *types.Type, method *types.Field, newnam *types.Sym) {
// value for that function.
if !instrumenting && rcvr.IsPtr() && methodrcvr.IsPtr() && method.Embedded != 0 && !isifacemethod(method.Type) && !(thearch.LinkArch.Name == "ppc64le" && base.Ctxt.Flag_dynlink) {
// generate tail call: adjust pointer receiver and jump to embedded method.
- left := dot.Left() // skip final .M
+ left := dot.X // skip final .M
if !left.Type().IsPtr() {
left = nodAddr(left)
}
as := ir.NewAssignStmt(base.Pos, nthis, convnop(left, rcvr))
- fn.PtrBody().Append(as)
- fn.PtrBody().Append(ir.NewBranchStmt(base.Pos, ir.ORETJMP, methodSym(methodrcvr, method.Sym)))
+ fn.Body.Append(as)
+ fn.Body.Append(ir.NewBranchStmt(base.Pos, ir.ORETJMP, methodSym(methodrcvr, method.Sym)))
} else {
fn.SetWrapper(true) // ignore frame for panic+recover matching
call := ir.NewCallExpr(base.Pos, ir.OCALL, dot, nil)
- call.PtrList().Set(paramNnames(tfn.Type()))
- call.SetIsDDD(tfn.Type().IsVariadic())
+ call.Args.Set(paramNnames(tfn.Type()))
+ call.IsDDD = tfn.Type().IsVariadic()
if method.Type.NumResults() > 0 {
ret := ir.NewReturnStmt(base.Pos, nil)
- ret.PtrList().Set1(call)
- fn.PtrBody().Append(ret)
+ ret.Results.Set1(call)
+ fn.Body.Append(ret)
} else {
- fn.PtrBody().Append(call)
+ fn.Body.Append(call)
}
}
if false && base.Flag.LowerR != 0 {
- ir.DumpList("genwrapper body", fn.Body())
+ ir.DumpList("genwrapper body", fn.Body)
}
funcbody()
@@ -1277,7 +1277,7 @@ func genwrapper(rcvr *types.Type, method *types.Field, newnam *types.Sym) {
typecheckFunc(fn)
Curfn = fn
- typecheckslice(fn.Body().Slice(), ctxStmt)
+ typecheckslice(fn.Body.Slice(), ctxStmt)
// Inline calls within (*T).M wrappers. This is safe because we only
// generate those wrappers within the same compilation unit as (T).M.
@@ -1422,7 +1422,7 @@ func implements(t, iface *types.Type, m, samename **types.Field, ptr *int) bool
func liststmt(l []ir.Node) ir.Node {
n := ir.NewBlockStmt(base.Pos, nil)
- n.PtrList().Set(l)
+ n.List.Set(l)
if len(l) != 0 {
n.SetPos(l[0].Pos())
}
@@ -1542,8 +1542,8 @@ func itabType(itab ir.Node) ir.Node {
typ := ir.NewSelectorExpr(base.Pos, ir.ODOTPTR, itab, nil)
typ.SetType(types.NewPtr(types.Types[types.TUINT8]))
typ.SetTypecheck(1)
- typ.SetOffset(int64(Widthptr)) // offset of _type in runtime.itab
- typ.SetBounded(true)           // guaranteed not to fault
+ typ.Offset = int64(Widthptr) // offset of _type in runtime.itab
+ typ.SetBounded(true) // guaranteed not to fault
return typ
}
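A pattern worth noting in the subr.go hunks above: safeexpr copies a node with ir.Copy and then overwrites the operand fields directly, where it previously called SetLeft/SetRight on the copy. A minimal sketch of that copy-then-assign idiom, assuming a toy IndexExpr in place of the real ir.IndexExpr:

package main

import "fmt"

type IndexExpr struct{ X, Index string }

// safeIndex mimics the OINDEX case of safeexpr: make each operand
// safe, reuse the node if nothing changed, otherwise copy and
// assign the exported fields in place.
func safeIndex(n *IndexExpr, safe func(string) string) *IndexExpr {
	l, r := safe(n.X), safe(n.Index)
	if l == n.X && r == n.Index {
		return n // operands already safe: reuse the node
	}
	a := *n // ir.Copy(n).(*ir.IndexExpr) in the real code
	a.X = l
	a.Index = r
	return &a
}

func main() {
	n := &IndexExpr{X: "f()", Index: "i"}
	hoist := func(e string) string {
		if e == "f()" {
			return "tmp" // pretend we hoisted the call into a temp
		}
		return e
	}
	fmt.Println(safeIndex(n, hoist))                                   // &{tmp i}
	fmt.Println(safeIndex(n, func(e string) string { return e }) == n) // true
}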
diff --git a/src/cmd/compile/internal/gc/swt.go b/src/cmd/compile/internal/gc/swt.go
index 7cd1c16e00..da781e6f45 100644
--- a/src/cmd/compile/internal/gc/swt.go
+++ b/src/cmd/compile/internal/gc/swt.go
@@ -17,7 +17,7 @@ import (
// typecheckswitch typechecks a switch statement.
func typecheckswitch(n *ir.SwitchStmt) {
typecheckslice(n.Init().Slice(), ctxStmt)
- if n.Left() != nil && n.Left().Op() == ir.OTYPESW {
+ if n.Tag != nil && n.Tag.Op() == ir.OTYPESW {
typecheckTypeSwitch(n)
} else {
typecheckExprSwitch(n)
@@ -25,26 +25,26 @@ func typecheckswitch(n *ir.SwitchStmt) {
}
func typecheckTypeSwitch(n *ir.SwitchStmt) {
- guard := n.Left().(*ir.TypeSwitchGuard)
- guard.SetRight(typecheck(guard.Right(), ctxExpr))
- t := guard.Right().Type()
+ guard := n.Tag.(*ir.TypeSwitchGuard)
+ guard.X = typecheck(guard.X, ctxExpr)
+ t := guard.X.Type()
if t != nil && !t.IsInterface() {
- base.ErrorfAt(n.Pos(), "cannot type switch on non-interface value %L", guard.Right())
+ base.ErrorfAt(n.Pos(), "cannot type switch on non-interface value %L", guard.X)
t = nil
}
// We don't actually declare the type switch's guarded
// declaration itself. So if there are no cases, we won't
// notice that it went unused.
- if v := guard.Left(); v != nil && !ir.IsBlank(v) && n.List().Len() == 0 {
+ if v := guard.Tag; v != nil && !ir.IsBlank(v) && n.Cases.Len() == 0 {
base.ErrorfAt(v.Pos(), "%v declared but not used", v.Sym())
}
var defCase, nilCase ir.Node
var ts typeSet
- for _, ncase := range n.List().Slice() {
+ for _, ncase := range n.Cases.Slice() {
ncase := ncase.(*ir.CaseStmt)
- ls := ncase.List().Slice()
+ ls := ncase.List.Slice()
if len(ls) == 0 { // default:
if defCase != nil {
base.ErrorfAt(ncase.Pos(), "multiple defaults in switch (first at %v)", ir.Line(defCase))
@@ -77,13 +77,13 @@ func typecheckTypeSwitch(n *ir.SwitchStmt) {
if !n1.Type().IsInterface() && !implements(n1.Type(), t, &missing, &have, &ptr) && !missing.Broke() {
if have != nil && !have.Broke() {
base.ErrorfAt(ncase.Pos(), "impossible type switch case: %L cannot have dynamic type %v"+
- " (wrong type for %v method)\n\thave %v%S\n\twant %v%S", guard.Right(), n1.Type(), missing.Sym, have.Sym, have.Type, missing.Sym, missing.Type)
+ " (wrong type for %v method)\n\thave %v%S\n\twant %v%S", guard.X, n1.Type(), missing.Sym, have.Sym, have.Type, missing.Sym, missing.Type)
} else if ptr != 0 {
base.ErrorfAt(ncase.Pos(), "impossible type switch case: %L cannot have dynamic type %v"+
- " (%v method has pointer receiver)", guard.Right(), n1.Type(), missing.Sym)
+ " (%v method has pointer receiver)", guard.X, n1.Type(), missing.Sym)
} else {
base.ErrorfAt(ncase.Pos(), "impossible type switch case: %L cannot have dynamic type %v"+
- " (missing %v method)", guard.Right(), n1.Type(), missing.Sym)
+ " (missing %v method)", guard.X, n1.Type(), missing.Sym)
}
continue
}
@@ -91,7 +91,7 @@ func typecheckTypeSwitch(n *ir.SwitchStmt) {
ts.add(ncase.Pos(), n1.Type())
}
- if ncase.Rlist().Len() != 0 {
+ if ncase.Vars.Len() != 0 {
// Assign the clause variable's type.
vt := t
if len(ls) == 1 {
@@ -104,7 +104,7 @@ func typecheckTypeSwitch(n *ir.SwitchStmt) {
}
}
- nvar := ncase.Rlist().First()
+ nvar := ncase.Vars.First()
nvar.SetType(vt)
if vt != nil {
nvar = typecheck(nvar, ctxExpr|ctxAssign)
@@ -113,10 +113,10 @@ func typecheckTypeSwitch(n *ir.SwitchStmt) {
nvar.SetTypecheck(1)
nvar.SetWalkdef(1)
}
- ncase.Rlist().SetFirst(nvar)
+ ncase.Vars.SetFirst(nvar)
}
- typecheckslice(ncase.Body().Slice(), ctxStmt)
+ typecheckslice(ncase.Body.Slice(), ctxStmt)
}
}
@@ -150,10 +150,10 @@ func (s *typeSet) add(pos src.XPos, typ *types.Type) {
func typecheckExprSwitch(n *ir.SwitchStmt) {
t := types.Types[types.TBOOL]
- if n.Left() != nil {
- n.SetLeft(typecheck(n.Left(), ctxExpr))
- n.SetLeft(defaultlit(n.Left(), nil))
- t = n.Left().Type()
+ if n.Tag != nil {
+ n.Tag = typecheck(n.Tag, ctxExpr)
+ n.Tag = defaultlit(n.Tag, nil)
+ t = n.Tag.Type()
}
var nilonly string
@@ -168,9 +168,9 @@ func typecheckExprSwitch(n *ir.SwitchStmt) {
case !IsComparable(t):
if t.IsStruct() {
- base.ErrorfAt(n.Pos(), "cannot switch on %L (struct containing %v cannot be compared)", n.Left(), IncomparableField(t).Type)
+ base.ErrorfAt(n.Pos(), "cannot switch on %L (struct containing %v cannot be compared)", n.Tag, IncomparableField(t).Type)
} else {
- base.ErrorfAt(n.Pos(), "cannot switch on %L", n.Left())
+ base.ErrorfAt(n.Pos(), "cannot switch on %L", n.Tag)
}
t = nil
}
@@ -178,9 +178,9 @@ func typecheckExprSwitch(n *ir.SwitchStmt) {
var defCase ir.Node
var cs constSet
- for _, ncase := range n.List().Slice() {
+ for _, ncase := range n.Cases.Slice() {
ncase := ncase.(*ir.CaseStmt)
- ls := ncase.List().Slice()
+ ls := ncase.List.Slice()
if len(ls) == 0 { // default:
if defCase != nil {
base.ErrorfAt(ncase.Pos(), "multiple defaults in switch (first at %v)", ir.Line(defCase))
@@ -199,15 +199,15 @@ func typecheckExprSwitch(n *ir.SwitchStmt) {
}
if nilonly != "" && !ir.IsNil(n1) {
- base.ErrorfAt(ncase.Pos(), "invalid case %v in switch (can only compare %s %v to nil)", n1, nilonly, n.Left())
+ base.ErrorfAt(ncase.Pos(), "invalid case %v in switch (can only compare %s %v to nil)", n1, nilonly, n.Tag)
} else if t.IsInterface() && !n1.Type().IsInterface() && !IsComparable(n1.Type()) {
base.ErrorfAt(ncase.Pos(), "invalid case %L in switch (incomparable type)", n1)
} else {
op1, _ := assignop(n1.Type(), t)
op2, _ := assignop(t, n1.Type())
if op1 == ir.OXXX && op2 == ir.OXXX {
- if n.Left() != nil {
- base.ErrorfAt(ncase.Pos(), "invalid case %v in switch on %v (mismatched types %v and %v)", n1, n.Left(), n1.Type(), t)
+ if n.Tag != nil {
+ base.ErrorfAt(ncase.Pos(), "invalid case %v in switch on %v (mismatched types %v and %v)", n1, n.Tag, n1.Type(), t)
} else {
base.ErrorfAt(ncase.Pos(), "invalid case %v in switch (mismatched types %v and bool)", n1, n1.Type())
}
@@ -225,18 +225,18 @@ func typecheckExprSwitch(n *ir.SwitchStmt) {
}
}
- typecheckslice(ncase.Body().Slice(), ctxStmt)
+ typecheckslice(ncase.Body.Slice(), ctxStmt)
}
}
// walkswitch walks a switch statement.
func walkswitch(sw *ir.SwitchStmt) {
// Guard against double walk, see #25776.
- if sw.List().Len() == 0 && sw.Body().Len() > 0 {
+ if sw.Cases.Len() == 0 && sw.Compiled.Len() > 0 {
return // Was fatal, but eliminating every possible source of double-walking is hard
}
- if sw.Left() != nil && sw.Left().Op() == ir.OTYPESW {
+ if sw.Tag != nil && sw.Tag.Op() == ir.OTYPESW {
walkTypeSwitch(sw)
} else {
walkExprSwitch(sw)
@@ -248,8 +248,8 @@ func walkswitch(sw *ir.SwitchStmt) {
func walkExprSwitch(sw *ir.SwitchStmt) {
lno := setlineno(sw)
- cond := sw.Left()
- sw.SetLeft(nil)
+ cond := sw.Tag
+ sw.Tag = nil
// convert switch {...} to switch true {...}
if cond == nil {
@@ -272,7 +272,7 @@ func walkExprSwitch(sw *ir.SwitchStmt) {
cond = walkexpr(cond, sw.PtrInit())
if cond.Op() != ir.OLITERAL && cond.Op() != ir.ONIL {
- cond = copyexpr(cond, cond.Type(), sw.PtrBody())
+ cond = copyexpr(cond, cond.Type(), &sw.Compiled)
}
base.Pos = lno
@@ -283,33 +283,33 @@ func walkExprSwitch(sw *ir.SwitchStmt) {
var defaultGoto ir.Node
var body ir.Nodes
- for _, ncase := range sw.List().Slice() {
+ for _, ncase := range sw.Cases.Slice() {
ncase := ncase.(*ir.CaseStmt)
label := autolabel(".s")
jmp := ir.NewBranchStmt(ncase.Pos(), ir.OGOTO, label)
// Process case dispatch.
- if ncase.List().Len() == 0 {
+ if ncase.List.Len() == 0 {
if defaultGoto != nil {
base.Fatalf("duplicate default case not detected during typechecking")
}
defaultGoto = jmp
}
- for _, n1 := range ncase.List().Slice() {
+ for _, n1 := range ncase.List.Slice() {
s.Add(ncase.Pos(), n1, jmp)
}
// Process body.
body.Append(ir.NewLabelStmt(ncase.Pos(), label))
- body.Append(ncase.Body().Slice()...)
- if fall, pos := endsInFallthrough(ncase.Body().Slice()); !fall {
+ body.Append(ncase.Body.Slice()...)
+ if fall, pos := endsInFallthrough(ncase.Body.Slice()); !fall {
br := ir.NewBranchStmt(base.Pos, ir.OBREAK, nil)
br.SetPos(pos)
body.Append(br)
}
}
- sw.PtrList().Set(nil)
+ sw.Cases.Set(nil)
if defaultGoto == nil {
br := ir.NewBranchStmt(base.Pos, ir.OBREAK, nil)
@@ -317,10 +317,10 @@ func walkExprSwitch(sw *ir.SwitchStmt) {
defaultGoto = br
}
- s.Emit(sw.PtrBody())
- sw.PtrBody().Append(defaultGoto)
- sw.PtrBody().AppendNodes(&body)
- walkstmtlist(sw.Body().Slice())
+ s.Emit(&sw.Compiled)
+ sw.Compiled.Append(defaultGoto)
+ sw.Compiled.AppendNodes(&body)
+ walkstmtlist(sw.Compiled.Slice())
}
// An exprSwitch walks an expression switch.
@@ -402,8 +402,8 @@ func (s *exprSwitch) flush() {
},
func(i int, nif *ir.IfStmt) {
run := runs[i]
- nif.SetLeft(ir.NewBinaryExpr(base.Pos, ir.OEQ, ir.NewUnaryExpr(base.Pos, ir.OLEN, s.exprname), nodintconst(runLen(run))))
- s.search(run, nif.PtrBody())
+ nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, ir.NewUnaryExpr(base.Pos, ir.OLEN, s.exprname), nodintconst(runLen(run)))
+ s.search(run, &nif.Body)
},
)
return
@@ -437,8 +437,8 @@ func (s *exprSwitch) search(cc []exprClause, out *ir.Nodes) {
},
func(i int, nif *ir.IfStmt) {
c := &cc[i]
- nif.SetLeft(c.test(s.exprname))
- nif.PtrBody().Set1(c.jmp)
+ nif.Cond = c.test(s.exprname)
+ nif.Body.Set1(c.jmp)
},
)
}
@@ -471,9 +471,9 @@ func allCaseExprsAreSideEffectFree(sw *ir.SwitchStmt) bool {
// Restricting to constants is simple and probably powerful
// enough.
- for _, ncase := range sw.List().Slice() {
+ for _, ncase := range sw.Cases.Slice() {
ncase := ncase.(*ir.CaseStmt)
- for _, v := range ncase.List().Slice() {
+ for _, v := range ncase.List.Slice() {
if v.Op() != ir.OLITERAL {
return false
}
@@ -504,11 +504,11 @@ func endsInFallthrough(stmts []ir.Node) (bool, src.XPos) {
// type switch.
func walkTypeSwitch(sw *ir.SwitchStmt) {
var s typeSwitch
- s.facename = sw.Left().(*ir.TypeSwitchGuard).Right()
- sw.SetLeft(nil)
+ s.facename = sw.Tag.(*ir.TypeSwitchGuard).X
+ sw.Tag = nil
s.facename = walkexpr(s.facename, sw.PtrInit())
- s.facename = copyexpr(s.facename, s.facename.Type(), sw.PtrBody())
+ s.facename = copyexpr(s.facename, s.facename.Type(), &sw.Compiled)
s.okname = temp(types.Types[types.TBOOL])
// Get interface descriptor word.
@@ -523,55 +523,55 @@ func walkTypeSwitch(sw *ir.SwitchStmt) {
// h := e._type.hash
// Use a similar strategy for non-empty interfaces.
ifNil := ir.NewIfStmt(base.Pos, nil, nil, nil)
- ifNil.SetLeft(ir.NewBinaryExpr(base.Pos, ir.OEQ, itab, nodnil()))
+ ifNil.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, itab, nodnil())
base.Pos = base.Pos.WithNotStmt() // disable statement marks after the first check.
- ifNil.SetLeft(typecheck(ifNil.Left(), ctxExpr))
- ifNil.SetLeft(defaultlit(ifNil.Left(), nil))
+ ifNil.Cond = typecheck(ifNil.Cond, ctxExpr)
+ ifNil.Cond = defaultlit(ifNil.Cond, nil)
// ifNil.Nbody assigned at end.
- sw.PtrBody().Append(ifNil)
+ sw.Compiled.Append(ifNil)
// Load hash from type or itab.
dotHash := ir.NewSelectorExpr(base.Pos, ir.ODOTPTR, itab, nil)
dotHash.SetType(types.Types[types.TUINT32])
dotHash.SetTypecheck(1)
if s.facename.Type().IsEmptyInterface() {
- dotHash.SetOffset(int64(2 * Widthptr)) // offset of hash in runtime._type
+ dotHash.Offset = int64(2 * Widthptr) // offset of hash in runtime._type
} else {
- dotHash.SetOffset(int64(2 * Widthptr)) // offset of hash in runtime.itab
+ dotHash.Offset = int64(2 * Widthptr) // offset of hash in runtime.itab
}
dotHash.SetBounded(true) // guaranteed not to fault
- s.hashname = copyexpr(dotHash, dotHash.Type(), sw.PtrBody())
+ s.hashname = copyexpr(dotHash, dotHash.Type(), &sw.Compiled)
br := ir.NewBranchStmt(base.Pos, ir.OBREAK, nil)
var defaultGoto, nilGoto ir.Node
var body ir.Nodes
- for _, ncase := range sw.List().Slice() {
+ for _, ncase := range sw.Cases.Slice() {
ncase := ncase.(*ir.CaseStmt)
var caseVar ir.Node
- if ncase.Rlist().Len() != 0 {
- caseVar = ncase.Rlist().First()
+ if ncase.Vars.Len() != 0 {
+ caseVar = ncase.Vars.First()
}
// For single-type cases with an interface type,
// we initialize the case variable as part of the type assertion.
// In other cases, we initialize it in the body.
var singleType *types.Type
- if ncase.List().Len() == 1 && ncase.List().First().Op() == ir.OTYPE {
- singleType = ncase.List().First().Type()
+ if ncase.List.Len() == 1 && ncase.List.First().Op() == ir.OTYPE {
+ singleType = ncase.List.First().Type()
}
caseVarInitialized := false
label := autolabel(".s")
jmp := ir.NewBranchStmt(ncase.Pos(), ir.OGOTO, label)
- if ncase.List().Len() == 0 { // default:
+ if ncase.List.Len() == 0 { // default:
if defaultGoto != nil {
base.Fatalf("duplicate default case not detected during typechecking")
}
defaultGoto = jmp
}
- for _, n1 := range ncase.List().Slice() {
+ for _, n1 := range ncase.List.Slice() {
if ir.IsNil(n1) { // case nil:
if nilGoto != nil {
base.Fatalf("duplicate nil case not detected during typechecking")
@@ -605,10 +605,10 @@ func walkTypeSwitch(sw *ir.SwitchStmt) {
typecheckslice(l, ctxStmt)
body.Append(l...)
}
- body.Append(ncase.Body().Slice()...)
+ body.Append(ncase.Body.Slice()...)
body.Append(br)
}
- sw.PtrList().Set(nil)
+ sw.Cases.Set(nil)
if defaultGoto == nil {
defaultGoto = br
@@ -616,13 +616,13 @@ func walkTypeSwitch(sw *ir.SwitchStmt) {
if nilGoto == nil {
nilGoto = defaultGoto
}
- ifNil.PtrBody().Set1(nilGoto)
+ ifNil.Body.Set1(nilGoto)
- s.Emit(sw.PtrBody())
- sw.PtrBody().Append(defaultGoto)
- sw.PtrBody().AppendNodes(&body)
+ s.Emit(&sw.Compiled)
+ sw.Compiled.Append(defaultGoto)
+ sw.Compiled.AppendNodes(&body)
- walkstmtlist(sw.Body().Slice())
+ walkstmtlist(sw.Compiled.Slice())
}
// A typeSwitch walks a type switch.
@@ -656,16 +656,16 @@ func (s *typeSwitch) Add(pos src.XPos, typ *types.Type, caseVar, jmp ir.Node) {
// cv, ok = iface.(type)
as := ir.NewAssignListStmt(pos, ir.OAS2, nil, nil)
- as.PtrList().Set2(caseVar, s.okname) // cv, ok =
+ as.Lhs.Set2(caseVar, s.okname) // cv, ok =
dot := ir.NewTypeAssertExpr(pos, s.facename, nil)
dot.SetType(typ) // iface.(type)
- as.PtrRlist().Set1(dot)
+ as.Rhs.Set1(dot)
appendWalkStmt(&body, as)
// if ok { goto label }
nif := ir.NewIfStmt(pos, nil, nil, nil)
- nif.SetLeft(s.okname)
- nif.PtrBody().Set1(jmp)
+ nif.Cond = s.okname
+ nif.Body.Set1(jmp)
body.Append(nif)
if !typ.IsInterface() {
@@ -714,8 +714,8 @@ func (s *typeSwitch) flush() {
// TODO(mdempsky): Omit hash equality check if
// there's only one type.
c := cc[i]
- nif.SetLeft(ir.NewBinaryExpr(base.Pos, ir.OEQ, s.hashname, nodintconst(int64(c.hash))))
- nif.PtrBody().AppendNodes(&c.body)
+ nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, s.hashname, nodintconst(int64(c.hash)))
+ nif.Body.AppendNodes(&c.body)
},
)
}
@@ -740,22 +740,22 @@ func binarySearch(n int, out *ir.Nodes, less func(i int) ir.Node, leaf func(i in
nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
leaf(i, nif)
base.Pos = base.Pos.WithNotStmt()
- nif.SetLeft(typecheck(nif.Left(), ctxExpr))
- nif.SetLeft(defaultlit(nif.Left(), nil))
+ nif.Cond = typecheck(nif.Cond, ctxExpr)
+ nif.Cond = defaultlit(nif.Cond, nil)
out.Append(nif)
- out = nif.PtrRlist()
+ out = &nif.Else
}
return
}
half := lo + n/2
nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
- nif.SetLeft(less(half))
+ nif.Cond = less(half)
base.Pos = base.Pos.WithNotStmt()
- nif.SetLeft(typecheck(nif.Left(), ctxExpr))
- nif.SetLeft(defaultlit(nif.Left(), nil))
- do(lo, half, nif.PtrBody())
- do(half, hi, nif.PtrRlist())
+ nif.Cond = typecheck(nif.Cond, ctxExpr)
+ nif.Cond = defaultlit(nif.Cond, nil)
+ do(lo, half, &nif.Body)
+ do(half, hi, &nif.Else)
out.Append(nif)
}
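The swt.go renames make the SwitchStmt roles explicit: the old Left is the Tag being switched on, List is the source-level Cases, and the Body that walkswitch filled with generated code becomes Compiled. A rough stand-alone sketch of that drain-Cases-into-Compiled shape, including the double-walk guard, with all types local stand-ins rather than the ir package:

package main

import "fmt"

// SwitchStmt is a toy mirror of ir.SwitchStmt after the rename.
type SwitchStmt struct {
	Tag      string
	Cases    []string
	Compiled []string
}

func walkSwitch(sw *SwitchStmt) {
	// Guard against double walk (see #25776 above): once Cases is
	// drained and Compiled is populated, lowering already happened.
	if len(sw.Cases) == 0 && len(sw.Compiled) > 0 {
		return
	}
	for _, c := range sw.Cases {
		sw.Compiled = append(sw.Compiled, "if "+sw.Tag+" == "+c+" { goto ... }")
	}
	sw.Cases = nil // was sw.PtrList().Set(nil)
}

func main() {
	sw := &SwitchStmt{Tag: "x", Cases: []string{"1", "2"}}
	walkSwitch(sw)
	walkSwitch(sw) // second walk is a no-op
	fmt.Println(len(sw.Compiled)) // 2
}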
diff --git a/src/cmd/compile/internal/gc/typecheck.go b/src/cmd/compile/internal/gc/typecheck.go
index bb5e9fad1e..73fb6bb1c1 100644
--- a/src/cmd/compile/internal/gc/typecheck.go
+++ b/src/cmd/compile/internal/gc/typecheck.go
@@ -48,7 +48,7 @@ func TypecheckPackage() {
timings.Start("fe", "typecheck", "top1")
for i := 0; i < len(Target.Decls); i++ {
n := Target.Decls[i]
- if op := n.Op(); op != ir.ODCL && op != ir.OAS && op != ir.OAS2 && (op != ir.ODCLTYPE || !n.(*ir.Decl).Left().Name().Alias()) {
+ if op := n.Op(); op != ir.ODCL && op != ir.OAS && op != ir.OAS2 && (op != ir.ODCLTYPE || !n.(*ir.Decl).X.Name().Alias()) {
Target.Decls[i] = typecheck(n, ctxStmt)
}
}
@@ -60,7 +60,7 @@ func TypecheckPackage() {
timings.Start("fe", "typecheck", "top2")
for i := 0; i < len(Target.Decls); i++ {
n := Target.Decls[i]
- if op := n.Op(); op == ir.ODCL || op == ir.OAS || op == ir.OAS2 || op == ir.ODCLTYPE && n.(*ir.Decl).Left().Name().Alias() {
+ if op := n.Op(); op == ir.ODCL || op == ir.OAS || op == ir.OAS2 || op == ir.ODCLTYPE && n.(*ir.Decl).X.Name().Alias() {
Target.Decls[i] = typecheck(n, ctxStmt)
}
}
@@ -97,7 +97,7 @@ func TypecheckPackage() {
for _, n := range Target.Decls {
if n.Op() == ir.ODCLFUNC {
n := n.(*ir.Func)
- if n.Func().OClosure != nil {
+ if n.OClosure != nil {
Curfn = n
capturevars(n)
}
@@ -142,10 +142,10 @@ func TypecheckFuncBody(n *ir.Func) {
Curfn = n
decldepth = 1
errorsBefore := base.Errors()
- typecheckslice(n.Body(), ctxStmt)
+ typecheckslice(n.Body, ctxStmt)
checkreturn(n)
if base.Errors() > errorsBefore {
- n.PtrBody().Set(nil) // type errors; do not compile
+ n.Body.Set(nil) // type errors; do not compile
}
// Now that we've checked whether n terminates,
// we can eliminate some obviously dead code.
@@ -387,7 +387,7 @@ func typecheck(n ir.Node, top int) (res ir.Node) {
// Skip over parens.
for n.Op() == ir.OPAREN {
- n = n.(*ir.ParenExpr).Left()
+ n = n.(*ir.ParenExpr).X
}
// Resolve definition of name and value of iota lazily.
@@ -479,7 +479,7 @@ func typecheck(n ir.Node, top int) (res ir.Node) {
switch n.Op() {
case ir.OCALLFUNC, ir.OCALLINTER, ir.OCALLMETH:
n := n.(*ir.CallExpr)
- if t := n.Left().Type(); t != nil && t.Kind() == types.TFUNC {
+ if t := n.X.Type(); t != nil && t.Kind() == types.TFUNC {
nr := t.NumResults()
isMulti = nr > 1
if nr == 0 {
@@ -580,7 +580,7 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
if n.Op() == ir.ONAME {
n := n.(*ir.Name)
- if n.SubOp() != 0 && top&ctxCallee == 0 {
+ if n.BuiltinOp != 0 && top&ctxCallee == 0 {
base.Errorf("use of builtin %v not in function call", n.Sym())
n.SetType(nil)
return n
@@ -615,7 +615,7 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
if n.Name().Decldepth == 0 {
n.Name().Decldepth = decldepth
}
- if n.SubOp() != 0 {
+ if n.BuiltinOp != 0 {
return n
}
if top&ctxAssign == 0 {
@@ -767,7 +767,7 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
if !t.IsPtr() {
if top&(ctxExpr|ctxStmt) != 0 {
- base.Errorf("invalid indirect of %L", n.Left())
+ base.Errorf("invalid indirect of %L", n.X)
n.SetType(nil)
return n
}
@@ -803,14 +803,14 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
var setLR func()
switch n := n.(type) {
case *ir.AssignOpStmt:
- l, r = n.Left(), n.Right()
- setLR = func() { n.SetLeft(l); n.SetRight(r) }
+ l, r = n.X, n.Y
+ setLR = func() { n.X = l; n.Y = r }
case *ir.BinaryExpr:
- l, r = n.Left(), n.Right()
- setLR = func() { n.SetLeft(l); n.SetRight(r) }
+ l, r = n.X, n.Y
+ setLR = func() { n.X = l; n.Y = r }
case *ir.LogicalExpr:
- l, r = n.Left(), n.Right()
- setLR = func() { n.SetLeft(l); n.SetRight(r) }
+ l, r = n.X, n.Y
+ setLR = func() { n.X = l; n.Y = r }
}
l = typecheck(l, ctxExpr)
r = typecheck(r, ctxExpr)
@@ -823,13 +823,13 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
if n.Op() == ir.OASOP {
n := n.(*ir.AssignOpStmt)
checkassign(n, l)
- if n.Implicit() && !okforarith[l.Type().Kind()] {
+ if n.IncDec && !okforarith[l.Type().Kind()] {
base.Errorf("invalid operation: %v (non-numeric type %v)", n, l.Type())
n.SetType(nil)
return n
}
// TODO(marvin): Fix Node.EType type union.
- op = n.SubOp()
+ op = n.AsOp
}
if op == ir.OLSH || op == ir.ORSH {
r = defaultlit(r, types.Types[types.TUINT])
@@ -866,13 +866,13 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
// can't be converted to int (see issue #41500).
if n.Op() == ir.OANDAND || n.Op() == ir.OOROR {
n := n.(*ir.LogicalExpr)
- if !n.Left().Type().IsBoolean() {
- base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, n.Op(), typekind(n.Left().Type()))
+ if !n.X.Type().IsBoolean() {
+ base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, n.Op(), typekind(n.X.Type()))
n.SetType(nil)
return n
}
- if !n.Right().Type().IsBoolean() {
- base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, n.Op(), typekind(n.Right().Type()))
+ if !n.Y.Type().IsBoolean() {
+ base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, n.Op(), typekind(n.Y.Type()))
n.SetType(nil)
return n
}
@@ -1027,9 +1027,9 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
}
if r.Op() == ir.OADDSTR {
r := r.(*ir.AddStringExpr)
- add.PtrList().AppendNodes(r.PtrList())
+ add.List.AppendNodes(&r.List)
} else {
- add.PtrList().Append(r)
+ add.List.Append(r)
}
add.SetType(t)
return add
@@ -1048,8 +1048,8 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
case ir.OBITNOT, ir.ONEG, ir.ONOT, ir.OPLUS:
n := n.(*ir.UnaryExpr)
- n.SetLeft(typecheck(n.Left(), ctxExpr))
- l := n.Left()
+ n.X = typecheck(n.X, ctxExpr)
+ l := n.X
t := l.Type()
if t == nil {
n.SetType(nil)
@@ -1067,19 +1067,19 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
// exprs
case ir.OADDR:
n := n.(*ir.AddrExpr)
- n.SetLeft(typecheck(n.Left(), ctxExpr))
- if n.Left().Type() == nil {
+ n.X = typecheck(n.X, ctxExpr)
+ if n.X.Type() == nil {
n.SetType(nil)
return n
}
- switch n.Left().Op() {
+ switch n.X.Op() {
case ir.OARRAYLIT, ir.OMAPLIT, ir.OSLICELIT, ir.OSTRUCTLIT:
n.SetOp(ir.OPTRLIT)
default:
- checklvalue(n.Left(), "take the address of")
- r := outervalue(n.Left())
+ checklvalue(n.X, "take the address of")
+ r := outervalue(n.X)
if r.Op() == ir.ONAME {
r := r.(*ir.Name)
if ir.Orig(r) != r {
@@ -1094,14 +1094,14 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
r.Name().Defn.Name().SetAddrtaken(true)
}
}
- n.SetLeft(defaultlit(n.Left(), nil))
- if n.Left().Type() == nil {
+ n.X = defaultlit(n.X, nil)
+ if n.X.Type() == nil {
n.SetType(nil)
return n
}
}
- n.SetType(types.NewPtr(n.Left().Type()))
+ n.SetType(types.NewPtr(n.X.Type()))
return n
case ir.OCOMPLIT:
@@ -1112,26 +1112,26 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
if n.Op() == ir.OXDOT {
n = adddot(n)
n.SetOp(ir.ODOT)
- if n.Left() == nil {
+ if n.X == nil {
n.SetType(nil)
return n
}
}
- n.SetLeft(typecheck(n.Left(), ctxExpr|ctxType))
+ n.X = typecheck(n.X, ctxExpr|ctxType)
- n.SetLeft(defaultlit(n.Left(), nil))
+ n.X = defaultlit(n.X, nil)
- t := n.Left().Type()
+ t := n.X.Type()
if t == nil {
- base.UpdateErrorDot(ir.Line(n), fmt.Sprint(n.Left()), fmt.Sprint(n))
+ base.UpdateErrorDot(ir.Line(n), fmt.Sprint(n.X), fmt.Sprint(n))
n.SetType(nil)
return n
}
- s := n.Sym()
+ s := n.Sel
- if n.Left().Op() == ir.OTYPE {
+ if n.X.Op() == ir.OTYPE {
return typecheckMethodExpr(n)
}
@@ -1145,7 +1145,7 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
checkwidth(t)
}
- if n.Sym().IsBlank() {
+ if n.Sel.IsBlank() {
base.Errorf("cannot refer to blank field or method")
n.SetType(nil)
return n
@@ -1155,21 +1155,21 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
// Legitimate field or method lookup failed, try to explain the error
switch {
case t.IsEmptyInterface():
- base.Errorf("%v undefined (type %v is interface with no methods)", n, n.Left().Type())
+ base.Errorf("%v undefined (type %v is interface with no methods)", n, n.X.Type())
case t.IsPtr() && t.Elem().IsInterface():
// Pointer to interface is almost always a mistake.
- base.Errorf("%v undefined (type %v is pointer to interface, not interface)", n, n.Left().Type())
+ base.Errorf("%v undefined (type %v is pointer to interface, not interface)", n, n.X.Type())
case lookdot(n, t, 1) != nil:
// Field or method matches by name, but it is not exported.
- base.Errorf("%v undefined (cannot refer to unexported field or method %v)", n, n.Sym())
+ base.Errorf("%v undefined (cannot refer to unexported field or method %v)", n, n.Sel)
default:
if mt := lookdot(n, t, 2); mt != nil && visible(mt.Sym) { // Case-insensitive lookup.
- base.Errorf("%v undefined (type %v has no field or method %v, but does have %v)", n, n.Left().Type(), n.Sym(), mt.Sym)
+ base.Errorf("%v undefined (type %v has no field or method %v, but does have %v)", n, n.X.Type(), n.Sel, mt.Sym)
} else {
- base.Errorf("%v undefined (type %v has no field or method %v)", n, n.Left().Type(), n.Sym())
+ base.Errorf("%v undefined (type %v has no field or method %v)", n, n.X.Type(), n.Sel)
}
}
n.SetType(nil)
@@ -1183,9 +1183,9 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
case ir.ODOTTYPE:
n := n.(*ir.TypeAssertExpr)
- n.SetLeft(typecheck(n.Left(), ctxExpr))
- n.SetLeft(defaultlit(n.Left(), nil))
- l := n.Left()
+ n.X = typecheck(n.X, ctxExpr)
+ n.X = defaultlit(n.X, nil)
+ l := n.X
t := l.Type()
if t == nil {
n.SetType(nil)
@@ -1197,10 +1197,10 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
return n
}
- if n.Right() != nil {
- n.SetRight(typecheck(n.Right(), ctxType))
- n.SetType(n.Right().Type())
- n.SetRight(nil)
+ if n.Ntype != nil {
+ n.Ntype = typecheck(n.Ntype, ctxType)
+ n.SetType(n.Ntype.Type())
+ n.Ntype = nil
if n.Type() == nil {
return n
}
@@ -1229,12 +1229,12 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
case ir.OINDEX:
n := n.(*ir.IndexExpr)
- n.SetLeft(typecheck(n.Left(), ctxExpr))
- n.SetLeft(defaultlit(n.Left(), nil))
- n.SetLeft(implicitstar(n.Left()))
- l := n.Left()
- n.SetRight(typecheck(n.Right(), ctxExpr))
- r := n.Right()
+ n.X = typecheck(n.X, ctxExpr)
+ n.X = defaultlit(n.X, nil)
+ n.X = implicitstar(n.X)
+ l := n.X
+ n.Index = typecheck(n.Index, ctxExpr)
+ r := n.Index
t := l.Type()
if t == nil || r.Type() == nil {
n.SetType(nil)
@@ -1247,7 +1247,7 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
return n
case types.TSTRING, types.TARRAY, types.TSLICE:
- n.SetRight(indexlit(n.Right()))
+ n.Index = indexlit(n.Index)
if t.IsString() {
n.SetType(types.ByteType)
} else {
@@ -1260,37 +1260,37 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
why = "slice"
}
- if n.Right().Type() != nil && !n.Right().Type().IsInteger() {
- base.Errorf("non-integer %s index %v", why, n.Right())
+ if n.Index.Type() != nil && !n.Index.Type().IsInteger() {
+ base.Errorf("non-integer %s index %v", why, n.Index)
return n
}
- if !n.Bounded() && ir.IsConst(n.Right(), constant.Int) {
- x := n.Right().Val()
+ if !n.Bounded() && ir.IsConst(n.Index, constant.Int) {
+ x := n.Index.Val()
if constant.Sign(x) < 0 {
- base.Errorf("invalid %s index %v (index must be non-negative)", why, n.Right())
+ base.Errorf("invalid %s index %v (index must be non-negative)", why, n.Index)
} else if t.IsArray() && constant.Compare(x, token.GEQ, constant.MakeInt64(t.NumElem())) {
- base.Errorf("invalid array index %v (out of bounds for %d-element array)", n.Right(), t.NumElem())
- } else if ir.IsConst(n.Left(), constant.String) && constant.Compare(x, token.GEQ, constant.MakeInt64(int64(len(ir.StringVal(n.Left()))))) {
- base.Errorf("invalid string index %v (out of bounds for %d-byte string)", n.Right(), len(ir.StringVal(n.Left())))
+ base.Errorf("invalid array index %v (out of bounds for %d-element array)", n.Index, t.NumElem())
+ } else if ir.IsConst(n.X, constant.String) && constant.Compare(x, token.GEQ, constant.MakeInt64(int64(len(ir.StringVal(n.X))))) {
+ base.Errorf("invalid string index %v (out of bounds for %d-byte string)", n.Index, len(ir.StringVal(n.X)))
} else if doesoverflow(x, types.Types[types.TINT]) {
- base.Errorf("invalid %s index %v (index too large)", why, n.Right())
+ base.Errorf("invalid %s index %v (index too large)", why, n.Index)
}
}
case types.TMAP:
- n.SetRight(assignconv(n.Right(), t.Key(), "map index"))
+ n.Index = assignconv(n.Index, t.Key(), "map index")
n.SetType(t.Elem())
n.SetOp(ir.OINDEXMAP)
- n.SetIndexMapLValue(false)
+ n.Assigned = false
}
return n
case ir.ORECV:
n := n.(*ir.UnaryExpr)
- n.SetLeft(typecheck(n.Left(), ctxExpr))
- n.SetLeft(defaultlit(n.Left(), nil))
- l := n.Left()
+ n.X = typecheck(n.X, ctxExpr)
+ n.X = defaultlit(n.X, nil)
+ l := n.X
t := l.Type()
if t == nil {
n.SetType(nil)
@@ -1313,10 +1313,10 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
case ir.OSEND:
n := n.(*ir.SendStmt)
- n.SetLeft(typecheck(n.Left(), ctxExpr))
- n.SetRight(typecheck(n.Right(), ctxExpr))
- n.SetLeft(defaultlit(n.Left(), nil))
- t := n.Left().Type()
+ n.Chan = typecheck(n.Chan, ctxExpr)
+ n.Value = typecheck(n.Value, ctxExpr)
+ n.Chan = defaultlit(n.Chan, nil)
+ t := n.Chan.Type()
if t == nil {
return n
}
@@ -1330,8 +1330,8 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
return n
}
- n.SetRight(assignconv(n.Right(), t.Elem(), "send"))
- if n.Right().Type() == nil {
+ n.Value = assignconv(n.Value, t.Elem(), "send")
+ if n.Value.Type() == nil {
return n
}
return n
@@ -1351,17 +1351,17 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
base.Fatalf("invalid type %v for OSLICEHEADER", n.Type())
}
- if n.Left() == nil || n.Left().Type() == nil || !n.Left().Type().IsUnsafePtr() {
+ if n.Ptr == nil || n.Ptr.Type() == nil || !n.Ptr.Type().IsUnsafePtr() {
base.Fatalf("need unsafe.Pointer for OSLICEHEADER")
}
- if x := n.List().Len(); x != 2 {
+ if x := n.LenCap.Len(); x != 2 {
base.Fatalf("expected 2 params (len, cap) for OSLICEHEADER, got %d", x)
}
- n.SetLeft(typecheck(n.Left(), ctxExpr))
- l := typecheck(n.List().First(), ctxExpr)
- c := typecheck(n.List().Second(), ctxExpr)
+ n.Ptr = typecheck(n.Ptr, ctxExpr)
+ l := typecheck(n.LenCap.First(), ctxExpr)
+ c := typecheck(n.LenCap.Second(), ctxExpr)
l = defaultlit(l, types.Types[types.TINT])
c = defaultlit(c, types.Types[types.TINT])
@@ -1377,8 +1377,8 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
base.Fatalf("len larger than cap for OSLICEHEADER")
}
- n.List().SetFirst(l)
- n.List().SetSecond(c)
+ n.LenCap.SetFirst(l)
+ n.LenCap.SetSecond(c)
return n
case ir.OMAKESLICECOPY:
@@ -1397,28 +1397,28 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
base.Fatalf("invalid type %v for OMAKESLICECOPY", n.Type())
}
- if n.Left() == nil {
+ if n.Len == nil {
base.Fatalf("missing len argument for OMAKESLICECOPY")
}
- if n.Right() == nil {
+ if n.Cap == nil {
base.Fatalf("missing slice argument to copy for OMAKESLICECOPY")
}
- n.SetLeft(typecheck(n.Left(), ctxExpr))
- n.SetRight(typecheck(n.Right(), ctxExpr))
+ n.Len = typecheck(n.Len, ctxExpr)
+ n.Cap = typecheck(n.Cap, ctxExpr)
- n.SetLeft(defaultlit(n.Left(), types.Types[types.TINT]))
+ n.Len = defaultlit(n.Len, types.Types[types.TINT])
- if !n.Left().Type().IsInteger() && n.Type().Kind() != types.TIDEAL {
+ if !n.Len.Type().IsInteger() && n.Type().Kind() != types.TIDEAL {
base.Errorf("non-integer len argument in OMAKESLICECOPY")
}
- if ir.IsConst(n.Left(), constant.Int) {
- if doesoverflow(n.Left().Val(), types.Types[types.TINT]) {
+ if ir.IsConst(n.Len, constant.Int) {
+ if doesoverflow(n.Len.Val(), types.Types[types.TINT]) {
base.Fatalf("len for OMAKESLICECOPY too large")
}
- if constant.Sign(n.Left().Val()) < 0 {
+ if constant.Sign(n.Len.Val()) < 0 {
base.Fatalf("len for OMAKESLICECOPY must be non-negative")
}
}
@@ -1426,33 +1426,33 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
case ir.OSLICE, ir.OSLICE3:
n := n.(*ir.SliceExpr)
- n.SetLeft(typecheck(n.Left(), ctxExpr))
+ n.X = typecheck(n.X, ctxExpr)
low, high, max := n.SliceBounds()
hasmax := n.Op().IsSlice3()
low = typecheck(low, ctxExpr)
high = typecheck(high, ctxExpr)
max = typecheck(max, ctxExpr)
- n.SetLeft(defaultlit(n.Left(), nil))
+ n.X = defaultlit(n.X, nil)
low = indexlit(low)
high = indexlit(high)
max = indexlit(max)
n.SetSliceBounds(low, high, max)
- l := n.Left()
+ l := n.X
if l.Type() == nil {
n.SetType(nil)
return n
}
if l.Type().IsArray() {
- if !islvalue(n.Left()) {
+ if !islvalue(n.X) {
base.Errorf("invalid operation %v (slice of unaddressable value)", n)
n.SetType(nil)
return n
}
- addr := nodAddr(n.Left())
+ addr := nodAddr(n.X)
addr.SetImplicit(true)
- n.SetLeft(typecheck(addr, ctxExpr))
- l = n.Left()
+ n.X = typecheck(addr, ctxExpr)
+ l = n.X
}
t := l.Type()
var tp *types.Type
@@ -1507,27 +1507,27 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
n.Use = ir.CallUseStmt
}
typecheckslice(n.Init().Slice(), ctxStmt) // imported rewritten f(g()) calls (#30907)
- n.SetLeft(typecheck(n.Left(), ctxExpr|ctxType|ctxCallee))
- if n.Left().Diag() {
+ n.X = typecheck(n.X, ctxExpr|ctxType|ctxCallee)
+ if n.X.Diag() {
n.SetDiag(true)
}
- l := n.Left()
+ l := n.X
- if l.Op() == ir.ONAME && l.(*ir.Name).SubOp() != 0 {
+ if l.Op() == ir.ONAME && l.(*ir.Name).BuiltinOp != 0 {
l := l.(*ir.Name)
- if n.IsDDD() && l.SubOp() != ir.OAPPEND {
+ if n.IsDDD && l.BuiltinOp != ir.OAPPEND {
base.Errorf("invalid use of ... with builtin %v", l)
}
// builtin: OLEN, OCAP, etc.
- switch l.SubOp() {
+ switch l.BuiltinOp {
default:
base.Fatalf("unknown builtin %v", l)
case ir.OAPPEND, ir.ODELETE, ir.OMAKE, ir.OPRINT, ir.OPRINTN, ir.ORECOVER:
- n.SetOp(l.SubOp())
- n.SetLeft(nil)
+ n.SetOp(l.BuiltinOp)
+ n.X = nil
n.SetTypecheck(0) // re-typechecking new op is OK, not a loop
return typecheck(n, top)
@@ -1540,7 +1540,7 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
n.SetType(nil)
return n
}
- u := ir.NewUnaryExpr(n.Pos(), l.SubOp(), arg)
+ u := ir.NewUnaryExpr(n.Pos(), l.BuiltinOp, arg)
return typecheck(initExpr(n.Init().Slice(), u), top) // typecheckargs can add to old.Init
case ir.OCOMPLEX, ir.OCOPY:
@@ -1550,16 +1550,16 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
n.SetType(nil)
return n
}
- b := ir.NewBinaryExpr(n.Pos(), l.SubOp(), arg1, arg2)
+ b := ir.NewBinaryExpr(n.Pos(), l.BuiltinOp, arg1, arg2)
return typecheck(initExpr(n.Init().Slice(), b), top) // typecheckargs can add to old.Init
}
panic("unreachable")
}
- n.SetLeft(defaultlit(n.Left(), nil))
- l = n.Left()
+ n.X = defaultlit(n.X, nil)
+ l = n.X
if l.Op() == ir.OTYPE {
- if n.IsDDD() {
+ if n.IsDDD {
if !l.Type().Broke() {
base.Errorf("invalid use of ... in type conversion to %v", l.Type())
}
@@ -1600,7 +1600,7 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
// It isn't necessary, so just do a sanity check.
tp := t.Recv().Type
- if l.Left() == nil || !types.Identical(l.Left().Type(), tp) {
+ if l.X == nil || !types.Identical(l.X.Type(), tp) {
base.Fatalf("method receiver")
}
@@ -1622,15 +1622,15 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
}
}
- typecheckaste(ir.OCALL, n.Left(), n.IsDDD(), t.Params(), n.List(), func() string { return fmt.Sprintf("argument to %v", n.Left()) })
+ typecheckaste(ir.OCALL, n.X, n.IsDDD, t.Params(), n.Args, func() string { return fmt.Sprintf("argument to %v", n.X) })
if t.NumResults() == 0 {
return n
}
if t.NumResults() == 1 {
n.SetType(l.Type().Results().Field(0).Type)
- if n.Op() == ir.OCALLFUNC && n.Left().Op() == ir.ONAME {
- if sym := n.Left().(*ir.Name).Sym(); isRuntimePkg(sym.Pkg) && sym.Name == "getg" {
+ if n.Op() == ir.OCALLFUNC && n.X.Op() == ir.ONAME {
+ if sym := n.X.(*ir.Name).Sym(); isRuntimePkg(sym.Pkg) && sym.Name == "getg" {
// Emit code for runtime.getg() directly instead of calling function.
// Most such rewrites (for example the similar one for math.Sqrt) should be done in walk,
// so that the ordering pass can make sure to preserve the semantics of the original code
@@ -1659,10 +1659,10 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
case ir.OCAP, ir.OLEN:
n := n.(*ir.UnaryExpr)
- n.SetLeft(typecheck(n.Left(), ctxExpr))
- n.SetLeft(defaultlit(n.Left(), nil))
- n.SetLeft(implicitstar(n.Left()))
- l := n.Left()
+ n.X = typecheck(n.X, ctxExpr)
+ n.X = defaultlit(n.X, nil)
+ n.X = implicitstar(n.X)
+ l := n.X
t := l.Type()
if t == nil {
n.SetType(nil)
@@ -1686,8 +1686,8 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
case ir.OREAL, ir.OIMAG:
n := n.(*ir.UnaryExpr)
- n.SetLeft(typecheck(n.Left(), ctxExpr))
- l := n.Left()
+ n.X = typecheck(n.X, ctxExpr)
+ l := n.X
t := l.Type()
if t == nil {
n.SetType(nil)
@@ -1711,8 +1711,8 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
case ir.OCOMPLEX:
n := n.(*ir.BinaryExpr)
- l := typecheck(n.Left(), ctxExpr)
- r := typecheck(n.Right(), ctxExpr)
+ l := typecheck(n.X, ctxExpr)
+ r := typecheck(n.Y, ctxExpr)
if l.Type() == nil || r.Type() == nil {
n.SetType(nil)
return n
@@ -1722,8 +1722,8 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
n.SetType(nil)
return n
}
- n.SetLeft(l)
- n.SetRight(r)
+ n.X = l
+ n.Y = r
if !types.Identical(l.Type(), r.Type()) {
base.Errorf("invalid operation: %v (mismatched types %v and %v)", n, l.Type(), r.Type())
@@ -1752,9 +1752,9 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
case ir.OCLOSE:
n := n.(*ir.UnaryExpr)
- n.SetLeft(typecheck(n.Left(), ctxExpr))
- n.SetLeft(defaultlit(n.Left(), nil))
- l := n.Left()
+ n.X = typecheck(n.X, ctxExpr)
+ n.X = defaultlit(n.X, nil)
+ l := n.X
t := l.Type()
if t == nil {
n.SetType(nil)
@@ -1776,7 +1776,7 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
case ir.ODELETE:
n := n.(*ir.CallExpr)
typecheckargs(n)
- args := n.List()
+ args := n.Args
if args.Len() == 0 {
base.Errorf("missing arguments to delete")
n.SetType(nil)
@@ -1809,7 +1809,7 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
case ir.OAPPEND:
n := n.(*ir.CallExpr)
typecheckargs(n)
- args := n.List()
+ args := n.Args
if args.Len() == 0 {
base.Errorf("missing arguments to append")
n.SetType(nil)
@@ -1835,7 +1835,7 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
return n
}
- if n.IsDDD() {
+ if n.IsDDD {
if args.Len() == 1 {
base.Errorf("cannot use ... on first argument to append")
n.SetType(nil)
@@ -1870,39 +1870,39 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
case ir.OCOPY:
n := n.(*ir.BinaryExpr)
n.SetType(types.Types[types.TINT])
- n.SetLeft(typecheck(n.Left(), ctxExpr))
- n.SetLeft(defaultlit(n.Left(), nil))
- n.SetRight(typecheck(n.Right(), ctxExpr))
- n.SetRight(defaultlit(n.Right(), nil))
- if n.Left().Type() == nil || n.Right().Type() == nil {
+ n.X = typecheck(n.X, ctxExpr)
+ n.X = defaultlit(n.X, nil)
+ n.Y = typecheck(n.Y, ctxExpr)
+ n.Y = defaultlit(n.Y, nil)
+ if n.X.Type() == nil || n.Y.Type() == nil {
n.SetType(nil)
return n
}
// copy([]byte, string)
- if n.Left().Type().IsSlice() && n.Right().Type().IsString() {
- if types.Identical(n.Left().Type().Elem(), types.ByteType) {
+ if n.X.Type().IsSlice() && n.Y.Type().IsString() {
+ if types.Identical(n.X.Type().Elem(), types.ByteType) {
return n
}
- base.Errorf("arguments to copy have different element types: %L and string", n.Left().Type())
+ base.Errorf("arguments to copy have different element types: %L and string", n.X.Type())
n.SetType(nil)
return n
}
- if !n.Left().Type().IsSlice() || !n.Right().Type().IsSlice() {
- if !n.Left().Type().IsSlice() && !n.Right().Type().IsSlice() {
- base.Errorf("arguments to copy must be slices; have %L, %L", n.Left().Type(), n.Right().Type())
- } else if !n.Left().Type().IsSlice() {
- base.Errorf("first argument to copy should be slice; have %L", n.Left().Type())
+ if !n.X.Type().IsSlice() || !n.Y.Type().IsSlice() {
+ if !n.X.Type().IsSlice() && !n.Y.Type().IsSlice() {
+ base.Errorf("arguments to copy must be slices; have %L, %L", n.X.Type(), n.Y.Type())
+ } else if !n.X.Type().IsSlice() {
+ base.Errorf("first argument to copy should be slice; have %L", n.X.Type())
} else {
- base.Errorf("second argument to copy should be slice or string; have %L", n.Right().Type())
+ base.Errorf("second argument to copy should be slice or string; have %L", n.Y.Type())
}
n.SetType(nil)
return n
}
- if !types.Identical(n.Left().Type().Elem(), n.Right().Type().Elem()) {
- base.Errorf("arguments to copy have different element types: %L and %L", n.Left().Type(), n.Right().Type())
+ if !types.Identical(n.X.Type().Elem(), n.Y.Type().Elem()) {
+ base.Errorf("arguments to copy have different element types: %L and %L", n.X.Type(), n.Y.Type())
n.SetType(nil)
return n
}
@@ -1911,17 +1911,17 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
case ir.OCONV:
n := n.(*ir.ConvExpr)
checkwidth(n.Type()) // ensure width is calculated for backend
- n.SetLeft(typecheck(n.Left(), ctxExpr))
- n.SetLeft(convlit1(n.Left(), n.Type(), true, nil))
- t := n.Left().Type()
+ n.X = typecheck(n.X, ctxExpr)
+ n.X = convlit1(n.X, n.Type(), true, nil)
+ t := n.X.Type()
if t == nil || n.Type() == nil {
n.SetType(nil)
return n
}
- op, why := convertop(n.Left().Op() == ir.OLITERAL, t, n.Type())
+ op, why := convertop(n.X.Op() == ir.OLITERAL, t, n.Type())
if op == ir.OXXX {
- if !n.Diag() && !n.Type().Broke() && !n.Left().Diag() {
- base.Errorf("cannot convert %L to type %v%s", n.Left(), n.Type(), why)
+ if !n.Diag() && !n.Type().Broke() && !n.X.Diag() {
+ base.Errorf("cannot convert %L to type %v%s", n.X, n.Type(), why)
n.SetDiag(true)
}
n.SetOp(ir.OCONV)
@@ -1947,7 +1947,7 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
// ok
case ir.OSTR2RUNES:
- if n.Left().Op() == ir.OLITERAL {
+ if n.X.Op() == ir.OLITERAL {
return stringtoruneslit(n)
}
}
@@ -1955,14 +1955,14 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
case ir.OMAKE:
n := n.(*ir.CallExpr)
- args := n.List().Slice()
+ args := n.Args.Slice()
if len(args) == 0 {
base.Errorf("missing argument to make")
n.SetType(nil)
return n
}
- n.PtrList().Set(nil)
+ n.Args.Set(nil)
l := args[0]
l = typecheck(l, ctxType)
t := l.Type()
@@ -2063,26 +2063,26 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
case ir.ONEW:
n := n.(*ir.UnaryExpr)
- if n.Left() == nil {
+ if n.X == nil {
// Fatalf because the OCALL above checked for us,
// so this must be an internally-generated mistake.
base.Fatalf("missing argument to new")
}
- l := n.Left()
+ l := n.X
l = typecheck(l, ctxType)
t := l.Type()
if t == nil {
n.SetType(nil)
return n
}
- n.SetLeft(l)
+ n.X = l
n.SetType(types.NewPtr(t))
return n
case ir.OPRINT, ir.OPRINTN:
n := n.(*ir.CallExpr)
typecheckargs(n)
- ls := n.List().Slice()
+ ls := n.Args.Slice()
for i1, n1 := range ls {
// Special case for print: int constant is int64, not int.
if ir.IsConst(n1, constant.Int) {
@@ -2095,9 +2095,9 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
case ir.OPANIC:
n := n.(*ir.UnaryExpr)
- n.SetLeft(typecheck(n.Left(), ctxExpr))
- n.SetLeft(defaultlit(n.Left(), types.Types[types.TINTER]))
- if n.Left().Type() == nil {
+ n.X = typecheck(n.X, ctxExpr)
+ n.X = defaultlit(n.X, types.Types[types.TINTER])
+ if n.X.Type() == nil {
n.SetType(nil)
return n
}
@@ -2105,7 +2105,7 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
case ir.ORECOVER:
n := n.(*ir.CallExpr)
- if n.List().Len() != 0 {
+ if n.Args.Len() != 0 {
base.Errorf("too many arguments to recover")
n.SetType(nil)
return n
@@ -2124,8 +2124,8 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
case ir.OITAB:
n := n.(*ir.UnaryExpr)
- n.SetLeft(typecheck(n.Left(), ctxExpr))
- t := n.Left().Type()
+ n.X = typecheck(n.X, ctxExpr)
+ t := n.X.Type()
if t == nil {
n.SetType(nil)
return n
@@ -2145,8 +2145,8 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
case ir.OSPTR:
n := n.(*ir.UnaryExpr)
- n.SetLeft(typecheck(n.Left(), ctxExpr))
- t := n.Left().Type()
+ n.X = typecheck(n.X, ctxExpr)
+ t := n.X.Type()
if t == nil {
n.SetType(nil)
return n
@@ -2166,13 +2166,13 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
case ir.OCFUNC:
n := n.(*ir.UnaryExpr)
- n.SetLeft(typecheck(n.Left(), ctxExpr))
+ n.X = typecheck(n.X, ctxExpr)
n.SetType(types.Types[types.TUINTPTR])
return n
case ir.OCONVNOP:
n := n.(*ir.ConvExpr)
- n.SetLeft(typecheck(n.Left(), ctxExpr))
+ n.X = typecheck(n.X, ctxExpr)
return n
// statements
@@ -2181,8 +2181,8 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
typecheckas(n)
// Code that creates temps does not bother to set defn, so do it here.
- if n.Left().Op() == ir.ONAME && ir.IsAutoTmp(n.Left()) {
- n.Left().Name().Defn = n
+ if n.X.Op() == ir.ONAME && ir.IsAutoTmp(n.X) {
+ n.X.Name().Defn = n
}
return n
@@ -2201,7 +2201,7 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
case ir.OBLOCK:
n := n.(*ir.BlockStmt)
- typecheckslice(n.List().Slice(), ctxStmt)
+ typecheckslice(n.List.Slice(), ctxStmt)
return n
case ir.OLABEL:
@@ -2216,8 +2216,8 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
case ir.ODEFER, ir.OGO:
n := n.(*ir.GoDeferStmt)
- n.SetLeft(typecheck(n.Left(), ctxStmt|ctxExpr))
- if !n.Left().Diag() {
+ n.Call = typecheck(n.Call, ctxStmt|ctxExpr)
+ if !n.Call.Diag() {
checkdefergo(n)
}
return n
@@ -2226,35 +2226,35 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
n := n.(*ir.ForStmt)
typecheckslice(n.Init().Slice(), ctxStmt)
decldepth++
- n.SetLeft(typecheck(n.Left(), ctxExpr))
- n.SetLeft(defaultlit(n.Left(), nil))
- if n.Left() != nil {
- t := n.Left().Type()
+ n.Cond = typecheck(n.Cond, ctxExpr)
+ n.Cond = defaultlit(n.Cond, nil)
+ if n.Cond != nil {
+ t := n.Cond.Type()
if t != nil && !t.IsBoolean() {
- base.Errorf("non-bool %L used as for condition", n.Left())
+ base.Errorf("non-bool %L used as for condition", n.Cond)
}
}
- n.SetRight(typecheck(n.Right(), ctxStmt))
+ n.Post = typecheck(n.Post, ctxStmt)
if n.Op() == ir.OFORUNTIL {
- typecheckslice(n.List().Slice(), ctxStmt)
+ typecheckslice(n.Late.Slice(), ctxStmt)
}
- typecheckslice(n.Body().Slice(), ctxStmt)
+ typecheckslice(n.Body.Slice(), ctxStmt)
decldepth--
return n
case ir.OIF:
n := n.(*ir.IfStmt)
typecheckslice(n.Init().Slice(), ctxStmt)
- n.SetLeft(typecheck(n.Left(), ctxExpr))
- n.SetLeft(defaultlit(n.Left(), nil))
- if n.Left() != nil {
- t := n.Left().Type()
+ n.Cond = typecheck(n.Cond, ctxExpr)
+ n.Cond = defaultlit(n.Cond, nil)
+ if n.Cond != nil {
+ t := n.Cond.Type()
if t != nil && !t.IsBoolean() {
- base.Errorf("non-bool %L used as if condition", n.Left())
+ base.Errorf("non-bool %L used as if condition", n.Cond)
}
}
- typecheckslice(n.Body().Slice(), ctxStmt)
- typecheckslice(n.Rlist().Slice(), ctxStmt)
+ typecheckslice(n.Body.Slice(), ctxStmt)
+ typecheckslice(n.Else.Slice(), ctxStmt)
return n
case ir.ORETURN:
@@ -2266,10 +2266,10 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
return n
}
- if hasNamedResults(Curfn) && n.List().Len() == 0 {
+ if hasNamedResults(Curfn) && n.Results.Len() == 0 {
return n
}
- typecheckaste(ir.ORETURN, nil, false, Curfn.Type().Results(), n.List(), func() string { return "return argument" })
+ typecheckaste(ir.ORETURN, nil, false, Curfn.Type().Results(), n.Results, func() string { return "return argument" })
return n
case ir.ORETJMP:
@@ -2300,13 +2300,13 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
case ir.ODCLCONST:
n := n.(*ir.Decl)
- n.SetLeft(typecheck(n.Left(), ctxExpr))
+ n.X = typecheck(n.X, ctxExpr)
return n
case ir.ODCLTYPE:
n := n.(*ir.Decl)
- n.SetLeft(typecheck(n.Left(), ctxType))
- checkwidth(n.Left().Type())
+ n.X = typecheck(n.X, ctxType)
+ checkwidth(n.X.Type())
return n
}
@@ -2321,13 +2321,13 @@ func typecheckargs(n ir.Node) {
default:
base.Fatalf("typecheckargs %+v", n.Op())
case *ir.CallExpr:
- list = n.List().Slice()
- if n.IsDDD() {
+ list = n.Args.Slice()
+ if n.IsDDD {
typecheckslice(list, ctxExpr)
return
}
case *ir.ReturnStmt:
- list = n.List().Slice()
+ list = n.Results.Slice()
}
if len(list) != 1 {
typecheckslice(list, ctxExpr)
@@ -2348,7 +2348,7 @@ func typecheckargs(n ir.Node) {
}
as := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
- as.PtrRlist().Append(list...)
+ as.Rhs.Append(list...)
// If we're outside of function context, then this call will
// be executed during the generated init function. However,
@@ -2363,7 +2363,7 @@ func typecheckargs(n ir.Node) {
for _, f := range t.FieldSlice() {
t := temp(f.Type)
as.PtrInit().Append(ir.NewDecl(base.Pos, ir.ODCL, t))
- as.PtrList().Append(t)
+ as.Lhs.Append(t)
list = append(list, t)
}
if static {
@@ -2372,9 +2372,9 @@ func typecheckargs(n ir.Node) {
switch n := n.(type) {
case *ir.CallExpr:
- n.PtrList().Set(list)
+ n.Args.Set(list)
case *ir.ReturnStmt:
- n.PtrList().Set(list)
+ n.Results.Set(list)
}
n.PtrInit().Append(typecheck(as, ctxStmt))
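
Aside for context (not part of the patch): typecheckargs handles the "imported rewritten f(g()) calls" case referenced earlier by splitting one multi-value call into compiler-generated temporaries — an OAS2 assignment whose Lhs are fresh temps and whose Rhs is the call — then substituting the temps back into Args or Results. A minimal source-level sketch of the equivalent expansion, using hypothetical f and g:

package main

import "fmt"

func g() (int, string)  { return 42, "go" }
func f(n int, s string) { fmt.Println(n, s) }

func main() {
	// What the program says: f(g())
	// What typecheckargs effectively arranges: fresh temps assigned from g,
	// then passed to f as ordinary single-value arguments.
	t1, t2 := g()
	f(t1, t2)
}
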
@@ -2425,7 +2425,7 @@ func checkdefergo(n *ir.GoDeferStmt) {
what = "go"
}
- switch n.Left().Op() {
+ switch n.Call.Op() {
// ok
case ir.OCALLINTER,
ir.OCALLMETH,
@@ -2451,16 +2451,16 @@ func checkdefergo(n *ir.GoDeferStmt) {
ir.ONEW,
ir.OREAL,
ir.OLITERAL: // conversion or unsafe.Alignof, Offsetof, Sizeof
- if orig := ir.Orig(n.Left()); orig.Op() == ir.OCONV {
+ if orig := ir.Orig(n.Call); orig.Op() == ir.OCONV {
break
}
- base.ErrorfAt(n.Pos(), "%s discards result of %v", what, n.Left())
+ base.ErrorfAt(n.Pos(), "%s discards result of %v", what, n.Call)
return
}
// type is broken or missing, most likely a method call on a broken type
// we will warn about the broken type elsewhere. no need to emit a potentially confusing error
- if n.Left().Type() == nil || n.Left().Type().Broke() {
+ if n.Call.Type() == nil || n.Call.Type().Broke() {
return
}
@@ -2493,31 +2493,31 @@ func implicitstar(n ir.Node) ir.Node {
}
func needOneArg(n *ir.CallExpr, f string, args ...interface{}) (ir.Node, bool) {
- if n.List().Len() == 0 {
+ if n.Args.Len() == 0 {
p := fmt.Sprintf(f, args...)
base.Errorf("missing argument to %s: %v", p, n)
return nil, false
}
- if n.List().Len() > 1 {
+ if n.Args.Len() > 1 {
p := fmt.Sprintf(f, args...)
base.Errorf("too many arguments to %s: %v", p, n)
- return n.List().First(), false
+ return n.Args.First(), false
}
- return n.List().First(), true
+ return n.Args.First(), true
}
func needTwoArgs(n *ir.CallExpr) (ir.Node, ir.Node, bool) {
- if n.List().Len() != 2 {
- if n.List().Len() < 2 {
+ if n.Args.Len() != 2 {
+ if n.Args.Len() < 2 {
base.Errorf("not enough arguments in call to %v", n)
} else {
base.Errorf("too many arguments in call to %v", n)
}
return nil, nil, false
}
- return n.List().First(), n.List().Second(), true
+ return n.Args.First(), n.Args.Second(), true
}
func lookdot1(errnode ir.Node, s *types.Sym, t *types.Type, fs *types.Fields, dostrcmp int) *types.Field {
@@ -2556,7 +2556,7 @@ func typecheckMethodExpr(n *ir.SelectorExpr) (res ir.Node) {
defer tracePrint("typecheckMethodExpr", n)(&res)
}
- t := n.Left().Type()
+ t := n.X.Type()
// Compute the method set for t.
var ms *types.Fields
@@ -2565,7 +2565,7 @@ func typecheckMethodExpr(n *ir.SelectorExpr) (res ir.Node) {
} else {
mt := methtype(t)
if mt == nil {
- base.Errorf("%v undefined (type %v has no method %v)", n, t, n.Sym())
+ base.Errorf("%v undefined (type %v has no method %v)", n, t, n.Sel)
n.SetType(nil)
return n
}
@@ -2584,7 +2584,7 @@ func typecheckMethodExpr(n *ir.SelectorExpr) (res ir.Node) {
}
}
- s := n.Sym()
+ s := n.Sel
m := lookdot1(n, s, t, ms, 0)
if m == nil {
if lookdot1(n, s, t, ms, 1) != nil {
@@ -2604,10 +2604,10 @@ func typecheckMethodExpr(n *ir.SelectorExpr) (res ir.Node) {
return n
}
- me := ir.NewMethodExpr(n.Pos(), n.Left().Type(), m)
- me.SetType(methodfunc(m.Type, n.Left().Type()))
+ me := ir.NewMethodExpr(n.Pos(), n.X.Type(), m)
+ me.SetType(methodfunc(m.Type, n.X.Type()))
f := NewName(methodSym(t, m.Sym))
- f.SetClass(ir.PFUNC)
+ f.Class_ = ir.PFUNC
f.SetType(me.Type())
me.FuncName_ = f
@@ -2635,7 +2635,7 @@ func derefall(t *types.Type) *types.Type {
}
func lookdot(n *ir.SelectorExpr, t *types.Type, dostrcmp int) *types.Field {
- s := n.Sym()
+ s := n.Sel
dowidth(t)
var f1 *types.Field
@@ -2644,7 +2644,7 @@ func lookdot(n *ir.SelectorExpr, t *types.Type, dostrcmp int) *types.Field {
}
var f2 *types.Field
- if n.Left().Type() == t || n.Left().Type().Sym() == nil {
+ if n.X.Type() == t || n.X.Type().Sym() == nil {
mt := methtype(t)
if mt != nil {
f2 = lookdot1(n, s, mt, mt.Methods(), dostrcmp)
@@ -2657,18 +2657,18 @@ func lookdot(n *ir.SelectorExpr, t *types.Type, dostrcmp int) *types.Field {
return f1
}
if f2 != nil {
- base.Errorf("%v is both field and method", n.Sym())
+ base.Errorf("%v is both field and method", n.Sel)
}
if f1.Offset == types.BADWIDTH {
base.Fatalf("lookdot badwidth %v %p", f1, f1)
}
- n.SetOffset(f1.Offset)
+ n.Offset = f1.Offset
n.SetType(f1.Type)
if t.IsInterface() {
- if n.Left().Type().IsPtr() {
- star := ir.NewStarExpr(base.Pos, n.Left())
+ if n.X.Type().IsPtr() {
+ star := ir.NewStarExpr(base.Pos, n.X)
star.SetImplicit(true)
- n.SetLeft(typecheck(star, ctxExpr))
+ n.X = typecheck(star, ctxExpr)
}
n.SetOp(ir.ODOTINTER)
@@ -2682,29 +2682,29 @@ func lookdot(n *ir.SelectorExpr, t *types.Type, dostrcmp int) *types.Field {
// Already in the process of diagnosing an error.
return f2
}
- tt := n.Left().Type()
+ tt := n.X.Type()
dowidth(tt)
rcvr := f2.Type.Recv().Type
if !types.Identical(rcvr, tt) {
if rcvr.IsPtr() && types.Identical(rcvr.Elem(), tt) {
- checklvalue(n.Left(), "call pointer method on")
- addr := nodAddr(n.Left())
+ checklvalue(n.X, "call pointer method on")
+ addr := nodAddr(n.X)
addr.SetImplicit(true)
- n.SetLeft(typecheck(addr, ctxType|ctxExpr))
+ n.X = typecheck(addr, ctxType|ctxExpr)
} else if tt.IsPtr() && (!rcvr.IsPtr() || rcvr.IsPtr() && rcvr.Elem().NotInHeap()) && types.Identical(tt.Elem(), rcvr) {
- star := ir.NewStarExpr(base.Pos, n.Left())
+ star := ir.NewStarExpr(base.Pos, n.X)
star.SetImplicit(true)
- n.SetLeft(typecheck(star, ctxType|ctxExpr))
+ n.X = typecheck(star, ctxType|ctxExpr)
} else if tt.IsPtr() && tt.Elem().IsPtr() && types.Identical(derefall(tt), derefall(rcvr)) {
- base.Errorf("calling method %v with receiver %L requires explicit dereference", n.Sym(), n.Left())
+ base.Errorf("calling method %v with receiver %L requires explicit dereference", n.Sel, n.X)
for tt.IsPtr() {
// Stop one level early for method with pointer receiver.
if rcvr.IsPtr() && !tt.Elem().IsPtr() {
break
}
- star := ir.NewStarExpr(base.Pos, n.Left())
+ star := ir.NewStarExpr(base.Pos, n.X)
star.SetImplicit(true)
- n.SetLeft(typecheck(star, ctxType|ctxExpr))
+ n.X = typecheck(star, ctxType|ctxExpr)
tt = tt.Elem()
}
} else {
@@ -2712,24 +2712,24 @@ func lookdot(n *ir.SelectorExpr, t *types.Type, dostrcmp int) *types.Field {
}
}
- implicit, ll := n.Implicit(), n.Left()
+ implicit, ll := n.Implicit(), n.X
for ll != nil && (ll.Op() == ir.ODOT || ll.Op() == ir.ODOTPTR || ll.Op() == ir.ODEREF) {
switch l := ll.(type) {
case *ir.SelectorExpr:
- implicit, ll = l.Implicit(), l.Left()
+ implicit, ll = l.Implicit(), l.X
case *ir.StarExpr:
- implicit, ll = l.Implicit(), l.Left()
+ implicit, ll = l.Implicit(), l.X
}
}
if implicit && ll.Type().IsPtr() && ll.Type().Sym() != nil && ll.Type().Sym().Def != nil && ir.AsNode(ll.Type().Sym().Def).Op() == ir.OTYPE {
// It is invalid to automatically dereference a named pointer type when selecting a method.
// Make n.Left == ll to clarify error message.
- n.SetLeft(ll)
+ n.X = ll
return nil
}
- n.SetSym(methodSym(n.Left().Type(), f2.Sym))
- n.SetOffset(f2.Offset)
+ n.Sel = methodSym(n.X.Type(), f2.Sym)
+ n.Offset = f2.Offset
n.SetType(f2.Type)
n.SetOp(ir.ODOTMETH)
n.Selection = f2
@@ -2968,18 +2968,18 @@ func pushtype(nn ir.Node, t *types.Type) ir.Node {
return nn
}
n := nn.(*ir.CompLitExpr)
- if n.Right() != nil {
+ if n.Ntype != nil {
return n
}
switch {
case iscomptype(t):
// For T, return T{...}.
- n.SetRight(ir.TypeNode(t))
+ n.Ntype = ir.TypeNode(t)
case t.IsPtr() && iscomptype(t.Elem()):
// For *T, return &T{...}.
- n.SetRight(ir.TypeNode(t.Elem()))
+ n.Ntype = ir.TypeNode(t.Elem())
addr := nodAddrAt(n.Pos(), n)
addr.SetImplicit(true)
@@ -3000,7 +3000,7 @@ func typecheckcomplit(n *ir.CompLitExpr) (res ir.Node) {
base.Pos = lno
}()
- if n.Right() == nil {
+ if n.Ntype == nil {
base.ErrorfAt(n.Pos(), "missing type in composite literal")
n.SetType(nil)
return n
@@ -3009,25 +3009,25 @@ func typecheckcomplit(n *ir.CompLitExpr) (res ir.Node) {
// Save original node (including n.Right)
n.SetOrig(ir.Copy(n))
- setlineno(n.Right())
+ setlineno(n.Ntype)
// Need to handle [...]T arrays specially.
- if array, ok := n.Right().(*ir.ArrayType); ok && array.Elem != nil && array.Len == nil {
+ if array, ok := n.Ntype.(*ir.ArrayType); ok && array.Elem != nil && array.Len == nil {
array.Elem = typecheck(array.Elem, ctxType)
elemType := array.Elem.Type()
if elemType == nil {
n.SetType(nil)
return n
}
- length := typecheckarraylit(elemType, -1, n.List().Slice(), "array literal")
+ length := typecheckarraylit(elemType, -1, n.List.Slice(), "array literal")
n.SetOp(ir.OARRAYLIT)
n.SetType(types.NewArray(elemType, length))
- n.SetRight(nil)
+ n.Ntype = nil
return n
}
- n.SetRight(typecheck(n.Right(), ctxType))
- t := n.Right().Type()
+ n.Ntype = ir.Node(typecheck(n.Ntype, ctxType)).(ir.Ntype)
+ t := n.Ntype.Type()
if t == nil {
n.SetType(nil)
return n
@@ -3040,50 +3040,50 @@ func typecheckcomplit(n *ir.CompLitExpr) (res ir.Node) {
n.SetType(nil)
case types.TARRAY:
- typecheckarraylit(t.Elem(), t.NumElem(), n.List().Slice(), "array literal")
+ typecheckarraylit(t.Elem(), t.NumElem(), n.List.Slice(), "array literal")
n.SetOp(ir.OARRAYLIT)
- n.SetRight(nil)
+ n.Ntype = nil
case types.TSLICE:
- length := typecheckarraylit(t.Elem(), -1, n.List().Slice(), "slice literal")
+ length := typecheckarraylit(t.Elem(), -1, n.List.Slice(), "slice literal")
n.SetOp(ir.OSLICELIT)
- n.SetRight(nil)
+ n.Ntype = nil
n.Len = length
case types.TMAP:
var cs constSet
- for i3, l := range n.List().Slice() {
+ for i3, l := range n.List.Slice() {
setlineno(l)
if l.Op() != ir.OKEY {
- n.List().SetIndex(i3, typecheck(l, ctxExpr))
+ n.List.SetIndex(i3, typecheck(l, ctxExpr))
base.Errorf("missing key in map literal")
continue
}
l := l.(*ir.KeyExpr)
- r := l.Left()
+ r := l.Key
r = pushtype(r, t.Key())
r = typecheck(r, ctxExpr)
- l.SetLeft(assignconv(r, t.Key(), "map key"))
- cs.add(base.Pos, l.Left(), "key", "map literal")
+ l.Key = assignconv(r, t.Key(), "map key")
+ cs.add(base.Pos, l.Key, "key", "map literal")
- r = l.Right()
+ r = l.Value
r = pushtype(r, t.Elem())
r = typecheck(r, ctxExpr)
- l.SetRight(assignconv(r, t.Elem(), "map value"))
+ l.Value = assignconv(r, t.Elem(), "map value")
}
n.SetOp(ir.OMAPLIT)
- n.SetRight(nil)
+ n.Ntype = nil
case types.TSTRUCT:
// Need valid field offsets for Xoffset below.
dowidth(t)
errored := false
- if n.List().Len() != 0 && nokeys(n.List()) {
+ if n.List.Len() != 0 && nokeys(n.List) {
// simple list of variables
- ls := n.List().Slice()
+ ls := n.List.Slice()
for i, n1 := range ls {
setlineno(n1)
n1 = typecheck(n1, ctxExpr)
@@ -3104,7 +3104,7 @@ func typecheckcomplit(n *ir.CompLitExpr) (res ir.Node) {
// No pushtype allowed here. Must name fields for that.
n1 = assignconv(n1, f.Type, "field value")
sk := ir.NewStructKeyExpr(base.Pos, f.Sym, n1)
- sk.SetOffset(f.Offset)
+ sk.Offset = f.Offset
ls[i] = sk
}
if len(ls) < t.NumFields() {
@@ -3114,13 +3114,13 @@ func typecheckcomplit(n *ir.CompLitExpr) (res ir.Node) {
hash := make(map[string]bool)
// keyed list
- ls := n.List().Slice()
+ ls := n.List.Slice()
for i, l := range ls {
setlineno(l)
if l.Op() == ir.OKEY {
kv := l.(*ir.KeyExpr)
- key := kv.Left()
+ key := kv.Key
// Sym might have resolved to name in other top-level
// package, because of import dot. Redirect to correct sym
@@ -3139,7 +3139,7 @@ func typecheckcomplit(n *ir.CompLitExpr) (res ir.Node) {
continue
}
- l = ir.NewStructKeyExpr(l.Pos(), s, kv.Right())
+ l = ir.NewStructKeyExpr(l.Pos(), s, kv.Value)
ls[i] = l
}
@@ -3153,22 +3153,22 @@ func typecheckcomplit(n *ir.CompLitExpr) (res ir.Node) {
}
l := l.(*ir.StructKeyExpr)
- f := lookdot1(nil, l.Sym(), t, t.Fields(), 0)
+ f := lookdot1(nil, l.Field, t, t.Fields(), 0)
if f == nil {
- if ci := lookdot1(nil, l.Sym(), t, t.Fields(), 2); ci != nil { // Case-insensitive lookup.
+ if ci := lookdot1(nil, l.Field, t, t.Fields(), 2); ci != nil { // Case-insensitive lookup.
if visible(ci.Sym) {
- base.Errorf("unknown field '%v' in struct literal of type %v (but does have %v)", l.Sym(), t, ci.Sym)
- } else if nonexported(l.Sym()) && l.Sym().Name == ci.Sym.Name { // Ensure exactness before the suggestion.
- base.Errorf("cannot refer to unexported field '%v' in struct literal of type %v", l.Sym(), t)
+ base.Errorf("unknown field '%v' in struct literal of type %v (but does have %v)", l.Field, t, ci.Sym)
+ } else if nonexported(l.Field) && l.Field.Name == ci.Sym.Name { // Ensure exactness before the suggestion.
+ base.Errorf("cannot refer to unexported field '%v' in struct literal of type %v", l.Field, t)
} else {
- base.Errorf("unknown field '%v' in struct literal of type %v", l.Sym(), t)
+ base.Errorf("unknown field '%v' in struct literal of type %v", l.Field, t)
}
continue
}
var f *types.Field
- p, _ := dotpath(l.Sym(), t, &f, true)
+ p, _ := dotpath(l.Field, t, &f, true)
if p == nil || f.IsMethod() {
- base.Errorf("unknown field '%v' in struct literal of type %v", l.Sym(), t)
+ base.Errorf("unknown field '%v' in struct literal of type %v", l.Field, t)
continue
}
// dotpath returns the parent embedded types in reverse order.
@@ -3176,21 +3176,21 @@ func typecheckcomplit(n *ir.CompLitExpr) (res ir.Node) {
for ei := len(p) - 1; ei >= 0; ei-- {
ep = append(ep, p[ei].field.Sym.Name)
}
- ep = append(ep, l.Sym().Name)
+ ep = append(ep, l.Field.Name)
base.Errorf("cannot use promoted field %v in struct literal of type %v", strings.Join(ep, "."), t)
continue
}
fielddup(f.Sym.Name, hash)
- l.SetOffset(f.Offset)
+ l.Offset = f.Offset
// No pushtype allowed here. Tried and rejected.
- l.SetLeft(typecheck(l.Left(), ctxExpr))
- l.SetLeft(assignconv(l.Left(), f.Type, "field value"))
+ l.Value = typecheck(l.Value, ctxExpr)
+ l.Value = assignconv(l.Value, f.Type, "field value")
}
}
n.SetOp(ir.OSTRUCTLIT)
- n.SetRight(nil)
+ n.Ntype = nil
}
return n
@@ -3215,28 +3215,28 @@ func typecheckarraylit(elemType *types.Type, bound int64, elts []ir.Node, ctx st
var kv *ir.KeyExpr
if elt.Op() == ir.OKEY {
elt := elt.(*ir.KeyExpr)
- elt.SetLeft(typecheck(elt.Left(), ctxExpr))
- key = indexconst(elt.Left())
+ elt.Key = typecheck(elt.Key, ctxExpr)
+ key = indexconst(elt.Key)
if key < 0 {
- if !elt.Left().Diag() {
+ if !elt.Key.Diag() {
if key == -2 {
base.Errorf("index too large")
} else {
base.Errorf("index must be non-negative integer constant")
}
- elt.Left().SetDiag(true)
+ elt.Key.SetDiag(true)
}
key = -(1 << 30) // stay negative for a while
}
kv = elt
- r = elt.Right()
+ r = elt.Value
}
r = pushtype(r, elemType)
r = typecheck(r, ctxExpr)
r = assignconv(r, elemType, ctx)
if kv != nil {
- kv.SetRight(r)
+ kv.Value = r
} else {
elts[i] = r
}
@@ -3280,10 +3280,10 @@ func islvalue(n ir.Node) bool {
switch n.Op() {
case ir.OINDEX:
n := n.(*ir.IndexExpr)
- if n.Left().Type() != nil && n.Left().Type().IsArray() {
- return islvalue(n.Left())
+ if n.X.Type() != nil && n.X.Type().IsArray() {
+ return islvalue(n.X)
}
- if n.Left().Type() != nil && n.Left().Type().IsString() {
+ if n.X.Type() != nil && n.X.Type().IsString() {
return false
}
fallthrough
@@ -3292,11 +3292,11 @@ func islvalue(n ir.Node) bool {
case ir.ODOT:
n := n.(*ir.SelectorExpr)
- return islvalue(n.Left())
+ return islvalue(n.X)
case ir.ONAME:
n := n.(*ir.Name)
- if n.Class() == ir.PFUNC {
+ if n.Class_ == ir.PFUNC {
return false
}
return true
@@ -3332,7 +3332,7 @@ func checkassign(stmt ir.Node, n ir.Node) {
}
if n.Op() == ir.OINDEXMAP {
n := n.(*ir.IndexExpr)
- n.SetIndexMapLValue(true)
+ n.Assigned = true
return
}
@@ -3342,9 +3342,9 @@ func checkassign(stmt ir.Node, n ir.Node) {
}
switch {
- case n.Op() == ir.ODOT && n.(*ir.SelectorExpr).Left().Op() == ir.OINDEXMAP:
+ case n.Op() == ir.ODOT && n.(*ir.SelectorExpr).X.Op() == ir.OINDEXMAP:
base.Errorf("cannot assign to struct field %v in map", n)
- case (n.Op() == ir.OINDEX && n.(*ir.IndexExpr).Left().Type().IsString()) || n.Op() == ir.OSLICESTR:
+ case (n.Op() == ir.OINDEX && n.(*ir.IndexExpr).X.Type().IsString()) || n.Op() == ir.OSLICESTR:
base.Errorf("cannot assign to %v (strings are immutable)", n)
case n.Op() == ir.OLITERAL && n.Sym() != nil && isGoConst(n):
base.Errorf("cannot assign to %v (declared const)", n)
@@ -3387,39 +3387,39 @@ func samesafeexpr(l ir.Node, r ir.Node) bool {
case ir.ODOT, ir.ODOTPTR:
l := l.(*ir.SelectorExpr)
r := r.(*ir.SelectorExpr)
- return l.Sym() != nil && r.Sym() != nil && l.Sym() == r.Sym() && samesafeexpr(l.Left(), r.Left())
+ return l.Sel != nil && r.Sel != nil && l.Sel == r.Sel && samesafeexpr(l.X, r.X)
case ir.ODEREF:
l := l.(*ir.StarExpr)
r := r.(*ir.StarExpr)
- return samesafeexpr(l.Left(), r.Left())
+ return samesafeexpr(l.X, r.X)
case ir.ONOT, ir.OBITNOT, ir.OPLUS, ir.ONEG:
l := l.(*ir.UnaryExpr)
r := r.(*ir.UnaryExpr)
- return samesafeexpr(l.Left(), r.Left())
+ return samesafeexpr(l.X, r.X)
case ir.OCONVNOP:
l := l.(*ir.ConvExpr)
r := r.(*ir.ConvExpr)
- return samesafeexpr(l.Left(), r.Left())
+ return samesafeexpr(l.X, r.X)
case ir.OCONV:
l := l.(*ir.ConvExpr)
r := r.(*ir.ConvExpr)
// Some conversions can't be reused, such as []byte(str).
// Allow only numeric-ish types. This is a bit conservative.
- return issimple[l.Type().Kind()] && samesafeexpr(l.Left(), r.Left())
+ return issimple[l.Type().Kind()] && samesafeexpr(l.X, r.X)
case ir.OINDEX, ir.OINDEXMAP:
l := l.(*ir.IndexExpr)
r := r.(*ir.IndexExpr)
- return samesafeexpr(l.Left(), r.Left()) && samesafeexpr(l.Right(), r.Right())
+ return samesafeexpr(l.X, r.X) && samesafeexpr(l.Index, r.Index)
case ir.OADD, ir.OSUB, ir.OOR, ir.OXOR, ir.OMUL, ir.OLSH, ir.ORSH, ir.OAND, ir.OANDNOT, ir.ODIV, ir.OMOD:
l := l.(*ir.BinaryExpr)
r := r.(*ir.BinaryExpr)
- return samesafeexpr(l.Left(), r.Left()) && samesafeexpr(l.Right(), r.Right())
+ return samesafeexpr(l.X, r.X) && samesafeexpr(l.Y, r.Y)
case ir.OLITERAL:
return constant.Compare(l.Val(), token.EQL, r.Val())
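
Aside for context (not part of the patch): after the rename, samesafeexpr's recursion reads purpose-named operand fields (X, Y, Index) instead of the old generic Left/Right accessors. A toy mirror of that shape, over hypothetical stand-in node types:

package main

import "fmt"

type Expr interface{}

type Name struct{ Ident string }
type BinaryExpr struct {
	Op   string
	X, Y Expr
}

// sameExpr mirrors the samesafeexpr pattern: match the node kind,
// then recurse into the named operand fields.
func sameExpr(l, r Expr) bool {
	switch l := l.(type) {
	case *Name:
		r, ok := r.(*Name)
		return ok && l.Ident == r.Ident
	case *BinaryExpr:
		r, ok := r.(*BinaryExpr)
		return ok && l.Op == r.Op && sameExpr(l.X, r.X) && sameExpr(l.Y, r.Y)
	}
	return false
}

func main() {
	a := &BinaryExpr{"+", &Name{"i"}, &Name{"j"}}
	b := &BinaryExpr{"+", &Name{"i"}, &Name{"j"}}
	fmt.Println(sameExpr(a, b)) // true
}
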
@@ -3446,30 +3446,30 @@ func typecheckas(n *ir.AssignStmt) {
// if the variable has a type (ntype) then typechecking
// will not look at defn, so it is okay (and desirable,
// so that the conversion below happens).
- n.SetLeft(resolve(n.Left()))
+ n.X = resolve(n.X)
- if !ir.DeclaredBy(n.Left(), n) || n.Left().Name().Ntype != nil {
- n.SetLeft(typecheck(n.Left(), ctxExpr|ctxAssign))
+ if !ir.DeclaredBy(n.X, n) || n.X.Name().Ntype != nil {
+ n.X = typecheck(n.X, ctxExpr|ctxAssign)
}
// Use ctxMultiOK so we can emit an "N variables but M values" error
// to be consistent with typecheckas2 (#26616).
- n.SetRight(typecheck(n.Right(), ctxExpr|ctxMultiOK))
- checkassign(n, n.Left())
- if n.Right() != nil && n.Right().Type() != nil {
- if n.Right().Type().IsFuncArgStruct() {
- base.Errorf("assignment mismatch: 1 variable but %v returns %d values", n.Right().(*ir.CallExpr).Left(), n.Right().Type().NumFields())
+ n.Y = typecheck(n.Y, ctxExpr|ctxMultiOK)
+ checkassign(n, n.X)
+ if n.Y != nil && n.Y.Type() != nil {
+ if n.Y.Type().IsFuncArgStruct() {
+ base.Errorf("assignment mismatch: 1 variable but %v returns %d values", n.Y.(*ir.CallExpr).X, n.Y.Type().NumFields())
// Multi-value RHS isn't actually valid for OAS; nil out
// to indicate failed typechecking.
- n.Right().SetType(nil)
- } else if n.Left().Type() != nil {
- n.SetRight(assignconv(n.Right(), n.Left().Type(), "assignment"))
+ n.Y.SetType(nil)
+ } else if n.X.Type() != nil {
+ n.Y = assignconv(n.Y, n.X.Type(), "assignment")
}
}
- if ir.DeclaredBy(n.Left(), n) && n.Left().Name().Ntype == nil {
- n.SetRight(defaultlit(n.Right(), nil))
- n.Left().SetType(n.Right().Type())
+ if ir.DeclaredBy(n.X, n) && n.X.Name().Ntype == nil {
+ n.Y = defaultlit(n.Y, nil)
+ n.X.SetType(n.Y.Type())
}
// second half of dance.
@@ -3477,11 +3477,11 @@ func typecheckas(n *ir.AssignStmt) {
// just to get it over with. see dance above.
n.SetTypecheck(1)
- if n.Left().Typecheck() == 0 {
- n.SetLeft(typecheck(n.Left(), ctxExpr|ctxAssign))
+ if n.X.Typecheck() == 0 {
+ n.X = typecheck(n.X, ctxExpr|ctxAssign)
}
- if !ir.IsBlank(n.Left()) {
- checkwidth(n.Left().Type()) // ensure width is calculated for backend
+ if !ir.IsBlank(n.X) {
+ checkwidth(n.X.Type()) // ensure width is calculated for backend
}
}
@@ -3497,7 +3497,7 @@ func typecheckas2(n *ir.AssignListStmt) {
defer tracePrint("typecheckas2", n)(nil)
}
- ls := n.List().Slice()
+ ls := n.Lhs.Slice()
for i1, n1 := range ls {
// delicate little dance.
n1 = resolve(n1)
@@ -3508,21 +3508,21 @@ func typecheckas2(n *ir.AssignListStmt) {
}
}
- cl := n.List().Len()
- cr := n.Rlist().Len()
+ cl := n.Lhs.Len()
+ cr := n.Rhs.Len()
if cl > 1 && cr == 1 {
- n.Rlist().SetFirst(typecheck(n.Rlist().First(), ctxExpr|ctxMultiOK))
+ n.Rhs.SetFirst(typecheck(n.Rhs.First(), ctxExpr|ctxMultiOK))
} else {
- typecheckslice(n.Rlist().Slice(), ctxExpr)
+ typecheckslice(n.Rhs.Slice(), ctxExpr)
}
- checkassignlist(n, n.List())
+ checkassignlist(n, n.Lhs)
var l ir.Node
var r ir.Node
if cl == cr {
// easy
- ls := n.List().Slice()
- rs := n.Rlist().Slice()
+ ls := n.Lhs.Slice()
+ rs := n.Rhs.Slice()
for il, nl := range ls {
nr := rs[il]
if nl.Type() != nil && nr.Type() != nil {
@@ -3537,8 +3537,8 @@ func typecheckas2(n *ir.AssignListStmt) {
goto out
}
- l = n.List().First()
- r = n.Rlist().First()
+ l = n.Lhs.First()
+ r = n.Rhs.First()
// x,y,z = f()
if cr == 1 {
@@ -3556,7 +3556,7 @@ func typecheckas2(n *ir.AssignListStmt) {
}
r.(*ir.CallExpr).Use = ir.CallUseList
n.SetOp(ir.OAS2FUNC)
- for i, l := range n.List().Slice() {
+ for i, l := range n.Lhs.Slice() {
f := r.Type().Field(i)
if f.Type != nil && l.Type() != nil {
checkassignto(f.Type, l)
@@ -3592,7 +3592,7 @@ func typecheckas2(n *ir.AssignListStmt) {
if ir.DeclaredBy(l, n) {
l.SetType(r.Type())
}
- l := n.List().Second()
+ l := n.Lhs.Second()
if l.Type() != nil && !l.Type().IsBoolean() {
checkassignto(types.Types[types.TBOOL], l)
}
@@ -3609,13 +3609,13 @@ mismatch:
base.Errorf("assignment mismatch: %d variables but %d values", cl, cr)
case ir.OCALLFUNC, ir.OCALLMETH, ir.OCALLINTER:
r := r.(*ir.CallExpr)
- base.Errorf("assignment mismatch: %d variables but %v returns %d values", cl, r.Left(), cr)
+ base.Errorf("assignment mismatch: %d variables but %v returns %d values", cl, r.X, cr)
}
// second half of dance
out:
n.SetTypecheck(1)
- ls = n.List().Slice()
+ ls = n.Lhs.Slice()
for i1, n1 := range ls {
if n1.Typecheck() == 0 {
ls[i1] = typecheck(ls[i1], ctxExpr|ctxAssign)
@@ -3632,7 +3632,7 @@ func typecheckfunc(n *ir.Func) {
}
for _, ln := range n.Dcl {
- if ln.Op() == ir.ONAME && (ln.Class() == ir.PPARAM || ln.Class() == ir.PPARAMOUT) {
+ if ln.Op() == ir.ONAME && (ln.Class_ == ir.PPARAM || ln.Class_ == ir.PPARAMOUT) {
ln.Decldepth = 1
}
}
@@ -3662,19 +3662,19 @@ func typecheckfunc(n *ir.Func) {
// The result of stringtoruneslit MUST be assigned back to n, e.g.
// n.Left = stringtoruneslit(n.Left)
func stringtoruneslit(n *ir.ConvExpr) ir.Node {
- if n.Left().Op() != ir.OLITERAL || n.Left().Val().Kind() != constant.String {
+ if n.X.Op() != ir.OLITERAL || n.X.Val().Kind() != constant.String {
base.Fatalf("stringtoarraylit %v", n)
}
var l []ir.Node
i := 0
- for _, r := range ir.StringVal(n.Left()) {
+ for _, r := range ir.StringVal(n.X) {
l = append(l, ir.NewKeyExpr(base.Pos, nodintconst(int64(i)), nodintconst(int64(r))))
i++
}
nn := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(n.Type()).(ir.Ntype), nil)
- nn.PtrList().Set(l)
+ nn.List.Set(l)
return typecheck(nn, ctxExpr)
}
@@ -3837,7 +3837,7 @@ func typecheckdef(n ir.Node) {
break
}
if n.Name().Defn == nil {
- if n.SubOp() != 0 { // like OPRINTN
+ if n.BuiltinOp != 0 { // like OPRINTN
break
}
if base.Errors() > 0 {
@@ -3945,10 +3945,10 @@ func markBreak(fn *ir.Func) {
case ir.OBREAK:
n := n.(*ir.BranchStmt)
- if n.Sym() == nil {
+ if n.Label == nil {
setHasBreak(implicit)
} else {
- setHasBreak(labels[n.Sym()])
+ setHasBreak(labels[n.Label])
}
case ir.OFOR, ir.OFORUNTIL, ir.OSWITCH, ir.OSELECT, ir.ORANGE:
@@ -3957,13 +3957,13 @@ func markBreak(fn *ir.Func) {
var sym *types.Sym
switch n := n.(type) {
case *ir.ForStmt:
- sym = n.Sym()
+ sym = n.Label
case *ir.RangeStmt:
- sym = n.Sym()
+ sym = n.Label
case *ir.SelectStmt:
- sym = n.Sym()
+ sym = n.Label
case *ir.SwitchStmt:
- sym = n.Sym()
+ sym = n.Label
}
if sym != nil {
if labels == nil {
@@ -3990,13 +3990,13 @@ func controlLabel(n ir.Node) *types.Sym {
base.Fatalf("controlLabel %+v", n.Op())
return nil
case *ir.ForStmt:
- return n.Sym()
+ return n.Label
case *ir.RangeStmt:
- return n.Sym()
+ return n.Label
case *ir.SelectStmt:
- return n.Sym()
+ return n.Label
case *ir.SwitchStmt:
- return n.Sym()
+ return n.Label
}
}
@@ -4007,13 +4007,13 @@ func setHasBreak(n ir.Node) {
case nil:
// ignore
case *ir.ForStmt:
- n.SetHasBreak(true)
+ n.HasBreak = true
case *ir.RangeStmt:
- n.SetHasBreak(true)
+ n.HasBreak = true
case *ir.SelectStmt:
- n.SetHasBreak(true)
+ n.HasBreak = true
case *ir.SwitchStmt:
- n.SetHasBreak(true)
+ n.HasBreak = true
}
}
@@ -4038,37 +4038,37 @@ func isTermNode(n ir.Node) bool {
case ir.OBLOCK:
n := n.(*ir.BlockStmt)
- return isTermNodes(n.List())
+ return isTermNodes(n.List)
case ir.OGOTO, ir.ORETURN, ir.ORETJMP, ir.OPANIC, ir.OFALL:
return true
case ir.OFOR, ir.OFORUNTIL:
n := n.(*ir.ForStmt)
- if n.Left() != nil {
+ if n.Cond != nil {
return false
}
- if n.HasBreak() {
+ if n.HasBreak {
return false
}
return true
case ir.OIF:
n := n.(*ir.IfStmt)
- return isTermNodes(n.Body()) && isTermNodes(n.Rlist())
+ return isTermNodes(n.Body) && isTermNodes(n.Else)
case ir.OSWITCH:
n := n.(*ir.SwitchStmt)
- if n.HasBreak() {
+ if n.HasBreak {
return false
}
def := false
- for _, cas := range n.List().Slice() {
+ for _, cas := range n.Cases.Slice() {
cas := cas.(*ir.CaseStmt)
- if !isTermNodes(cas.Body()) {
+ if !isTermNodes(cas.Body) {
return false
}
- if cas.List().Len() == 0 { // default
+ if cas.List.Len() == 0 { // default
def = true
}
}
@@ -4076,12 +4076,12 @@ func isTermNode(n ir.Node) bool {
case ir.OSELECT:
n := n.(*ir.SelectStmt)
- if n.HasBreak() {
+ if n.HasBreak {
return false
}
- for _, cas := range n.List().Slice() {
+ for _, cas := range n.Cases.Slice() {
cas := cas.(*ir.CaseStmt)
- if !isTermNodes(cas.Body()) {
+ if !isTermNodes(cas.Body) {
return false
}
}
@@ -4093,34 +4093,34 @@ func isTermNode(n ir.Node) bool {
// checkreturn makes sure that fn terminates appropriately.
func checkreturn(fn *ir.Func) {
- if fn.Type().NumResults() != 0 && fn.Body().Len() != 0 {
+ if fn.Type().NumResults() != 0 && fn.Body.Len() != 0 {
markBreak(fn)
- if !isTermNodes(fn.Body()) {
+ if !isTermNodes(fn.Body) {
base.ErrorfAt(fn.Endlineno, "missing return at end of function")
}
}
}
func deadcode(fn *ir.Func) {
- deadcodeslice(fn.PtrBody())
+ deadcodeslice(&fn.Body)
- if fn.Body().Len() == 0 {
+ if fn.Body.Len() == 0 {
return
}
- for _, n := range fn.Body().Slice() {
+ for _, n := range fn.Body.Slice() {
if n.Init().Len() > 0 {
return
}
switch n.Op() {
case ir.OIF:
n := n.(*ir.IfStmt)
- if !ir.IsConst(n.Left(), constant.Bool) || n.Body().Len() > 0 || n.Rlist().Len() > 0 {
+ if !ir.IsConst(n.Cond, constant.Bool) || n.Body.Len() > 0 || n.Else.Len() > 0 {
return
}
case ir.OFOR:
n := n.(*ir.ForStmt)
- if !ir.IsConst(n.Left(), constant.Bool) || ir.BoolVal(n.Left()) {
+ if !ir.IsConst(n.Cond, constant.Bool) || ir.BoolVal(n.Cond) {
return
}
default:
@@ -4128,7 +4128,7 @@ func deadcode(fn *ir.Func) {
}
}
- fn.PtrBody().Set([]ir.Node{ir.NewBlockStmt(base.Pos, nil)})
+ fn.Body.Set([]ir.Node{ir.NewBlockStmt(base.Pos, nil)})
}
func deadcodeslice(nn *ir.Nodes) {
@@ -4148,15 +4148,15 @@ func deadcodeslice(nn *ir.Nodes) {
}
if n.Op() == ir.OIF {
n := n.(*ir.IfStmt)
- n.SetLeft(deadcodeexpr(n.Left()))
- if ir.IsConst(n.Left(), constant.Bool) {
+ n.Cond = deadcodeexpr(n.Cond)
+ if ir.IsConst(n.Cond, constant.Bool) {
var body ir.Nodes
- if ir.BoolVal(n.Left()) {
- n.SetRlist(ir.Nodes{})
- body = n.Body()
+ if ir.BoolVal(n.Cond) {
+ n.Else = ir.Nodes{}
+ body = n.Body
} else {
- n.SetBody(ir.Nodes{})
- body = n.Rlist()
+ n.Body = ir.Nodes{}
+ body = n.Else
}
// If "then" or "else" branch ends with panic or return statement,
// it is safe to remove all statements after this node.
@@ -4178,26 +4178,26 @@ func deadcodeslice(nn *ir.Nodes) {
switch n.Op() {
case ir.OBLOCK:
n := n.(*ir.BlockStmt)
- deadcodeslice(n.PtrList())
+ deadcodeslice(&n.List)
case ir.OCASE:
n := n.(*ir.CaseStmt)
- deadcodeslice(n.PtrBody())
+ deadcodeslice(&n.Body)
case ir.OFOR:
n := n.(*ir.ForStmt)
- deadcodeslice(n.PtrBody())
+ deadcodeslice(&n.Body)
case ir.OIF:
n := n.(*ir.IfStmt)
- deadcodeslice(n.PtrBody())
- deadcodeslice(n.PtrRlist())
+ deadcodeslice(&n.Body)
+ deadcodeslice(&n.Else)
case ir.ORANGE:
n := n.(*ir.RangeStmt)
- deadcodeslice(n.PtrBody())
+ deadcodeslice(&n.Body)
case ir.OSELECT:
n := n.(*ir.SelectStmt)
- deadcodeslice(n.PtrList())
+ deadcodeslice(&n.Cases)
case ir.OSWITCH:
n := n.(*ir.SwitchStmt)
- deadcodeslice(n.PtrList())
+ deadcodeslice(&n.Cases)
}
if cut {
@@ -4214,24 +4214,24 @@ func deadcodeexpr(n ir.Node) ir.Node {
switch n.Op() {
case ir.OANDAND:
n := n.(*ir.LogicalExpr)
- n.SetLeft(deadcodeexpr(n.Left()))
- n.SetRight(deadcodeexpr(n.Right()))
- if ir.IsConst(n.Left(), constant.Bool) {
- if ir.BoolVal(n.Left()) {
- return n.Right() // true && x => x
+ n.X = deadcodeexpr(n.X)
+ n.Y = deadcodeexpr(n.Y)
+ if ir.IsConst(n.X, constant.Bool) {
+ if ir.BoolVal(n.X) {
+ return n.Y // true && x => x
} else {
- return n.Left() // false && x => false
+ return n.X // false && x => false
}
}
case ir.OOROR:
n := n.(*ir.LogicalExpr)
- n.SetLeft(deadcodeexpr(n.Left()))
- n.SetRight(deadcodeexpr(n.Right()))
- if ir.IsConst(n.Left(), constant.Bool) {
- if ir.BoolVal(n.Left()) {
- return n.Left() // true || x => true
+ n.X = deadcodeexpr(n.X)
+ n.Y = deadcodeexpr(n.Y)
+ if ir.IsConst(n.X, constant.Bool) {
+ if ir.BoolVal(n.X) {
+ return n.X // true || x => true
} else {
- return n.Right() // false || x => x
+ return n.Y // false || x => x
}
}
}
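
Aside for context (not part of the patch): the folding rules deadcodeexpr applies to && and || are exactly the ones spelled out in the comments above (true && x => x, false && x => false, true || x => true, false || x => x). The same logic over a hypothetical toy tree:

package main

import "fmt"

type Node interface{}
type Bool struct{ Val bool }
type Logical struct {
	Op   string
	X, Y Node
}

// fold recursively simplifies the operands, then applies the
// short-circuit identities when the left operand is a constant.
func fold(n Node) Node {
	l, ok := n.(*Logical)
	if !ok {
		return n
	}
	l.X, l.Y = fold(l.X), fold(l.Y)
	if b, ok := l.X.(*Bool); ok {
		switch {
		case l.Op == "&&" && b.Val:
			return l.Y // true && x => x
		case l.Op == "&&":
			return l.X // false && x => false
		case l.Op == "||" && b.Val:
			return l.X // true || x => true
		default:
			return l.Y // false || x => x
		}
	}
	return l
}

func main() {
	n := fold(&Logical{"&&", &Bool{true}, &Logical{"||", &Bool{false}, &Bool{true}}})
	fmt.Printf("%#v\n", n) // folds to &main.Bool{Val:true}
}
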
@@ -4247,8 +4247,8 @@ func getIotaValue() int64 {
}
}
- if Curfn != nil && Curfn.Iota() >= 0 {
- return Curfn.Iota()
+ if Curfn != nil && Curfn.Iota >= 0 {
+ return Curfn.Iota
}
return -1
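
Aside for context (not part of the patch): the mechanical shape of the whole rewrite, before moving on to the remaining files. A getter/setter pair over a generic graph slot is replaced by a single exported, purpose-named field, so call sites read n.Cond rather than n.Left(). The types below are hypothetical stand-ins, not the real ir nodes:

package main

import "fmt"

// Before: generic graph accessors over a private field.
type ifStmtOld struct{ left interface{} }

func (n *ifStmtOld) Left() interface{}     { return n.left }
func (n *ifStmtOld) SetLeft(x interface{}) { n.left = x }

// After: a concrete field with a meaningful name.
type IfStmt struct{ Cond interface{} }

func main() {
	old := &ifStmtOld{}
	old.SetLeft("cond") // old style: n.SetLeft(typecheck(n.Left(), ...))
	fmt.Println(old.Left())

	n := &IfStmt{}
	n.Cond = "cond" // new style: n.Cond = typecheck(n.Cond, ...)
	fmt.Println(n.Cond)
}
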
diff --git a/src/cmd/compile/internal/gc/universe.go b/src/cmd/compile/internal/gc/universe.go
index e11c0eb92c..cf20583042 100644
--- a/src/cmd/compile/internal/gc/universe.go
+++ b/src/cmd/compile/internal/gc/universe.go
@@ -152,14 +152,14 @@ func initUniverse() {
for _, s := range &builtinFuncs {
s2 := types.BuiltinPkg.Lookup(s.name)
def := NewName(s2)
- def.SetSubOp(s.op)
+ def.BuiltinOp = s.op
s2.Def = def
}
for _, s := range &unsafeFuncs {
s2 := unsafepkg.Lookup(s.name)
def := NewName(s2)
- def.SetSubOp(s.op)
+ def.BuiltinOp = s.op
s2.Def = def
}
@@ -342,6 +342,6 @@ func finishUniverse() {
nodfp = NewName(lookup(".fp"))
nodfp.SetType(types.Types[types.TINT32])
- nodfp.SetClass(ir.PPARAM)
+ nodfp.Class_ = ir.PPARAM
nodfp.SetUsed(true)
}
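
Aside for context (not part of the patch): with BuiltinOp a plain field, universe setup stores the op directly on the name, and typecheck1 can dispatch on it (l.BuiltinOp != 0, as seen earlier). A sketch of that registration pattern, with hypothetical Op values and node type:

package main

import "fmt"

type Op int

const (
	OXXX Op = iota // zero value: not a builtin
	OLEN
	OCAP
)

type NameNode struct {
	Ident     string
	BuiltinOp Op
}

func register(table map[string]*NameNode, ident string, op Op) {
	table[ident] = &NameNode{Ident: ident, BuiltinOp: op}
}

func main() {
	universe := map[string]*NameNode{}
	register(universe, "len", OLEN)
	register(universe, "cap", OCAP)

	if n := universe["len"]; n.BuiltinOp != 0 {
		fmt.Printf("%s is builtin op %d\n", n.Ident, n.BuiltinOp)
	}
}
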
diff --git a/src/cmd/compile/internal/gc/unsafe.go b/src/cmd/compile/internal/gc/unsafe.go
index eeedea396e..cecc8720a9 100644
--- a/src/cmd/compile/internal/gc/unsafe.go
+++ b/src/cmd/compile/internal/gc/unsafe.go
@@ -14,9 +14,9 @@ func evalunsafe(n ir.Node) int64 {
switch n.Op() {
case ir.OALIGNOF, ir.OSIZEOF:
n := n.(*ir.UnaryExpr)
- n.SetLeft(typecheck(n.Left(), ctxExpr))
- n.SetLeft(defaultlit(n.Left(), nil))
- tr := n.Left().Type()
+ n.X = typecheck(n.X, ctxExpr)
+ n.X = defaultlit(n.X, nil)
+ tr := n.X.Type()
if tr == nil {
return 0
}
@@ -29,20 +29,20 @@ func evalunsafe(n ir.Node) int64 {
case ir.OOFFSETOF:
// must be a selector.
n := n.(*ir.UnaryExpr)
- if n.Left().Op() != ir.OXDOT {
+ if n.X.Op() != ir.OXDOT {
base.Errorf("invalid expression %v", n)
return 0
}
- sel := n.Left().(*ir.SelectorExpr)
+ sel := n.X.(*ir.SelectorExpr)
// Remember base of selector to find it back after dot insertion.
// Since r->left may be mutated by typechecking, check it explicitly
// first to track it correctly.
- sel.SetLeft(typecheck(sel.Left(), ctxExpr))
- sbase := sel.Left()
+ sel.X = typecheck(sel.X, ctxExpr)
+ sbase := sel.X
tsel := typecheck(sel, ctxExpr)
- n.SetLeft(tsel)
+ n.X = tsel
if tsel.Type() == nil {
return 0
}
@@ -67,15 +67,15 @@ func evalunsafe(n ir.Node) int64 {
// but accessing f must not otherwise involve
// indirection via embedded pointer types.
r := r.(*ir.SelectorExpr)
- if r.Left() != sbase {
- base.Errorf("invalid expression %v: selector implies indirection of embedded %v", n, r.Left())
+ if r.X != sbase {
+ base.Errorf("invalid expression %v: selector implies indirection of embedded %v", n, r.X)
return 0
}
fallthrough
case ir.ODOT:
r := r.(*ir.SelectorExpr)
- v += r.Offset()
- next = r.Left()
+ v += r.Offset
+ next = r.X
default:
ir.Dump("unsafenmagic", tsel)
base.Fatalf("impossible %v node after dot insertion", r.Op())
diff --git a/src/cmd/compile/internal/gc/walk.go b/src/cmd/compile/internal/gc/walk.go
index 91b7a184cf..3fd6c97d68 100644
--- a/src/cmd/compile/internal/gc/walk.go
+++ b/src/cmd/compile/internal/gc/walk.go
@@ -33,14 +33,14 @@ func walk(fn *ir.Func) {
if base.Flag.W != 0 {
s := fmt.Sprintf("\nbefore walk %v", Curfn.Sym())
- ir.DumpList(s, Curfn.Body())
+ ir.DumpList(s, Curfn.Body)
}
lno := base.Pos
// Final typecheck for any unused variables.
for i, ln := range fn.Dcl {
- if ln.Op() == ir.ONAME && (ln.Class() == ir.PAUTO || ln.Class() == ir.PAUTOHEAP) {
+ if ln.Op() == ir.ONAME && (ln.Class_ == ir.PAUTO || ln.Class_ == ir.PAUTOHEAP) {
ln = typecheck(ln, ctxExpr|ctxAssign).(*ir.Name)
fn.Dcl[i] = ln
}
@@ -48,13 +48,13 @@ func walk(fn *ir.Func) {
// Propagate the used flag for typeswitch variables up to the NONAME in its definition.
for _, ln := range fn.Dcl {
- if ln.Op() == ir.ONAME && (ln.Class() == ir.PAUTO || ln.Class() == ir.PAUTOHEAP) && ln.Defn != nil && ln.Defn.Op() == ir.OTYPESW && ln.Used() {
+ if ln.Op() == ir.ONAME && (ln.Class_ == ir.PAUTO || ln.Class_ == ir.PAUTOHEAP) && ln.Defn != nil && ln.Defn.Op() == ir.OTYPESW && ln.Used() {
ln.Defn.(*ir.TypeSwitchGuard).Used = true
}
}
for _, ln := range fn.Dcl {
- if ln.Op() != ir.ONAME || (ln.Class() != ir.PAUTO && ln.Class() != ir.PAUTOHEAP) || ln.Sym().Name[0] == '&' || ln.Used() {
+ if ln.Op() != ir.ONAME || (ln.Class_ != ir.PAUTO && ln.Class_ != ir.PAUTOHEAP) || ln.Sym().Name[0] == '&' || ln.Used() {
continue
}
if defn, ok := ln.Defn.(*ir.TypeSwitchGuard); ok {
@@ -72,10 +72,10 @@ func walk(fn *ir.Func) {
if base.Errors() > errorsBefore {
return
}
- walkstmtlist(Curfn.Body().Slice())
+ walkstmtlist(Curfn.Body.Slice())
if base.Flag.W != 0 {
s := fmt.Sprintf("after walk %v", Curfn.Sym())
- ir.DumpList(s, Curfn.Body())
+ ir.DumpList(s, Curfn.Body)
}
zeroResults()
@@ -98,7 +98,7 @@ func walkstmtlist(s []ir.Node) {
func paramoutheap(fn *ir.Func) bool {
for _, ln := range fn.Dcl {
- switch ln.Class() {
+ switch ln.Class_ {
case ir.PPARAMOUT:
if isParamStackCopy(ln) || ln.Addrtaken() {
return true
@@ -189,8 +189,8 @@ func walkstmt(n ir.Node) ir.Node {
init := n.Init()
n.PtrInit().Set(nil)
- n.SetLeft(walkexpr(n.Left(), &init))
- call := walkexpr(mkcall1(chanfn("chanrecv1", 2, n.Left().Type()), nil, &init, n.Left(), nodnil()), &init)
+ n.X = walkexpr(n.X, &init)
+ call := walkexpr(mkcall1(chanfn("chanrecv1", 2, n.X.Type()), nil, &init, n.X, nodnil()), &init)
return initExpr(init.Slice(), call)
case ir.OBREAK,
@@ -208,20 +208,20 @@ func walkstmt(n ir.Node) ir.Node {
case ir.ODCL:
n := n.(*ir.Decl)
- v := n.Left().(*ir.Name)
- if v.Class() == ir.PAUTOHEAP {
+ v := n.X.(*ir.Name)
+ if v.Class_ == ir.PAUTOHEAP {
if base.Flag.CompilingRuntime {
base.Errorf("%v escapes to heap, not allowed in runtime", v)
}
nn := ir.NewAssignStmt(base.Pos, v.Name().Heapaddr, callnew(v.Type()))
- nn.SetColas(true)
+ nn.Def = true
return walkstmt(typecheck(nn, ctxStmt))
}
return n
case ir.OBLOCK:
n := n.(*ir.BlockStmt)
- walkstmtlist(n.List().Slice())
+ walkstmtlist(n.List.Slice())
return n
case ir.OCASE:
@@ -247,33 +247,33 @@ func walkstmt(n ir.Node) ir.Node {
case ir.OGO:
n := n.(*ir.GoDeferStmt)
var init ir.Nodes
- switch call := n.Left(); call.Op() {
+ switch call := n.Call; call.Op() {
case ir.OPRINT, ir.OPRINTN:
call := call.(*ir.CallExpr)
- n.SetLeft(wrapCall(call, &init))
+ n.Call = wrapCall(call, &init)
case ir.ODELETE:
call := call.(*ir.CallExpr)
- if mapfast(call.List().First().Type()) == mapslow {
- n.SetLeft(wrapCall(call, &init))
+ if mapfast(call.Args.First().Type()) == mapslow {
+ n.Call = wrapCall(call, &init)
} else {
- n.SetLeft(walkexpr(call, &init))
+ n.Call = walkexpr(call, &init)
}
case ir.OCOPY:
call := call.(*ir.BinaryExpr)
- n.SetLeft(copyany(call, &init, true))
+ n.Call = copyany(call, &init, true)
case ir.OCALLFUNC, ir.OCALLMETH, ir.OCALLINTER:
call := call.(*ir.CallExpr)
- if call.Body().Len() > 0 {
- n.SetLeft(wrapCall(call, &init))
+ if call.Body.Len() > 0 {
+ n.Call = wrapCall(call, &init)
} else {
- n.SetLeft(walkexpr(call, &init))
+ n.Call = walkexpr(call, &init)
}
default:
- n.SetLeft(walkexpr(call, &init))
+ n.Call = walkexpr(call, &init)
}
if init.Len() > 0 {
init.Append(n)
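The OGO/ODEFER rewriting above (wrapCall and friends) must preserve the language rule that call arguments are evaluated at the go/defer statement itself, not when the call eventually runs; any init statements produced while walking the call are emitted before the statement. A small illustration of the invariant:

package main

import "fmt"

func main() {
	x := 1
	// The argument is captured now; init statements from walking it
	// run before the defer, never inside it.
	defer fmt.Println("deferred sees:", x)
	x = 2
	fmt.Println("now:", x)
	// Output:
	// now: 2
	// deferred sees: 1
}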
@@ -283,41 +283,41 @@ func walkstmt(n ir.Node) ir.Node {
case ir.OFOR, ir.OFORUNTIL:
n := n.(*ir.ForStmt)
- if n.Left() != nil {
- walkstmtlist(n.Left().Init().Slice())
- init := n.Left().Init()
- n.Left().PtrInit().Set(nil)
- n.SetLeft(walkexpr(n.Left(), &init))
- n.SetLeft(initExpr(init.Slice(), n.Left()))
+ if n.Cond != nil {
+ walkstmtlist(n.Cond.Init().Slice())
+ init := n.Cond.Init()
+ n.Cond.PtrInit().Set(nil)
+ n.Cond = walkexpr(n.Cond, &init)
+ n.Cond = initExpr(init.Slice(), n.Cond)
}
- n.SetRight(walkstmt(n.Right()))
+ n.Post = walkstmt(n.Post)
if n.Op() == ir.OFORUNTIL {
- walkstmtlist(n.List().Slice())
+ walkstmtlist(n.Late.Slice())
}
- walkstmtlist(n.Body().Slice())
+ walkstmtlist(n.Body.Slice())
return n
case ir.OIF:
n := n.(*ir.IfStmt)
- n.SetLeft(walkexpr(n.Left(), n.PtrInit()))
- walkstmtlist(n.Body().Slice())
- walkstmtlist(n.Rlist().Slice())
+ n.Cond = walkexpr(n.Cond, n.PtrInit())
+ walkstmtlist(n.Body.Slice())
+ walkstmtlist(n.Else.Slice())
return n
case ir.ORETURN:
n := n.(*ir.ReturnStmt)
Curfn.NumReturns++
- if n.List().Len() == 0 {
+ if n.Results.Len() == 0 {
return n
}
- if (hasNamedResults(Curfn) && n.List().Len() > 1) || paramoutheap(Curfn) {
+ if (hasNamedResults(Curfn) && n.Results.Len() > 1) || paramoutheap(Curfn) {
// assign to the function out parameters,
// so that ascompatee can fix up conflicts
var rl []ir.Node
for _, ln := range Curfn.Dcl {
- cl := ln.Class()
+ cl := ln.Class_
if cl == ir.PAUTO || cl == ir.PAUTOHEAP {
break
}
@@ -330,23 +330,23 @@ func walkstmt(n ir.Node) ir.Node {
}
}
- if got, want := n.List().Len(), len(rl); got != want {
+ if got, want := n.Results.Len(), len(rl); got != want {
// order should have rewritten multi-value function calls
// with explicit OAS2FUNC nodes.
base.Fatalf("expected %v return arguments, have %v", want, got)
}
// move function calls out, to make ascompatee's job easier.
- walkexprlistsafe(n.List().Slice(), n.PtrInit())
+ walkexprlistsafe(n.Results.Slice(), n.PtrInit())
- n.PtrList().Set(ascompatee(n.Op(), rl, n.List().Slice(), n.PtrInit()))
+ n.Results.Set(ascompatee(n.Op(), rl, n.Results.Slice(), n.PtrInit()))
return n
}
- walkexprlist(n.List().Slice(), n.PtrInit())
+ walkexprlist(n.Results.Slice(), n.PtrInit())
// For each return parameter (lhs), assign the corresponding result (rhs).
lhs := Curfn.Type().Results()
- rhs := n.List().Slice()
+ rhs := n.Results.Slice()
res := make([]ir.Node, lhs.NumFields())
for i, nl := range lhs.FieldSlice() {
nname := ir.AsNode(nl.Nname)
@@ -356,7 +356,7 @@ func walkstmt(n ir.Node) ir.Node {
a := ir.NewAssignStmt(base.Pos, nname, rhs[i])
res[i] = convas(a, n.PtrInit())
}
- n.PtrList().Set(res)
+ n.Results.Set(res)
return n
case ir.ORETJMP:
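When a function has named results and more than one return value, walkstmt lowers "return a, b" into assignments to the result parameters, and ascompatee untangles conflicts between the two sides. A sketch of the behavior this guarantees:

package main

import "fmt"

// "return b, a" is lowered to the parallel assignment a, b = b, a
// followed by a bare return; ascompatee inserts temporaries so the
// swap is not clobbered.
func swap() (a, b int) {
	a, b = 1, 2
	return b, a
}

func main() {
	fmt.Println(swap()) // 2 1
}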
@@ -499,10 +499,10 @@ func walkexpr(n ir.Node, init *ir.Nodes) ir.Node {
base.Fatalf("expression has untyped type: %+v", n)
}
- if n.Op() == ir.ONAME && n.(*ir.Name).Class() == ir.PAUTOHEAP {
+ if n.Op() == ir.ONAME && n.(*ir.Name).Class_ == ir.PAUTOHEAP {
n := n.(*ir.Name)
nn := ir.NewStarExpr(base.Pos, n.Name().Heapaddr)
- nn.Left().MarkNonNil()
+ nn.X.MarkNonNil()
return walkexpr(typecheck(nn, ctxExpr), init)
}
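A PAUTOHEAP name is a local whose storage escape analysis moved to the heap; as the hunk above shows, walkexpr replaces each use with an implicit dereference of its Heapaddr. At the source level:

package main

import "fmt"

// x escapes via the returned pointer, so it gets class PAUTOHEAP and
// every mention of x inside escape compiles as a load through the
// heap cell's address. Building with -gcflags=-m reports
// "moved to heap: x".
func escape() *int {
	x := 42
	return &x
}

func main() {
	fmt.Println(*escape()) // 42
}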
@@ -556,46 +556,46 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
case ir.ONOT, ir.ONEG, ir.OPLUS, ir.OBITNOT, ir.OREAL, ir.OIMAG, ir.OSPTR, ir.OITAB, ir.OIDATA:
n := n.(*ir.UnaryExpr)
- n.SetLeft(walkexpr(n.Left(), init))
+ n.X = walkexpr(n.X, init)
return n
case ir.ODOTMETH, ir.ODOTINTER:
n := n.(*ir.SelectorExpr)
- n.SetLeft(walkexpr(n.Left(), init))
+ n.X = walkexpr(n.X, init)
return n
case ir.OADDR:
n := n.(*ir.AddrExpr)
- n.SetLeft(walkexpr(n.Left(), init))
+ n.X = walkexpr(n.X, init)
return n
case ir.ODEREF:
n := n.(*ir.StarExpr)
- n.SetLeft(walkexpr(n.Left(), init))
+ n.X = walkexpr(n.X, init)
return n
case ir.OEFACE, ir.OAND, ir.OANDNOT, ir.OSUB, ir.OMUL, ir.OADD, ir.OOR, ir.OXOR, ir.OLSH, ir.ORSH:
n := n.(*ir.BinaryExpr)
- n.SetLeft(walkexpr(n.Left(), init))
- n.SetRight(walkexpr(n.Right(), init))
+ n.X = walkexpr(n.X, init)
+ n.Y = walkexpr(n.Y, init)
return n
case ir.ODOT, ir.ODOTPTR:
n := n.(*ir.SelectorExpr)
usefield(n)
- n.SetLeft(walkexpr(n.Left(), init))
+ n.X = walkexpr(n.X, init)
return n
case ir.ODOTTYPE, ir.ODOTTYPE2:
n := n.(*ir.TypeAssertExpr)
- n.SetLeft(walkexpr(n.Left(), init))
+ n.X = walkexpr(n.X, init)
// Set up interface type addresses for back end.
- n.SetRight(typename(n.Type()))
+ n.Ntype = typename(n.Type())
if n.Op() == ir.ODOTTYPE {
- n.Right().(*ir.AddrExpr).SetRight(typename(n.Left().Type()))
+ n.Ntype.(*ir.AddrExpr).Alloc = typename(n.X.Type())
}
- if !n.Type().IsInterface() && !n.Left().Type().IsEmptyInterface() {
- n.PtrList().Set1(itabname(n.Type(), n.Left().Type()))
+ if !n.Type().IsInterface() && !n.X.Type().IsEmptyInterface() {
+ n.Itab.Set1(itabname(n.Type(), n.X.Type()))
}
return n
@@ -603,20 +603,20 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
n := n.(*ir.UnaryExpr)
if isRuneCount(n) {
// Replace len([]rune(string)) with runtime.countrunes(string).
- return mkcall("countrunes", n.Type(), init, conv(n.Left().(*ir.ConvExpr).Left(), types.Types[types.TSTRING]))
+ return mkcall("countrunes", n.Type(), init, conv(n.X.(*ir.ConvExpr).X, types.Types[types.TSTRING]))
}
- n.SetLeft(walkexpr(n.Left(), init))
+ n.X = walkexpr(n.X, init)
// replace len(*[10]int) with 10.
// delayed until now to preserve side effects.
- t := n.Left().Type()
+ t := n.X.Type()
if t.IsPtr() {
t = t.Elem()
}
if t.IsArray() {
- safeexpr(n.Left(), init)
+ safeexpr(n.X, init)
con := origIntConst(n, t.NumElem())
con.SetTypecheck(1)
return con
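Two OLEN special cases appear above: len([]rune(s)) becomes a runtime countrunes call with no intermediate slice, and len of a (pointer to) fixed-size array folds to a constant, keeping the operand only for side effects. For instance:

package main

import "fmt"

func main() {
	s := "héllo"
	// Rewritten to countrunes(s); the []rune slice is never built.
	fmt.Println(len([]rune(s))) // 5 runes, though len(s) == 6 bytes

	// len(*[10]int) is the constant 10; p is not dereferenced, so
	// this is legal even for a nil pointer.
	var p *[10]int
	fmt.Println(len(p)) // 10
}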
@@ -625,8 +625,8 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
case ir.OCOMPLEX:
n := n.(*ir.BinaryExpr)
- n.SetLeft(walkexpr(n.Left(), init))
- n.SetRight(walkexpr(n.Right(), init))
+ n.X = walkexpr(n.X, init)
+ n.Y = walkexpr(n.Y, init)
return n
case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
@@ -635,15 +635,15 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
case ir.OANDAND, ir.OOROR:
n := n.(*ir.LogicalExpr)
- n.SetLeft(walkexpr(n.Left(), init))
+ n.X = walkexpr(n.X, init)
// cannot put side effects from n.Right on init,
// because they cannot run before n.Left is checked.
// save elsewhere and store on the eventual n.Right.
var ll ir.Nodes
- n.SetRight(walkexpr(n.Right(), &ll))
- n.SetRight(initExpr(ll.Slice(), n.Right()))
+ n.Y = walkexpr(n.Y, &ll)
+ n.Y = initExpr(ll.Slice(), n.Y)
return n
case ir.OPRINT, ir.OPRINTN:
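For OANDAND/OOROR, side effects produced while walking the right operand are attached to that operand rather than hoisted into init, preserving short-circuit evaluation:

package main

import "fmt"

func side() bool {
	fmt.Println("rhs evaluated")
	return true
}

func main() {
	cond := false
	// side() must not run: its walk-time init stays with the right
	// operand and is skipped when the left operand short-circuits.
	if cond && side() {
	}
	fmt.Println("done (no rhs output above)")
}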
@@ -651,7 +651,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
case ir.OPANIC:
n := n.(*ir.UnaryExpr)
- return mkcall("gopanic", nil, init, n.Left())
+ return mkcall("gopanic", nil, init, n.X)
case ir.ORECOVER:
n := n.(*ir.CallExpr)
@@ -667,24 +667,24 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
markUsedIfaceMethod(n)
}
- if n.Op() == ir.OCALLFUNC && n.Left().Op() == ir.OCLOSURE {
+ if n.Op() == ir.OCALLFUNC && n.X.Op() == ir.OCLOSURE {
// Transform direct call of a closure to call of a normal function.
// transformclosure already did all preparation work.
// Prepend captured variables to argument list.
- clo := n.Left().(*ir.ClosureExpr)
- n.PtrList().Prepend(clo.Func().ClosureEnter.Slice()...)
- clo.Func().ClosureEnter.Set(nil)
+ clo := n.X.(*ir.ClosureExpr)
+ n.Args.Prepend(clo.Func.ClosureEnter.Slice()...)
+ clo.Func.ClosureEnter.Set(nil)
// Replace OCLOSURE with ONAME/PFUNC.
- n.SetLeft(clo.Func().Nname)
+ n.X = clo.Func.Nname
// Update type of OCALLFUNC node.
// Output arguments had not changed, but their offsets could.
- if n.Left().Type().NumResults() == 1 {
- n.SetType(n.Left().Type().Results().Field(0).Type)
+ if n.X.Type().NumResults() == 1 {
+ n.SetType(n.X.Type().Results().Field(0).Type)
} else {
- n.SetType(n.Left().Type().Results())
+ n.SetType(n.X.Type().Results())
}
}
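A closure called directly never needs a closure object: per the hunk above, the OCLOSURE in call position is replaced by the function's ONAME and the captured variables are prepended to the argument list. In source terms:

package main

import "fmt"

func main() {
	n := 10
	// transformclosure prepared the function; the captured n is
	// passed like an extra leading argument to a plain call.
	result := func(x int) int { return x * n }(2)
	fmt.Println(result) // 20
}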
@@ -698,10 +698,10 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
switch n.Op() {
case ir.OAS:
n := n.(*ir.AssignStmt)
- left, right = n.Left(), n.Right()
+ left, right = n.X, n.Y
case ir.OASOP:
n := n.(*ir.AssignOpStmt)
- left, right = n.Left(), n.Right()
+ left, right = n.X, n.Y
}
// Recognize m[k] = append(m[k], ...) so we can reuse
@@ -710,22 +710,22 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
if left.Op() == ir.OINDEXMAP && right.Op() == ir.OAPPEND {
left := left.(*ir.IndexExpr)
mapAppend = right.(*ir.CallExpr)
- if !samesafeexpr(left, mapAppend.List().First()) {
- base.Fatalf("not same expressions: %v != %v", left, mapAppend.List().First())
+ if !samesafeexpr(left, mapAppend.Args.First()) {
+ base.Fatalf("not same expressions: %v != %v", left, mapAppend.Args.First())
}
}
left = walkexpr(left, init)
left = safeexpr(left, init)
if mapAppend != nil {
- mapAppend.List().SetFirst(left)
+ mapAppend.Args.SetFirst(left)
}
if n.Op() == ir.OASOP {
// Rewrite x op= y into x = x op y.
- n = ir.NewAssignStmt(base.Pos, left, typecheck(ir.NewBinaryExpr(base.Pos, n.(*ir.AssignOpStmt).SubOp(), left, right), ctxExpr))
+ n = ir.NewAssignStmt(base.Pos, left, typecheck(ir.NewBinaryExpr(base.Pos, n.(*ir.AssignOpStmt).AsOp, left, right), ctxExpr))
} else {
- n.(*ir.AssignStmt).SetLeft(left)
+ n.(*ir.AssignStmt).X = left
}
as := n.(*ir.AssignStmt)
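Two rewrites meet here: m[k] = append(m[k], ...) is recognized so both sides share one map lookup, and OASOP is lowered to an ordinary assignment. Concretely:

package main

import "fmt"

func main() {
	m := map[string][]int{}
	// samesafeexpr verifies the two m["k"] expressions match, so the
	// bucket is located once and reused for the store.
	m["k"] = append(m["k"], 1, 2)

	x := 3
	x += 1 // lowered to x = x + 1 before further walking
	fmt.Println(m, x) // map[k:[1 2]] 4
}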
@@ -733,32 +733,32 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
return ir.NewBlockStmt(as.Pos(), nil)
}
- if as.Right() == nil {
+ if as.Y == nil {
// TODO(austin): Check all "implicit zeroing"
return as
}
- if !instrumenting && isZero(as.Right()) {
+ if !instrumenting && isZero(as.Y) {
return as
}
- switch as.Right().Op() {
+ switch as.Y.Op() {
default:
- as.SetRight(walkexpr(as.Right(), init))
+ as.Y = walkexpr(as.Y, init)
case ir.ORECV:
// x = <-c; as.Left is x, as.Right.Left is c.
// order.stmt made sure x is addressable.
- recv := as.Right().(*ir.UnaryExpr)
- recv.SetLeft(walkexpr(recv.Left(), init))
+ recv := as.Y.(*ir.UnaryExpr)
+ recv.X = walkexpr(recv.X, init)
- n1 := nodAddr(as.Left())
- r := recv.Left() // the channel
+ n1 := nodAddr(as.X)
+ r := recv.X // the channel
return mkcall1(chanfn("chanrecv1", 2, r.Type()), nil, init, r, n1)
case ir.OAPPEND:
// x = append(...)
- call := as.Right().(*ir.CallExpr)
+ call := as.Y.(*ir.CallExpr)
if call.Type().Elem().NotInHeap() {
base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", call.Type().Elem())
}
@@ -767,24 +767,24 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
case isAppendOfMake(call):
// x = append(y, make([]T, y)...)
r = extendslice(call, init)
- case call.IsDDD():
+ case call.IsDDD:
r = appendslice(call, init) // also works for append(slice, string).
default:
r = walkappend(call, init, as)
}
- as.SetRight(r)
+ as.Y = r
if r.Op() == ir.OAPPEND {
// Left in place for back end.
// Do not add a new write barrier.
// Set up address of type for back end.
- r.(*ir.CallExpr).SetLeft(typename(r.Type().Elem()))
+ r.(*ir.CallExpr).X = typename(r.Type().Elem())
return as
}
// Otherwise, lowered for race detector.
// Treat as ordinary assignment.
}
- if as.Left() != nil && as.Right() != nil {
+ if as.X != nil && as.Y != nil {
return convas(as, init)
}
return as
@@ -792,26 +792,26 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
case ir.OAS2:
n := n.(*ir.AssignListStmt)
init.AppendNodes(n.PtrInit())
- walkexprlistsafe(n.List().Slice(), init)
- walkexprlistsafe(n.Rlist().Slice(), init)
- return liststmt(ascompatee(ir.OAS, n.List().Slice(), n.Rlist().Slice(), init))
+ walkexprlistsafe(n.Lhs.Slice(), init)
+ walkexprlistsafe(n.Rhs.Slice(), init)
+ return liststmt(ascompatee(ir.OAS, n.Lhs.Slice(), n.Rhs.Slice(), init))
// a,b,... = fn()
case ir.OAS2FUNC:
n := n.(*ir.AssignListStmt)
init.AppendNodes(n.PtrInit())
- r := n.Rlist().First()
- walkexprlistsafe(n.List().Slice(), init)
+ r := n.Rhs.First()
+ walkexprlistsafe(n.Lhs.Slice(), init)
r = walkexpr(r, init)
if IsIntrinsicCall(r.(*ir.CallExpr)) {
- n.PtrRlist().Set1(r)
+ n.Rhs.Set1(r)
return n
}
init.Append(r)
- ll := ascompatet(n.List(), r.Type())
+ ll := ascompatet(n.Lhs, r.Type())
return liststmt(ll)
// x, y = <-c
@@ -820,18 +820,18 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
n := n.(*ir.AssignListStmt)
init.AppendNodes(n.PtrInit())
- r := n.Rlist().First().(*ir.UnaryExpr) // recv
- walkexprlistsafe(n.List().Slice(), init)
- r.SetLeft(walkexpr(r.Left(), init))
+ r := n.Rhs.First().(*ir.UnaryExpr) // recv
+ walkexprlistsafe(n.Lhs.Slice(), init)
+ r.X = walkexpr(r.X, init)
var n1 ir.Node
- if ir.IsBlank(n.List().First()) {
+ if ir.IsBlank(n.Lhs.First()) {
n1 = nodnil()
} else {
- n1 = nodAddr(n.List().First())
+ n1 = nodAddr(n.Lhs.First())
}
- fn := chanfn("chanrecv2", 2, r.Left().Type())
- ok := n.List().Second()
- call := mkcall1(fn, types.Types[types.TBOOL], init, r.Left(), n1)
+ fn := chanfn("chanrecv2", 2, r.X.Type())
+ ok := n.Lhs.Second()
+ call := mkcall1(fn, types.Types[types.TBOOL], init, r.X, n1)
return typecheck(ir.NewAssignStmt(base.Pos, ok, call), ctxStmt)
// a,b = m[i]
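The OAS2RECV lowering above turns x, ok = <-c into a chanrecv2(c, &x) call (with a nil destination when x is blank). Its observable semantics:

package main

import "fmt"

func main() {
	c := make(chan int, 1)
	c <- 7
	close(c)
	x, ok := <-c
	fmt.Println(x, ok) // 7 true
	// Once a closed channel's buffer drains, chanrecv2 stores the
	// zero value and reports false.
	x, ok = <-c
	fmt.Println(x, ok) // 0 false
}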
@@ -839,21 +839,21 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
n := n.(*ir.AssignListStmt)
init.AppendNodes(n.PtrInit())
- r := n.Rlist().First().(*ir.IndexExpr)
- walkexprlistsafe(n.List().Slice(), init)
- r.SetLeft(walkexpr(r.Left(), init))
- r.SetRight(walkexpr(r.Right(), init))
- t := r.Left().Type()
+ r := n.Rhs.First().(*ir.IndexExpr)
+ walkexprlistsafe(n.Lhs.Slice(), init)
+ r.X = walkexpr(r.X, init)
+ r.Index = walkexpr(r.Index, init)
+ t := r.X.Type()
fast := mapfast(t)
var key ir.Node
if fast != mapslow {
// fast versions take key by value
- key = r.Right()
+ key = r.Index
} else {
// standard version takes key by reference
// order.expr made sure key is addressable.
- key = nodAddr(r.Right())
+ key = nodAddr(r.Index)
}
// from:
@@ -861,25 +861,25 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
// to:
// var,b = mapaccess2*(t, m, i)
// a = *var
- a := n.List().First()
+ a := n.Lhs.First()
var call *ir.CallExpr
if w := t.Elem().Width; w <= zeroValSize {
fn := mapfn(mapaccess2[fast], t)
- call = mkcall1(fn, fn.Type().Results(), init, typename(t), r.Left(), key)
+ call = mkcall1(fn, fn.Type().Results(), init, typename(t), r.X, key)
} else {
fn := mapfn("mapaccess2_fat", t)
z := zeroaddr(w)
- call = mkcall1(fn, fn.Type().Results(), init, typename(t), r.Left(), key, z)
+ call = mkcall1(fn, fn.Type().Results(), init, typename(t), r.X, key, z)
}
// mapaccess2* returns a typed bool, but due to spec changes,
// the boolean result of i.(T) is now untyped so we make it the
// same type as the variable on the lhs.
- if ok := n.List().Second(); !ir.IsBlank(ok) && ok.Type().IsBoolean() {
+ if ok := n.Lhs.Second(); !ir.IsBlank(ok) && ok.Type().IsBoolean() {
call.Type().Field(1).Type = ok.Type()
}
- n.PtrRlist().Set1(call)
+ n.Rhs.Set1(call)
n.SetOp(ir.OAS2FUNC)
// don't generate a = *var if a is _
@@ -891,7 +891,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
var_.SetTypecheck(1)
var_.MarkNonNil() // mapaccess always returns a non-nil pointer
- n.List().SetFirst(var_)
+ n.Lhs.SetFirst(var_)
init.Append(walkexpr(n, init))
as := ir.NewAssignStmt(base.Pos, a, ir.NewStarExpr(base.Pos, var_))
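OAS2MAPR becomes a mapaccess2* call returning an element pointer and a bool; the compiler then loads through the pointer unless the first destination is blank. The source-level behavior:

package main

import "fmt"

func main() {
	m := map[string]int{"a": 1}
	v, ok := m["a"]
	fmt.Println(v, ok) // 1 true
	// Missing keys load from a shared zero value, so v becomes 0.
	v, ok = m["b"]
	fmt.Println(v, ok) // 0 false
}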
@@ -900,8 +900,8 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
case ir.ODELETE:
n := n.(*ir.CallExpr)
init.AppendNodes(n.PtrInit())
- map_ := n.List().First()
- key := n.List().Second()
+ map_ := n.Args.First()
+ key := n.Args.Second()
map_ = walkexpr(map_, init)
key = walkexpr(key, init)
@@ -915,15 +915,15 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
case ir.OAS2DOTTYPE:
n := n.(*ir.AssignListStmt)
- walkexprlistsafe(n.List().Slice(), init)
- n.PtrRlist().SetIndex(0, walkexpr(n.Rlist().First(), init))
+ walkexprlistsafe(n.Lhs.Slice(), init)
+ (&n.Rhs).SetIndex(0, walkexpr(n.Rhs.First(), init))
return n
case ir.OCONVIFACE:
n := n.(*ir.ConvExpr)
- n.SetLeft(walkexpr(n.Left(), init))
+ n.X = walkexpr(n.X, init)
- fromType := n.Left().Type()
+ fromType := n.X.Type()
toType := n.Type()
if !fromType.IsInterface() && !ir.IsBlank(Curfn.Nname) { // skip unnamed functions (func _())
@@ -940,7 +940,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
// Optimize convT2E or convT2I as a two-word copy when T is pointer-shaped.
if isdirectiface(fromType) {
- l := ir.NewBinaryExpr(base.Pos, ir.OEFACE, typeword(), n.Left())
+ l := ir.NewBinaryExpr(base.Pos, ir.OEFACE, typeword(), n.X)
l.SetType(toType)
l.SetTypecheck(n.Typecheck())
return l
@@ -948,12 +948,12 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
if staticuint64s == nil {
staticuint64s = NewName(Runtimepkg.Lookup("staticuint64s"))
- staticuint64s.SetClass(ir.PEXTERN)
+ staticuint64s.Class_ = ir.PEXTERN
// The actual type is [256]uint64, but we use [256*8]uint8 so we can address
// individual bytes.
staticuint64s.SetType(types.NewArray(types.Types[types.TUINT8], 256*8))
zerobase = NewName(Runtimepkg.Lookup("zerobase"))
- zerobase.SetClass(ir.PEXTERN)
+ zerobase.Class_ = ir.PEXTERN
zerobase.SetType(types.Types[types.TUINTPTR])
}
@@ -964,27 +964,27 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
switch {
case fromType.Size() == 0:
// n.Left is zero-sized. Use zerobase.
- cheapexpr(n.Left(), init) // Evaluate n.Left for side-effects. See issue 19246.
+ cheapexpr(n.X, init) // Evaluate n.X for side-effects. See issue 19246.
value = zerobase
case fromType.IsBoolean() || (fromType.Size() == 1 && fromType.IsInteger()):
// n.Left is a bool/byte. Use staticuint64s[n.Left * 8] on little-endian
// and staticuint64s[n.Left * 8 + 7] on big-endian.
- n.SetLeft(cheapexpr(n.Left(), init))
+ n.X = cheapexpr(n.X, init)
// byteindex widens n.Left so that the multiplication doesn't overflow.
- index := ir.NewBinaryExpr(base.Pos, ir.OLSH, byteindex(n.Left()), nodintconst(3))
+ index := ir.NewBinaryExpr(base.Pos, ir.OLSH, byteindex(n.X), nodintconst(3))
if thearch.LinkArch.ByteOrder == binary.BigEndian {
index = ir.NewBinaryExpr(base.Pos, ir.OADD, index, nodintconst(7))
}
xe := ir.NewIndexExpr(base.Pos, staticuint64s, index)
xe.SetBounded(true)
value = xe
- case n.Left().Op() == ir.ONAME && n.Left().(*ir.Name).Class() == ir.PEXTERN && n.Left().(*ir.Name).Readonly():
+ case n.X.Op() == ir.ONAME && n.X.(*ir.Name).Class_ == ir.PEXTERN && n.X.(*ir.Name).Readonly():
// n.Left is a readonly global; use it directly.
- value = n.Left()
+ value = n.X
case !fromType.IsInterface() && n.Esc() == EscNone && fromType.Width <= 1024:
// n.Left does not escape. Use a stack temporary initialized to n.Left.
value = temp(fromType)
- init.Append(typecheck(ir.NewAssignStmt(base.Pos, value, n.Left()), ctxStmt))
+ init.Append(typecheck(ir.NewAssignStmt(base.Pos, value, n.X), ctxStmt))
}
if value != nil {
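The cases above avoid allocating for common OCONVIFACE operands: zero-sized values share zerobase, single-byte values index the static staticuint64s table, and readonly globals are referenced directly. A rough check of the single-byte case (exact allocation counts are an implementation detail of gc, so treat this as a sketch):

package main

import (
	"fmt"
	"testing"
)

func main() {
	var sink interface{}
	// The interface's data word points into staticuint64s, so
	// converting a byte should report zero allocations here.
	allocs := testing.AllocsPerRun(100, func() {
		var b byte = 42
		sink = b
	})
	fmt.Println("allocs per conversion:", allocs, sink)
}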
@@ -1005,7 +1005,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
if toType.IsEmptyInterface() && fromType.IsInterface() && !fromType.IsEmptyInterface() {
// Evaluate the input interface.
c := temp(fromType)
- init.Append(ir.NewAssignStmt(base.Pos, c, n.Left()))
+ init.Append(ir.NewAssignStmt(base.Pos, c, n.X))
// Get the itab out of the interface.
tmp := temp(types.NewPtr(types.Types[types.TUINT8]))
@@ -1013,7 +1013,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
// Get the type out of the itab.
nif := ir.NewIfStmt(base.Pos, typecheck(ir.NewBinaryExpr(base.Pos, ir.ONE, tmp, nodnil()), ctxExpr), nil, nil)
- nif.PtrBody().Set1(ir.NewAssignStmt(base.Pos, tmp, itabType(tmp)))
+ nif.Body.Set1(ir.NewAssignStmt(base.Pos, tmp, itabType(tmp)))
init.Append(nif)
// Build the result.
@@ -1034,7 +1034,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
fn = substArgTypes(fn, fromType)
dowidth(fn.Type())
call := ir.NewCallExpr(base.Pos, ir.OCALL, fn, nil)
- call.PtrList().Set1(n.Left())
+ call.Args.Set1(n.X)
e := ir.NewBinaryExpr(base.Pos, ir.OEFACE, typeword(), safeexpr(walkexpr(typecheck(call, ctxExpr), init), init))
e.SetType(toType)
e.SetTypecheck(1)
@@ -1050,7 +1050,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
tab = typeword()
}
- v := n.Left()
+ v := n.X
if needsaddr {
// Types of large or unknown size are passed by reference.
// Orderexpr arranged for n.Left to be a temporary for all
@@ -1069,41 +1069,41 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
fn = substArgTypes(fn, fromType, toType)
dowidth(fn.Type())
call := ir.NewCallExpr(base.Pos, ir.OCALL, fn, nil)
- call.PtrList().Set2(tab, v)
+ call.Args.Set2(tab, v)
return walkexpr(typecheck(call, ctxExpr), init)
case ir.OCONV, ir.OCONVNOP:
n := n.(*ir.ConvExpr)
- n.SetLeft(walkexpr(n.Left(), init))
- if n.Op() == ir.OCONVNOP && n.Type() == n.Left().Type() {
- return n.Left()
+ n.X = walkexpr(n.X, init)
+ if n.Op() == ir.OCONVNOP && n.Type() == n.X.Type() {
+ return n.X
}
if n.Op() == ir.OCONVNOP && checkPtr(Curfn, 1) {
- if n.Type().IsPtr() && n.Left().Type().IsUnsafePtr() { // unsafe.Pointer to *T
+ if n.Type().IsPtr() && n.X.Type().IsUnsafePtr() { // unsafe.Pointer to *T
return walkCheckPtrAlignment(n, init, nil)
}
- if n.Type().IsUnsafePtr() && n.Left().Type().IsUintptr() { // uintptr to unsafe.Pointer
+ if n.Type().IsUnsafePtr() && n.X.Type().IsUintptr() { // uintptr to unsafe.Pointer
return walkCheckPtrArithmetic(n, init)
}
}
- param, result := rtconvfn(n.Left().Type(), n.Type())
+ param, result := rtconvfn(n.X.Type(), n.Type())
if param == types.Txxx {
return n
}
fn := types.BasicTypeNames[param] + "to" + types.BasicTypeNames[result]
- return conv(mkcall(fn, types.Types[result], init, conv(n.Left(), types.Types[param])), n.Type())
+ return conv(mkcall(fn, types.Types[result], init, conv(n.X, types.Types[param])), n.Type())
case ir.ODIV, ir.OMOD:
n := n.(*ir.BinaryExpr)
- n.SetLeft(walkexpr(n.Left(), init))
- n.SetRight(walkexpr(n.Right(), init))
+ n.X = walkexpr(n.X, init)
+ n.Y = walkexpr(n.Y, init)
// rewrite complex div into function call.
- et := n.Left().Type().Kind()
+ et := n.X.Type().Kind()
if isComplex[et] && n.Op() == ir.ODIV {
t := n.Type()
- call := mkcall("complex128div", types.Types[types.TCOMPLEX128], init, conv(n.Left(), types.Types[types.TCOMPLEX128]), conv(n.Right(), types.Types[types.TCOMPLEX128]))
+ call := mkcall("complex128div", types.Types[types.TCOMPLEX128], init, conv(n.X, types.Types[types.TCOMPLEX128]), conv(n.Y, types.Types[types.TCOMPLEX128]))
return conv(call, t)
}
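Complex division is not open-coded (overflow and special-value handling make it subtle), so ODIV on complex operands converts both sides to complex128, calls complex128div, and converts the result back:

package main

import "fmt"

func main() {
	a, b := complex64(6+8i), complex64(0+2i)
	// Compiled as complex64(complex128div(complex128(a), complex128(b))).
	fmt.Println(a / b) // (4-3i)
}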
@@ -1116,12 +1116,12 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
// TODO: Remove this code once we can introduce
// runtime calls late in SSA processing.
if Widthreg < 8 && (et == types.TINT64 || et == types.TUINT64) {
- if n.Right().Op() == ir.OLITERAL {
+ if n.Y.Op() == ir.OLITERAL {
// Leave div/mod by constant powers of 2 or small 16-bit constants.
// The SSA backend will handle those.
switch et {
case types.TINT64:
- c := ir.Int64Val(n.Right())
+ c := ir.Int64Val(n.Y)
if c < 0 {
c = -c
}
@@ -1129,7 +1129,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
return n
}
case types.TUINT64:
- c := ir.Uint64Val(n.Right())
+ c := ir.Uint64Val(n.Y)
if c < 1<<16 {
return n
}
@@ -1149,49 +1149,49 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
} else {
fn += "mod"
}
- return mkcall(fn, n.Type(), init, conv(n.Left(), types.Types[et]), conv(n.Right(), types.Types[et]))
+ return mkcall(fn, n.Type(), init, conv(n.X, types.Types[et]), conv(n.Y, types.Types[et]))
}
return n
case ir.OINDEX:
n := n.(*ir.IndexExpr)
- n.SetLeft(walkexpr(n.Left(), init))
+ n.X = walkexpr(n.X, init)
// save the original node for bounds checking elision.
// If it was a ODIV/OMOD walk might rewrite it.
- r := n.Right()
+ r := n.Index
- n.SetRight(walkexpr(n.Right(), init))
+ n.Index = walkexpr(n.Index, init)
// if range of type cannot exceed static array bound,
// disable bounds check.
if n.Bounded() {
return n
}
- t := n.Left().Type()
+ t := n.X.Type()
if t != nil && t.IsPtr() {
t = t.Elem()
}
if t.IsArray() {
n.SetBounded(bounded(r, t.NumElem()))
- if base.Flag.LowerM != 0 && n.Bounded() && !ir.IsConst(n.Right(), constant.Int) {
+ if base.Flag.LowerM != 0 && n.Bounded() && !ir.IsConst(n.Index, constant.Int) {
base.Warn("index bounds check elided")
}
- if smallintconst(n.Right()) && !n.Bounded() {
+ if smallintconst(n.Index) && !n.Bounded() {
base.Errorf("index out of bounds")
}
- } else if ir.IsConst(n.Left(), constant.String) {
- n.SetBounded(bounded(r, int64(len(ir.StringVal(n.Left())))))
- if base.Flag.LowerM != 0 && n.Bounded() && !ir.IsConst(n.Right(), constant.Int) {
+ } else if ir.IsConst(n.X, constant.String) {
+ n.SetBounded(bounded(r, int64(len(ir.StringVal(n.X)))))
+ if base.Flag.LowerM != 0 && n.Bounded() && !ir.IsConst(n.Index, constant.Int) {
base.Warn("index bounds check elided")
}
- if smallintconst(n.Right()) && !n.Bounded() {
+ if smallintconst(n.Index) && !n.Bounded() {
base.Errorf("index out of bounds")
}
}
- if ir.IsConst(n.Right(), constant.Int) {
- if v := n.Right().Val(); constant.Sign(v) < 0 || doesoverflow(v, types.Types[types.TINT]) {
+ if ir.IsConst(n.Index, constant.Int) {
+ if v := n.Index.Val(); constant.Sign(v) < 0 || doesoverflow(v, types.Types[types.TINT]) {
base.Errorf("index out of bounds")
}
}
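For OINDEX into an array or constant string, bounded() tries to prove the index is in range using the pre-walk expression r (ODIV/OMOD rewriting may obscure it later); a proven index sets Bounded and the runtime check is dropped. For example:

package main

import "fmt"

func main() {
	var a [8]int
	for i := 0; i < 64; i++ {
		// i&7 is provably in [0, 8), so the index is marked Bounded;
		// with -gcflags=-m the compiler prints
		// "index bounds check elided" for non-constant indexes.
		a[i&7] += i
	}
	fmt.Println(a)
}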
@@ -1200,13 +1200,13 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
case ir.OINDEXMAP:
// Replace m[k] with *map{access1,assign}(maptype, m, &k)
n := n.(*ir.IndexExpr)
- n.SetLeft(walkexpr(n.Left(), init))
- n.SetRight(walkexpr(n.Right(), init))
- map_ := n.Left()
- key := n.Right()
+ n.X = walkexpr(n.X, init)
+ n.Index = walkexpr(n.Index, init)
+ map_ := n.X
+ key := n.Index
t := map_.Type()
var call *ir.CallExpr
- if n.IndexMapLValue() {
+ if n.Assigned {
// This m[k] expression is on the left-hand side of an assignment.
fast := mapfast(t)
if fast == mapslow {
@@ -1244,20 +1244,20 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
case ir.OSLICEHEADER:
n := n.(*ir.SliceHeaderExpr)
- n.SetLeft(walkexpr(n.Left(), init))
- n.List().SetFirst(walkexpr(n.List().First(), init))
- n.List().SetSecond(walkexpr(n.List().Second(), init))
+ n.Ptr = walkexpr(n.Ptr, init)
+ n.LenCap.SetFirst(walkexpr(n.LenCap.First(), init))
+ n.LenCap.SetSecond(walkexpr(n.LenCap.Second(), init))
return n
case ir.OSLICE, ir.OSLICEARR, ir.OSLICESTR, ir.OSLICE3, ir.OSLICE3ARR:
n := n.(*ir.SliceExpr)
- checkSlice := checkPtr(Curfn, 1) && n.Op() == ir.OSLICE3ARR && n.Left().Op() == ir.OCONVNOP && n.Left().(*ir.ConvExpr).Left().Type().IsUnsafePtr()
+ checkSlice := checkPtr(Curfn, 1) && n.Op() == ir.OSLICE3ARR && n.X.Op() == ir.OCONVNOP && n.X.(*ir.ConvExpr).X.Type().IsUnsafePtr()
if checkSlice {
- conv := n.Left().(*ir.ConvExpr)
- conv.SetLeft(walkexpr(conv.Left(), init))
+ conv := n.X.(*ir.ConvExpr)
+ conv.X = walkexpr(conv.X, init)
} else {
- n.SetLeft(walkexpr(n.Left(), init))
+ n.X = walkexpr(n.X, init)
}
low, high, max := n.SliceBounds()
@@ -1270,11 +1270,11 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
max = walkexpr(max, init)
n.SetSliceBounds(low, high, max)
if checkSlice {
- n.SetLeft(walkCheckPtrAlignment(n.Left().(*ir.ConvExpr), init, max))
+ n.X = walkCheckPtrAlignment(n.X.(*ir.ConvExpr), init, max)
}
if n.Op().IsSlice3() {
- if max != nil && max.Op() == ir.OCAP && samesafeexpr(n.Left(), max.(*ir.UnaryExpr).Left()) {
+ if max != nil && max.Op() == ir.OCAP && samesafeexpr(n.X, max.(*ir.UnaryExpr).X) {
// Reduce x[i:j:cap(x)] to x[i:j].
if n.Op() == ir.OSLICE3 {
n.SetOp(ir.OSLICE)
@@ -1317,14 +1317,14 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
// cannot use chanfn - closechan takes any, not chan any
n := n.(*ir.UnaryExpr)
fn := syslook("closechan")
- fn = substArgTypes(fn, n.Left().Type())
- return mkcall1(fn, nil, init, n.Left())
+ fn = substArgTypes(fn, n.X.Type())
+ return mkcall1(fn, nil, init, n.X)
case ir.OMAKECHAN:
// When size fits into int, use makechan instead of
// makechan64, which is faster and shorter on 32 bit platforms.
n := n.(*ir.MakeExpr)
- size := n.Left()
+ size := n.Len
fnname := "makechan64"
argtype := types.Types[types.TINT64]
@@ -1342,7 +1342,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
n := n.(*ir.MakeExpr)
t := n.Type()
hmapType := hmap(t)
- hint := n.Left()
+ hint := n.Len
// var h *hmap
var h ir.Node
@@ -1373,11 +1373,11 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
// }
nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLE, hint, nodintconst(BUCKETSIZE)), nil, nil)
- nif.SetLikely(true)
+ nif.Likely = true
// var bv bmap
bv := temp(bmap(t))
- nif.PtrBody().Append(ir.NewAssignStmt(base.Pos, bv, nil))
+ nif.Body.Append(ir.NewAssignStmt(base.Pos, bv, nil))
// b = &bv
b := nodAddr(bv)
@@ -1385,7 +1385,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
// h.buckets = b
bsym := hmapType.Field(5).Sym // hmap.buckets see reflect.go:hmap
na := ir.NewAssignStmt(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, h, bsym), b)
- nif.PtrBody().Append(na)
+ nif.Body.Append(na)
appendWalkStmt(init, nif)
}
}
@@ -1442,8 +1442,8 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
case ir.OMAKESLICE:
n := n.(*ir.MakeExpr)
- l := n.Left()
- r := n.Right()
+ l := n.Len
+ r := n.Cap
if r == nil {
r = safeexpr(l, init)
l = r
@@ -1472,8 +1472,8 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
// }
nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OGT, conv(l, types.Types[types.TUINT64]), nodintconst(i)), nil, nil)
niflen := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLT, l, nodintconst(0)), nil, nil)
- niflen.PtrBody().Set1(mkcall("panicmakeslicelen", nil, init))
- nif.PtrBody().Append(niflen, mkcall("panicmakeslicecap", nil, init))
+ niflen.Body.Set1(mkcall("panicmakeslicelen", nil, init))
+ nif.Body.Append(niflen, mkcall("panicmakeslicecap", nil, init))
init.Append(typecheck(nif, ctxStmt))
t = types.NewArray(t.Elem(), i) // [r]T
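For a non-escaping make([]T, l, c) that may fit on the stack, walkexpr emits explicit guards: a cap-too-large check with a nested negative-len check, lowering to panicmakeslicecap and panicmakeslicelen. Their runtime effect:

package main

import "fmt"

func main() {
	defer func() { fmt.Println("recovered:", recover()) }()
	n := -1
	// Fails the emitted l < 0 check and calls panicmakeslicelen.
	s := make([]byte, n)
	_ = s
	// Output: recovered: runtime error: makeslice: len out of range
}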
@@ -1507,9 +1507,9 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
m.SetType(t)
fn := syslook(fnname)
- m.SetLeft(mkcall1(fn, types.Types[types.TUNSAFEPTR], init, typename(t.Elem()), conv(len, argtype), conv(cap, argtype)))
- m.Left().MarkNonNil()
- m.PtrList().Set2(conv(len, types.Types[types.TINT]), conv(cap, types.Types[types.TINT]))
+ m.Ptr = mkcall1(fn, types.Types[types.TUNSAFEPTR], init, typename(t.Elem()), conv(len, argtype), conv(cap, argtype))
+ m.Ptr.MarkNonNil()
+ m.LenCap.Set2(conv(len, types.Types[types.TINT]), conv(cap, types.Types[types.TINT]))
return walkexpr(typecheck(m, ctxExpr), init)
case ir.OMAKESLICECOPY:
@@ -1523,9 +1523,9 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", t.Elem())
}
- length := conv(n.Left(), types.Types[types.TINT])
- copylen := ir.NewUnaryExpr(base.Pos, ir.OLEN, n.Right())
- copyptr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, n.Right())
+ length := conv(n.Len, types.Types[types.TINT])
+ copylen := ir.NewUnaryExpr(base.Pos, ir.OLEN, n.Cap)
+ copyptr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, n.Cap)
if !t.Elem().HasPointers() && n.Bounded() {
// When len(to)==len(from) and elements have no pointers:
@@ -1539,9 +1539,9 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
// instantiate mallocgc(size uintptr, typ *byte, needszero bool) unsafe.Pointer
fn := syslook("mallocgc")
sh := ir.NewSliceHeaderExpr(base.Pos, nil, nil, nil, nil)
- sh.SetLeft(mkcall1(fn, types.Types[types.TUNSAFEPTR], init, size, nodnil(), nodbool(false)))
- sh.Left().MarkNonNil()
- sh.PtrList().Set2(length, length)
+ sh.Ptr = mkcall1(fn, types.Types[types.TUNSAFEPTR], init, size, nodnil(), nodbool(false))
+ sh.Ptr.MarkNonNil()
+ sh.LenCap.Set2(length, length)
sh.SetType(t)
s := temp(t)
@@ -1561,9 +1561,9 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
// instantiate makeslicecopy(typ *byte, tolen int, fromlen int, from unsafe.Pointer) unsafe.Pointer
fn := syslook("makeslicecopy")
s := ir.NewSliceHeaderExpr(base.Pos, nil, nil, nil, nil)
- s.SetLeft(mkcall1(fn, types.Types[types.TUNSAFEPTR], init, typename(t.Elem()), length, copylen, conv(copyptr, types.Types[types.TUNSAFEPTR])))
- s.Left().MarkNonNil()
- s.PtrList().Set2(length, length)
+ s.Ptr = mkcall1(fn, types.Types[types.TUNSAFEPTR], init, typename(t.Elem()), length, copylen, conv(copyptr, types.Types[types.TUNSAFEPTR]))
+ s.Ptr.MarkNonNil()
+ s.LenCap.Set2(length, length)
s.SetType(t)
return walkexpr(typecheck(s, ctxExpr), init)
@@ -1575,7 +1575,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
a = nodAddr(temp(t))
}
// intstring(*[4]byte, rune)
- return mkcall("intstring", n.Type(), init, a, conv(n.Left(), types.Types[types.TINT64]))
+ return mkcall("intstring", n.Type(), init, a, conv(n.X, types.Types[types.TINT64]))
case ir.OBYTES2STR, ir.ORUNES2STR:
n := n.(*ir.ConvExpr)
@@ -1587,29 +1587,29 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
}
if n.Op() == ir.ORUNES2STR {
// slicerunetostring(*[32]byte, []rune) string
- return mkcall("slicerunetostring", n.Type(), init, a, n.Left())
+ return mkcall("slicerunetostring", n.Type(), init, a, n.X)
}
// slicebytetostring(*[32]byte, ptr *byte, n int) string
- n.SetLeft(cheapexpr(n.Left(), init))
- ptr, len := backingArrayPtrLen(n.Left())
+ n.X = cheapexpr(n.X, init)
+ ptr, len := backingArrayPtrLen(n.X)
return mkcall("slicebytetostring", n.Type(), init, a, ptr, len)
case ir.OBYTES2STRTMP:
n := n.(*ir.ConvExpr)
- n.SetLeft(walkexpr(n.Left(), init))
+ n.X = walkexpr(n.X, init)
if !instrumenting {
// Let the backend handle OBYTES2STRTMP directly
// to avoid a function call to slicebytetostringtmp.
return n
}
// slicebytetostringtmp(ptr *byte, n int) string
- n.SetLeft(cheapexpr(n.Left(), init))
- ptr, len := backingArrayPtrLen(n.Left())
+ n.X = cheapexpr(n.X, init)
+ ptr, len := backingArrayPtrLen(n.X)
return mkcall("slicebytetostringtmp", n.Type(), init, ptr, len)
case ir.OSTR2BYTES:
n := n.(*ir.ConvExpr)
- s := n.Left()
+ s := n.X
if ir.IsConst(s, constant.String) {
sc := ir.StringVal(s)
@@ -1655,7 +1655,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
// The only such case today is:
// for i, c := range []byte(string)
n := n.(*ir.ConvExpr)
- n.SetLeft(walkexpr(n.Left(), init))
+ n.X = walkexpr(n.X, init)
return n
case ir.OSTR2RUNES:
@@ -1667,7 +1667,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
a = nodAddr(temp(t))
}
// stringtoslicerune(*[32]rune, string) []rune
- return mkcall("stringtoslicerune", n.Type(), init, a, conv(n.Left(), types.Types[types.TSTRING]))
+ return mkcall("stringtoslicerune", n.Type(), init, a, conv(n.X, types.Types[types.TSTRING]))
case ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT, ir.OSTRUCTLIT, ir.OPTRLIT:
if isStaticCompositeLiteral(n) && !canSSAType(n.Type()) {
@@ -1684,11 +1684,11 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
case ir.OSEND:
n := n.(*ir.SendStmt)
- n1 := n.Right()
- n1 = assignconv(n1, n.Left().Type().Elem(), "chan send")
+ n1 := n.Value
+ n1 = assignconv(n1, n.Chan.Type().Elem(), "chan send")
n1 = walkexpr(n1, init)
n1 = nodAddr(n1)
- return mkcall1(chanfn("chansend1", 2, n.Left().Type()), nil, init, n.Left(), n1)
+ return mkcall1(chanfn("chansend1", 2, n.Chan.Type()), nil, init, n.Chan, n1)
case ir.OCLOSURE:
return walkclosure(n.(*ir.ClosureExpr), init)
@@ -1716,14 +1716,14 @@ func markTypeUsedInInterface(t *types.Type, from *obj.LSym) {
// markUsedIfaceMethod marks that an interface method is used in the current
// function. n is OCALLINTER node.
func markUsedIfaceMethod(n *ir.CallExpr) {
- dot := n.Left().(*ir.SelectorExpr)
- ityp := dot.Left().Type()
+ dot := n.X.(*ir.SelectorExpr)
+ ityp := dot.X.Type()
tsym := typenamesym(ityp).Linksym()
r := obj.Addrel(Curfn.LSym)
r.Sym = tsym
// dot.Xoffset is the method index * Widthptr (the offset of code pointer
// in itab).
- midx := dot.Offset() / int64(Widthptr)
+ midx := dot.Offset / int64(Widthptr)
r.Add = ifaceMethodOffset(ityp, midx)
r.Type = objabi.R_USEIFACEMETHOD
}
@@ -1777,7 +1777,7 @@ func rtconvfn(src, dst *types.Type) (param, result types.Kind) {
// TODO(josharian): combine this with its caller and simplify
func reduceSlice(n *ir.SliceExpr) ir.Node {
low, high, max := n.SliceBounds()
- if high != nil && high.Op() == ir.OLEN && samesafeexpr(n.Left(), high.(*ir.UnaryExpr).Left()) {
+ if high != nil && high.Op() == ir.OLEN && samesafeexpr(n.X, high.(*ir.UnaryExpr).X) {
// Reduce x[i:len(x)] to x[i:].
high = nil
}
@@ -1787,7 +1787,7 @@ func reduceSlice(n *ir.SliceExpr) ir.Node {
if base.Debug.Slice > 0 {
base.Warn("slice: omit slice operation")
}
- return n.Left()
+ return n.X
}
return n
}
@@ -1878,7 +1878,7 @@ func ascompatet(nl ir.Nodes, nr *types.Type) []ir.Node {
}
res := ir.NewResultExpr(base.Pos, nil, types.BADWIDTH)
- res.SetOffset(base.Ctxt.FixedFrameSize() + r.Offset)
+ res.Offset = base.Ctxt.FixedFrameSize() + r.Offset
res.SetType(r.Type)
res.SetTypecheck(1)
@@ -1902,7 +1902,7 @@ func mkdotargslice(typ *types.Type, args []ir.Node) ir.Node {
n.SetType(typ)
} else {
lit := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(typ).(ir.Ntype), nil)
- lit.PtrList().Append(args...)
+ lit.List.Append(args...)
lit.SetImplicit(true)
n = lit
}
@@ -1917,42 +1917,42 @@ func mkdotargslice(typ *types.Type, args []ir.Node) ir.Node {
// fixVariadicCall rewrites calls to variadic functions to use an
// explicit ... argument if one is not already present.
func fixVariadicCall(call *ir.CallExpr) {
- fntype := call.Left().Type()
- if !fntype.IsVariadic() || call.IsDDD() {
+ fntype := call.X.Type()
+ if !fntype.IsVariadic() || call.IsDDD {
return
}
vi := fntype.NumParams() - 1
vt := fntype.Params().Field(vi).Type
- args := call.List().Slice()
+ args := call.Args.Slice()
extra := args[vi:]
slice := mkdotargslice(vt, extra)
for i := range extra {
extra[i] = nil // allow GC
}
- call.PtrList().Set(append(args[:vi], slice))
- call.SetIsDDD(true)
+ call.Args.Set(append(args[:vi], slice))
+ call.IsDDD = true
}
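fixVariadicCall canonicalizes every call to a variadic function into the explicit ... form, so later phases see a single call shape. The two calls below compile identically after the rewrite:

package main

import "fmt"

func sum(xs ...int) (t int) {
	for _, x := range xs {
		t += x
	}
	return
}

func main() {
	fmt.Println(sum(1, 2, 3))           // rewritten to sum([]int{1, 2, 3}...)
	fmt.Println(sum([]int{1, 2, 3}...)) // already explicit; left alone
}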
func walkCall(n *ir.CallExpr, init *ir.Nodes) {
- if n.Rlist().Len() != 0 {
+ if n.Rargs.Len() != 0 {
return // already walked
}
- params := n.Left().Type().Params()
- args := n.List().Slice()
+ params := n.X.Type().Params()
+ args := n.Args.Slice()
- n.SetLeft(walkexpr(n.Left(), init))
+ n.X = walkexpr(n.X, init)
walkexprlist(args, init)
// If this is a method call, add the receiver at the beginning of the args.
if n.Op() == ir.OCALLMETH {
withRecv := make([]ir.Node, len(args)+1)
- dot := n.Left().(*ir.SelectorExpr)
- withRecv[0] = dot.Left()
- dot.SetLeft(nil)
+ dot := n.X.(*ir.SelectorExpr)
+ withRecv[0] = dot.X
+ dot.X = nil
copy(withRecv[1:], args)
args = withRecv
}
@@ -1968,7 +1968,7 @@ func walkCall(n *ir.CallExpr, init *ir.Nodes) {
var t *types.Type
if n.Op() == ir.OCALLMETH {
if i == 0 {
- t = n.Left().Type().Recv().Type
+ t = n.X.Type().Recv().Type
} else {
t = params.Field(i - 1).Type
}
@@ -1985,18 +1985,18 @@ func walkCall(n *ir.CallExpr, init *ir.Nodes) {
}
}
- n.PtrList().Set(tempAssigns)
- n.PtrRlist().Set(args)
+ n.Args.Set(tempAssigns)
+ n.Rargs.Set(args)
}
// generate code for print
func walkprint(nn *ir.CallExpr, init *ir.Nodes) ir.Node {
// Hoist all the argument evaluation up before the lock.
- walkexprlistcheap(nn.List().Slice(), init)
+ walkexprlistcheap(nn.Args.Slice(), init)
// For println, add " " between elements and "\n" at the end.
if nn.Op() == ir.OPRINTN {
- s := nn.List().Slice()
+ s := nn.Args.Slice()
t := make([]ir.Node, 0, len(s)*2)
for i, n := range s {
if i != 0 {
@@ -2005,11 +2005,11 @@ func walkprint(nn *ir.CallExpr, init *ir.Nodes) ir.Node {
t = append(t, n)
}
t = append(t, nodstr("\n"))
- nn.PtrList().Set(t)
+ nn.Args.Set(t)
}
// Collapse runs of constant strings.
- s := nn.List().Slice()
+ s := nn.Args.Slice()
t := make([]ir.Node, 0, len(s))
for i := 0; i < len(s); {
var strs []string
@@ -2025,10 +2025,10 @@ func walkprint(nn *ir.CallExpr, init *ir.Nodes) ir.Node {
i++
}
}
- nn.PtrList().Set(t)
+ nn.Args.Set(t)
calls := []ir.Node{mkcall("printlock", nil, init)}
- for i, n := range nn.List().Slice() {
+ for i, n := range nn.Args.Slice() {
if n.Op() == ir.OLITERAL {
if n.Type() == types.UntypedRune {
n = defaultlit(n, types.RuneType)
@@ -2047,7 +2047,7 @@ func walkprint(nn *ir.CallExpr, init *ir.Nodes) ir.Node {
n = defaultlit(n, types.Types[types.TINT64])
}
n = defaultlit(n, nil)
- nn.List().SetIndex(i, n)
+ nn.Args.SetIndex(i, n)
if n.Type() == nil || n.Type().Kind() == types.TFORW {
continue
}
@@ -2116,7 +2116,7 @@ func walkprint(nn *ir.CallExpr, init *ir.Nodes) ir.Node {
n = ir.NewConvExpr(base.Pos, ir.OCONV, nil, n)
n.SetType(t)
}
- r.PtrList().Append(n)
+ r.Args.Append(n)
}
calls = append(calls, r)
}
@@ -2127,7 +2127,7 @@ func walkprint(nn *ir.CallExpr, init *ir.Nodes) ir.Node {
walkexprlist(calls, init)
r := ir.NewBlockStmt(base.Pos, nil)
- r.PtrList().Set(calls)
+ r.List.Set(calls)
return walkstmt(typecheck(r, ctxStmt))
}
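After inserting separators and collapsing adjacent constant strings, walkprint emits one runtime call per remaining argument, bracketed by printlock/printunlock. For instance:

package main

func main() {
	// Lowered approximately to:
	//   printlock()
	//   printstring("x = ")   // "x =" and the inserted " " collapsed
	//   printint(42)
	//   printstring("\n")
	//   printunlock()
	println("x =", 42)
}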
@@ -2151,10 +2151,10 @@ func isReflectHeaderDataField(l ir.Node) bool {
switch l.Op() {
case ir.ODOT:
l := l.(*ir.SelectorExpr)
- tsym = l.Left().Type().Sym()
+ tsym = l.X.Type().Sym()
case ir.ODOTPTR:
l := l.(*ir.SelectorExpr)
- tsym = l.Left().Type().Elem().Sym()
+ tsym = l.X.Type().Elem().Sym()
default:
return false
}
@@ -2173,26 +2173,26 @@ func convas(n *ir.AssignStmt, init *ir.Nodes) *ir.AssignStmt {
n.SetTypecheck(1)
- if n.Left() == nil || n.Right() == nil {
+ if n.X == nil || n.Y == nil {
return n
}
- lt := n.Left().Type()
- rt := n.Right().Type()
+ lt := n.X.Type()
+ rt := n.Y.Type()
if lt == nil || rt == nil {
return n
}
- if ir.IsBlank(n.Left()) {
- n.SetRight(defaultlit(n.Right(), nil))
+ if ir.IsBlank(n.X) {
+ n.Y = defaultlit(n.Y, nil)
return n
}
if !types.Identical(lt, rt) {
- n.SetRight(assignconv(n.Right(), lt, "assignment"))
- n.SetRight(walkexpr(n.Right(), init))
+ n.Y = assignconv(n.Y, lt, "assignment")
+ n.Y = walkexpr(n.Y, init)
}
- dowidth(n.Right().Type())
+ dowidth(n.Y.Type())
return n
}
@@ -2212,7 +2212,7 @@ func reorder3(all []*ir.AssignStmt) []ir.Node {
var mapinit ir.Nodes
for i, n := range all {
- l := n.Left()
+ l := n.X
// Save subexpressions needed on left side.
// Drill through non-dereferences.
@@ -2220,17 +2220,17 @@ func reorder3(all []*ir.AssignStmt) []ir.Node {
switch ll := l; ll.Op() {
case ir.ODOT:
ll := ll.(*ir.SelectorExpr)
- l = ll.Left()
+ l = ll.X
continue
case ir.OPAREN:
ll := ll.(*ir.ParenExpr)
- l = ll.Left()
+ l = ll.X
continue
case ir.OINDEX:
ll := ll.(*ir.IndexExpr)
- if ll.Left().Type().IsArray() {
- ll.SetRight(reorder3save(ll.Right(), all, i, &early))
- l = ll.Left()
+ if ll.X.Type().IsArray() {
+ ll.Index = reorder3save(ll.Index, all, i, &early)
+ l = ll.X
continue
}
}
@@ -2246,22 +2246,22 @@ func reorder3(all []*ir.AssignStmt) []ir.Node {
case ir.OINDEX, ir.OINDEXMAP:
l := l.(*ir.IndexExpr)
- l.SetLeft(reorder3save(l.Left(), all, i, &early))
- l.SetRight(reorder3save(l.Right(), all, i, &early))
+ l.X = reorder3save(l.X, all, i, &early)
+ l.Index = reorder3save(l.Index, all, i, &early)
if l.Op() == ir.OINDEXMAP {
all[i] = convas(all[i], &mapinit)
}
case ir.ODEREF:
l := l.(*ir.StarExpr)
- l.SetLeft(reorder3save(l.Left(), all, i, &early))
+ l.X = reorder3save(l.X, all, i, &early)
case ir.ODOTPTR:
l := l.(*ir.SelectorExpr)
- l.SetLeft(reorder3save(l.Left(), all, i, &early))
+ l.X = reorder3save(l.X, all, i, &early)
}
// Save expression on right side.
- all[i].SetRight(reorder3save(all[i].Right(), all, i, &early))
+ all[i].Y = reorder3save(all[i].Y, all, i, &early)
}
early = append(mapinit.Slice(), early...)
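reorder3 makes parallel assignment behave as if all left-side subexpressions and all right sides were evaluated before any store, copying anything an earlier assignment might clobber into the "early" temporaries. The rule it implements:

package main

import "fmt"

func main() {
	a := []int{1, 2}
	i := 0
	// The index in a[i] is saved (reorder3save) before the store to
	// i, so the old i == 0 selects the element being assigned.
	i, a[i] = 1, 9
	fmt.Println(i, a) // 1 [9 2]
}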
@@ -2297,20 +2297,20 @@ func outervalue(n ir.Node) ir.Node {
base.Fatalf("OXDOT in walk")
case ir.ODOT:
nn := nn.(*ir.SelectorExpr)
- n = nn.Left()
+ n = nn.X
continue
case ir.OPAREN:
nn := nn.(*ir.ParenExpr)
- n = nn.Left()
+ n = nn.X
continue
case ir.OCONVNOP:
nn := nn.(*ir.ConvExpr)
- n = nn.Left()
+ n = nn.X
continue
case ir.OINDEX:
nn := nn.(*ir.IndexExpr)
- if nn.Left().Type() != nil && nn.Left().Type().IsArray() {
- n = nn.Left()
+ if nn.X.Type() != nil && nn.X.Type().IsArray() {
+ n = nn.X
continue
}
}
@@ -2329,7 +2329,7 @@ func aliased(r ir.Node, all []*ir.AssignStmt) bool {
// Treat all fields of a struct as referring to the whole struct.
// We could do better but we would have to keep track of the fields.
for r.Op() == ir.ODOT {
- r = r.(*ir.SelectorExpr).Left()
+ r = r.(*ir.SelectorExpr).X
}
// Look for obvious aliasing: a variable being assigned
@@ -2340,20 +2340,20 @@ func aliased(r ir.Node, all []*ir.AssignStmt) bool {
memwrite := false
for _, as := range all {
// We can ignore assignments to blank.
- if ir.IsBlank(as.Left()) {
+ if ir.IsBlank(as.X) {
continue
}
- lv := outervalue(as.Left())
+ lv := outervalue(as.X)
if lv.Op() != ir.ONAME {
memwrite = true
continue
}
l := lv.(*ir.Name)
- switch l.Class() {
+ switch l.Class_ {
default:
- base.Fatalf("unexpected class: %v, %v", l, l.Class())
+ base.Fatalf("unexpected class: %v, %v", l, l.Class_)
case ir.PAUTOHEAP, ir.PEXTERN:
memwrite = true
@@ -2401,7 +2401,7 @@ func anyAddrTaken(n ir.Node) bool {
switch n.Op() {
case ir.ONAME:
n := n.(*ir.Name)
- return n.Class() == ir.PEXTERN || n.Class() == ir.PAUTOHEAP || n.Name().Addrtaken()
+ return n.Class_ == ir.PEXTERN || n.Class_ == ir.PAUTOHEAP || n.Name().Addrtaken()
case ir.ODOT: // but not ODOTPTR - should have been handled in aliased.
base.Fatalf("anyAddrTaken unexpected ODOT")
@@ -2509,7 +2509,7 @@ func paramstoheap(params *types.Type) []ir.Node {
if stackcopy := v.Name().Stackcopy; stackcopy != nil {
nn = append(nn, walkstmt(ir.NewDecl(base.Pos, ir.ODCL, v)))
- if stackcopy.Class() == ir.PPARAM {
+ if stackcopy.Class_ == ir.PPARAM {
nn = append(nn, walkstmt(typecheck(ir.NewAssignStmt(base.Pos, v, stackcopy), ctxStmt)))
}
}
@@ -2557,7 +2557,7 @@ func returnsfromheap(params *types.Type) []ir.Node {
if v == nil {
continue
}
- if stackcopy := v.Name().Stackcopy; stackcopy != nil && stackcopy.Class() == ir.PPARAMOUT {
+ if stackcopy := v.Name().Stackcopy; stackcopy != nil && stackcopy.Class_ == ir.PPARAMOUT {
nn = append(nn, walkstmt(typecheck(ir.NewAssignStmt(base.Pos, stackcopy, v), ctxStmt)))
}
}
@@ -2736,7 +2736,7 @@ func writebarrierfn(name string, l *types.Type, r *types.Type) ir.Node {
}
func addstr(n *ir.AddStringExpr, init *ir.Nodes) ir.Node {
- c := n.List().Len()
+ c := n.List.Len()
if c < 2 {
base.Fatalf("addstr count %d too small", c)
@@ -2745,7 +2745,7 @@ func addstr(n *ir.AddStringExpr, init *ir.Nodes) ir.Node {
buf := nodnil()
if n.Esc() == EscNone {
sz := int64(0)
- for _, n1 := range n.List().Slice() {
+ for _, n1 := range n.List.Slice() {
if n1.Op() == ir.OLITERAL {
sz += int64(len(ir.StringVal(n1)))
}
@@ -2761,7 +2761,7 @@ func addstr(n *ir.AddStringExpr, init *ir.Nodes) ir.Node {
// build list of string arguments
args := []ir.Node{buf}
- for _, n2 := range n.List().Slice() {
+ for _, n2 := range n.List.Slice() {
args = append(args, conv(n2, types.Types[types.TSTRING]))
}
@@ -2784,7 +2784,7 @@ func addstr(n *ir.AddStringExpr, init *ir.Nodes) ir.Node {
cat := syslook(fn)
r := ir.NewCallExpr(base.Pos, ir.OCALL, cat, nil)
- r.PtrList().Set(args)
+ r.Args.Set(args)
r1 := typecheck(r, ctxExpr)
r1 = walkexpr(r1, init)
r1.SetType(n.Type())
@@ -2793,12 +2793,12 @@ func addstr(n *ir.AddStringExpr, init *ir.Nodes) ir.Node {
}
func walkAppendArgs(n *ir.CallExpr, init *ir.Nodes) {
- walkexprlistsafe(n.List().Slice(), init)
+ walkexprlistsafe(n.Args.Slice(), init)
// walkexprlistsafe will leave OINDEX (s[n]) alone if both s
// and n are name or literal, but those may index the slice we're
// modifying here. Fix explicitly.
- ls := n.List().Slice()
+ ls := n.Args.Slice()
for i1, n1 := range ls {
ls[i1] = cheapexpr(n1, init)
}
@@ -2821,10 +2821,10 @@ func walkAppendArgs(n *ir.CallExpr, init *ir.Nodes) {
func appendslice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
walkAppendArgs(n, init)
- l1 := n.List().First()
- l2 := n.List().Second()
+ l1 := n.Args.First()
+ l2 := n.Args.Second()
l2 = cheapexpr(l2, init)
- n.List().SetSecond(l2)
+ n.Args.SetSecond(l2)
var nodes ir.Nodes
@@ -2842,14 +2842,14 @@ func appendslice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
nuint := conv(nn, types.Types[types.TUINT])
scapuint := conv(ir.NewUnaryExpr(base.Pos, ir.OCAP, s), types.Types[types.TUINT])
- nif.SetLeft(ir.NewBinaryExpr(base.Pos, ir.OGT, nuint, scapuint))
+ nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OGT, nuint, scapuint)
// instantiate growslice(typ *type, []any, int) []any
fn := syslook("growslice")
fn = substArgTypes(fn, elemtype, elemtype)
// s = growslice(T, s, n)
- nif.PtrBody().Set1(ir.NewAssignStmt(base.Pos, s, mkcall1(fn, s.Type(), nif.PtrInit(), typename(elemtype), s, nn)))
+ nif.Body.Set1(ir.NewAssignStmt(base.Pos, s, mkcall1(fn, s.Type(), nif.PtrInit(), typename(elemtype), s, nn)))
nodes.Append(nif)
// s = s[:n]
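appendslice open-codes append(dst, src...): compare the needed length against cap, call growslice only when required, reslice, then copy; the same path handles appending a string to a []byte. For example:

package main

import "fmt"

func main() {
	b := []byte("go")
	// Capacity check, growslice if cap is short, s = s[:n], then a
	// copy of the string's bytes into the new tail.
	b = append(b, " 1.16"...)
	fmt.Println(string(b)) // go 1.16
}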
@@ -2926,12 +2926,12 @@ func isAppendOfMake(n ir.Node) bool {
return false
}
call := n.(*ir.CallExpr)
- if !call.IsDDD() || call.List().Len() != 2 || call.List().Second().Op() != ir.OMAKESLICE {
+ if !call.IsDDD || call.Args.Len() != 2 || call.Args.Second().Op() != ir.OMAKESLICE {
return false
}
- mk := call.List().Second().(*ir.MakeExpr)
- if mk.Right() != nil {
+ mk := call.Args.Second().(*ir.MakeExpr)
+ if mk.Cap != nil {
return false
}
@@ -2941,7 +2941,7 @@ func isAppendOfMake(n ir.Node) bool {
// typecheck made sure that constant arguments to make are not negative and fit into an int.
// The care of overflow of the len argument to make will be handled by an explicit check of int(len) < 0 during runtime.
- y := mk.Left()
+ y := mk.Len
if !ir.IsConst(y, constant.Int) && y.Type().Size() > types.Types[types.TUINT].Size() {
return false
}
@@ -2980,23 +2980,23 @@ func extendslice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
// isAppendOfMake made sure all possible positive values of l2 fit into an uint.
// The case of l2 overflow when converting from e.g. uint to int is handled by an explicit
// check of l2 < 0 at runtime which is generated below.
- l2 := conv(n.List().Second().(*ir.MakeExpr).Left(), types.Types[types.TINT])
+ l2 := conv(n.Args.Second().(*ir.MakeExpr).Len, types.Types[types.TINT])
l2 = typecheck(l2, ctxExpr)
- n.List().SetSecond(l2) // walkAppendArgs expects l2 in n.List.Second().
+ n.Args.SetSecond(l2) // walkAppendArgs expects l2 in n.Args.Second().
walkAppendArgs(n, init)
- l1 := n.List().First()
- l2 = n.List().Second() // re-read l2, as it may have been updated by walkAppendArgs
+ l1 := n.Args.First()
+ l2 = n.Args.Second() // re-read l2, as it may have been updated by walkAppendArgs
var nodes []ir.Node
// if l2 >= 0 (likely happens), do nothing
nifneg := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OGE, l2, nodintconst(0)), nil, nil)
- nifneg.SetLikely(true)
+ nifneg.Likely = true
// else panicmakeslicelen()
- nifneg.PtrRlist().Set1(mkcall("panicmakeslicelen", nil, init))
+ nifneg.Else.Set1(mkcall("panicmakeslicelen", nil, init))
nodes = append(nodes, nifneg)
// s := l1
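extendslice handles the append(x, make([]T, n)...) pattern matched by isAppendOfMake: instead of allocating the temporary slice, it grows x by n zeroed elements, with the explicit n < 0 check above standing in for make's own panic. In effect:

package main

import "fmt"

func main() {
	s := []int{1, 2}
	// No []int of length 3 is materialized; s is grown and the new
	// tail is cleared, honoring make's zeroing semantics.
	s = append(s, make([]int, 3)...)
	fmt.Println(s) // [1 2 0 0 0]
}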
@@ -3019,7 +3019,7 @@ func extendslice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
fn = substArgTypes(fn, elemtype, elemtype)
// s = growslice(T, s, n)
- nif.PtrBody().Set1(ir.NewAssignStmt(base.Pos, s, mkcall1(fn, s.Type(), nif.PtrInit(), typename(elemtype), s, nn)))
+ nif.Body.Set1(ir.NewAssignStmt(base.Pos, s, mkcall1(fn, s.Type(), nif.PtrInit(), typename(elemtype), s, nn)))
nodes = append(nodes, nif)
// s = s[:n]
@@ -3060,7 +3060,7 @@ func extendslice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
if hasPointers {
// if l1ptr == sptr
nifclr := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OEQ, l1ptr, sptr), nil, nil)
- nifclr.SetBody(clr)
+ nifclr.Body = clr
nodes = append(nodes, nifclr)
} else {
nodes = append(nodes, clr.Slice()...)
@@ -3094,13 +3094,13 @@ func extendslice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
// }
// s
func walkappend(n *ir.CallExpr, init *ir.Nodes, dst ir.Node) ir.Node {
- if !samesafeexpr(dst, n.List().First()) {
- n.List().SetFirst(safeexpr(n.List().First(), init))
- n.List().SetFirst(walkexpr(n.List().First(), init))
+ if !samesafeexpr(dst, n.Args.First()) {
+ n.Args.SetFirst(safeexpr(n.Args.First(), init))
+ n.Args.SetFirst(walkexpr(n.Args.First(), init))
}
- walkexprlistsafe(n.List().Slice()[1:], init)
+ walkexprlistsafe(n.Args.Slice()[1:], init)
- nsrc := n.List().First()
+ nsrc := n.Args.First()
// walkexprlistsafe will leave OINDEX (s[n]) alone if both s
// and n are name or literal, but those may index the slice we're
@@ -3108,7 +3108,7 @@ func walkappend(n *ir.CallExpr, init *ir.Nodes, dst ir.Node) ir.Node {
// Using cheapexpr also makes sure that the evaluation
// of all arguments (and especially any panics) happen
// before we begin to modify the slice in a visible way.
- ls := n.List().Slice()[1:]
+ ls := n.Args.Slice()[1:]
for i, n := range ls {
n = cheapexpr(n, init)
if !types.Identical(n.Type(), nsrc.Type().Elem()) {
@@ -3118,7 +3118,7 @@ func walkappend(n *ir.CallExpr, init *ir.Nodes, dst ir.Node) ir.Node {
ls[i] = n
}
- argc := n.List().Len() - 1
+ argc := n.Args.Len() - 1
if argc < 1 {
return nsrc
}
@@ -3136,12 +3136,12 @@ func walkappend(n *ir.CallExpr, init *ir.Nodes, dst ir.Node) ir.Node {
na := nodintconst(int64(argc)) // const argc
nif := ir.NewIfStmt(base.Pos, nil, nil, nil) // if cap(s) - len(s) < argc
- nif.SetLeft(ir.NewBinaryExpr(base.Pos, ir.OLT, ir.NewBinaryExpr(base.Pos, ir.OSUB, ir.NewUnaryExpr(base.Pos, ir.OCAP, ns), ir.NewUnaryExpr(base.Pos, ir.OLEN, ns)), na))
+ nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, ir.NewBinaryExpr(base.Pos, ir.OSUB, ir.NewUnaryExpr(base.Pos, ir.OCAP, ns), ir.NewUnaryExpr(base.Pos, ir.OLEN, ns)), na)
fn := syslook("growslice") // growslice(<type>, old []T, mincap int) (ret []T)
fn = substArgTypes(fn, ns.Type().Elem(), ns.Type().Elem())
- nif.PtrBody().Set1(ir.NewAssignStmt(base.Pos, ns, mkcall1(fn, ns.Type(), nif.PtrInit(), typename(ns.Type().Elem()), ns,
+ nif.Body.Set1(ir.NewAssignStmt(base.Pos, ns, mkcall1(fn, ns.Type(), nif.PtrInit(), typename(ns.Type().Elem()), ns,
ir.NewBinaryExpr(base.Pos, ir.OADD, ir.NewUnaryExpr(base.Pos, ir.OLEN, ns), na))))
l = append(l, nif)
@@ -3154,7 +3154,7 @@ func walkappend(n *ir.CallExpr, init *ir.Nodes, dst ir.Node) ir.Node {
slice.SetBounded(true)
l = append(l, ir.NewAssignStmt(base.Pos, ns, slice)) // s = s[:n+argc]
- ls = n.List().Slice()[1:]
+ ls = n.Args.Slice()[1:]
for i, n := range ls {
ix := ir.NewIndexExpr(base.Pos, ns, nn) // s[n] ...
ix.SetBounded(true)
@@ -3182,14 +3182,14 @@ func walkappend(n *ir.CallExpr, init *ir.Nodes, dst ir.Node) ir.Node {
// Also works if b is a string.
//
func copyany(n *ir.BinaryExpr, init *ir.Nodes, runtimecall bool) ir.Node {
- if n.Left().Type().Elem().HasPointers() {
+ if n.X.Type().Elem().HasPointers() {
Curfn.SetWBPos(n.Pos())
- fn := writebarrierfn("typedslicecopy", n.Left().Type().Elem(), n.Right().Type().Elem())
- n.SetLeft(cheapexpr(n.Left(), init))
- ptrL, lenL := backingArrayPtrLen(n.Left())
- n.SetRight(cheapexpr(n.Right(), init))
- ptrR, lenR := backingArrayPtrLen(n.Right())
- return mkcall1(fn, n.Type(), init, typename(n.Left().Type().Elem()), ptrL, lenL, ptrR, lenR)
+ fn := writebarrierfn("typedslicecopy", n.X.Type().Elem(), n.Y.Type().Elem())
+ n.X = cheapexpr(n.X, init)
+ ptrL, lenL := backingArrayPtrLen(n.X)
+ n.Y = cheapexpr(n.Y, init)
+ ptrR, lenR := backingArrayPtrLen(n.Y)
+ return mkcall1(fn, n.Type(), init, typename(n.X.Type().Elem()), ptrL, lenL, ptrR, lenR)
}
if runtimecall {
@@ -3197,24 +3197,24 @@ func copyany(n *ir.BinaryExpr, init *ir.Nodes, runtimecall bool) ir.Node {
// copy(n.Left, n.Right)
// n.Right can be a slice or string.
- n.SetLeft(cheapexpr(n.Left(), init))
- ptrL, lenL := backingArrayPtrLen(n.Left())
- n.SetRight(cheapexpr(n.Right(), init))
- ptrR, lenR := backingArrayPtrLen(n.Right())
+ n.X = cheapexpr(n.X, init)
+ ptrL, lenL := backingArrayPtrLen(n.X)
+ n.Y = cheapexpr(n.Y, init)
+ ptrR, lenR := backingArrayPtrLen(n.Y)
fn := syslook("slicecopy")
fn = substArgTypes(fn, ptrL.Type().Elem(), ptrR.Type().Elem())
- return mkcall1(fn, n.Type(), init, ptrL, lenL, ptrR, lenR, nodintconst(n.Left().Type().Elem().Width))
+ return mkcall1(fn, n.Type(), init, ptrL, lenL, ptrR, lenR, nodintconst(n.X.Type().Elem().Width))
}
- n.SetLeft(walkexpr(n.Left(), init))
- n.SetRight(walkexpr(n.Right(), init))
- nl := temp(n.Left().Type())
- nr := temp(n.Right().Type())
+ n.X = walkexpr(n.X, init)
+ n.Y = walkexpr(n.Y, init)
+ nl := temp(n.X.Type())
+ nr := temp(n.Y.Type())
var l []ir.Node
- l = append(l, ir.NewAssignStmt(base.Pos, nl, n.Left()))
- l = append(l, ir.NewAssignStmt(base.Pos, nr, n.Right()))
+ l = append(l, ir.NewAssignStmt(base.Pos, nl, n.X))
+ l = append(l, ir.NewAssignStmt(base.Pos, nr, n.Y))
nfrm := ir.NewUnaryExpr(base.Pos, ir.OSPTR, nr)
nto := ir.NewUnaryExpr(base.Pos, ir.OSPTR, nl)
@@ -3227,23 +3227,23 @@ func copyany(n *ir.BinaryExpr, init *ir.Nodes, runtimecall bool) ir.Node {
// if n > len(frm) { n = len(frm) }
nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
- nif.SetLeft(ir.NewBinaryExpr(base.Pos, ir.OGT, nlen, ir.NewUnaryExpr(base.Pos, ir.OLEN, nr)))
- nif.PtrBody().Append(ir.NewAssignStmt(base.Pos, nlen, ir.NewUnaryExpr(base.Pos, ir.OLEN, nr)))
+ nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OGT, nlen, ir.NewUnaryExpr(base.Pos, ir.OLEN, nr))
+ nif.Body.Append(ir.NewAssignStmt(base.Pos, nlen, ir.NewUnaryExpr(base.Pos, ir.OLEN, nr)))
l = append(l, nif)
// if to.ptr != frm.ptr { memmove( ... ) }
ne := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.ONE, nto, nfrm), nil, nil)
- ne.SetLikely(true)
+ ne.Likely = true
l = append(l, ne)
fn := syslook("memmove")
fn = substArgTypes(fn, nl.Type().Elem(), nl.Type().Elem())
nwid := ir.Node(temp(types.Types[types.TUINTPTR]))
setwid := ir.NewAssignStmt(base.Pos, nwid, conv(nlen, types.Types[types.TUINTPTR]))
- ne.PtrBody().Append(setwid)
+ ne.Body.Append(setwid)
nwid = ir.NewBinaryExpr(base.Pos, ir.OMUL, nwid, nodintconst(nl.Type().Elem().Width))
call := mkcall1(fn, nil, init, nto, nfrm, nwid)
- ne.PtrBody().Append(call)
+ ne.Body.Append(call)
typecheckslice(l, ctxStmt)
walkstmtlist(l)
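The statements assembled above amount to this source-level sketch (illustrative; the pointer test and memmove are built as OSPTR expressions and a runtime call):

	// Shape of the code copyany emits for copy(to, frm):
	func copySketch(to, frm []byte) int {
		n := len(to)
		if n > len(frm) {
			n = len(frm)
		}
		// if to.ptr != frm.ptr { memmove(to.ptr, frm.ptr, n*elemsize) }
		if n > 0 && &to[0] != &frm[0] {
			copy(to[:n], frm[:n]) // stands in for the memmove call
		}
		return n
	}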
@@ -3280,26 +3280,26 @@ func eqfor(t *types.Type) (n ir.Node, needsize bool) {
// The result of walkcompare MUST be assigned back to n, e.g.
// n.Left = walkcompare(n.Left, init)
func walkcompare(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
- if n.Left().Type().IsInterface() && n.Right().Type().IsInterface() && n.Left().Op() != ir.ONIL && n.Right().Op() != ir.ONIL {
+ if n.X.Type().IsInterface() && n.Y.Type().IsInterface() && n.X.Op() != ir.ONIL && n.Y.Op() != ir.ONIL {
return walkcompareInterface(n, init)
}
- if n.Left().Type().IsString() && n.Right().Type().IsString() {
+ if n.X.Type().IsString() && n.Y.Type().IsString() {
return walkcompareString(n, init)
}
- n.SetLeft(walkexpr(n.Left(), init))
- n.SetRight(walkexpr(n.Right(), init))
+ n.X = walkexpr(n.X, init)
+ n.Y = walkexpr(n.Y, init)
// Given mixed interface/concrete comparison,
// rewrite into types-equal && data-equal.
// This is efficient, avoids allocations, and avoids runtime calls.
- if n.Left().Type().IsInterface() != n.Right().Type().IsInterface() {
+ if n.X.Type().IsInterface() != n.Y.Type().IsInterface() {
// Preserve side-effects in case of short-circuiting; see #32187.
- l := cheapexpr(n.Left(), init)
- r := cheapexpr(n.Right(), init)
+ l := cheapexpr(n.X, init)
+ r := cheapexpr(n.Y, init)
// Swap so that l is the interface value and r is the concrete value.
- if n.Right().Type().IsInterface() {
+ if n.Y.Type().IsInterface() {
l, r = r, l
}
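A source-level analogue of the mixed rewrite (a sketch; the generated IR compares the interface's type word and data word directly rather than using a type assertion):

	// i == c, with i an interface and c a concrete int, behaves like:
	func mixedEq(i interface{}, c int) bool {
		v, ok := i.(int)    // types-equal: i's dynamic type vs. int
		return ok && v == c // data-equal: evaluated only when types match
	}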
@@ -3337,7 +3337,7 @@ func walkcompare(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
// Otherwise back end handles it.
// While we're here, decide whether to
// inline or call an eq alg.
- t := n.Left().Type()
+ t := n.X.Type()
var inline bool
maxcmpsize := int64(4)
@@ -3350,14 +3350,14 @@ func walkcompare(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
switch t.Kind() {
default:
if base.Debug.Libfuzzer != 0 && t.IsInteger() {
- n.SetLeft(cheapexpr(n.Left(), init))
- n.SetRight(cheapexpr(n.Right(), init))
+ n.X = cheapexpr(n.X, init)
+ n.Y = cheapexpr(n.Y, init)
// If exactly one comparison operand is
// constant, invoke the constcmp functions
// instead, and arrange for the constant
// operand to be the first argument.
- l, r := n.Left(), n.Right()
+ l, r := n.X, n.Y
if r.Op() == ir.OLITERAL {
l, r = r, l
}
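A minimal sketch of the canonicalization (the hook names follow the runtime's libfuzzer support, e.g. libfuzzerTraceConstCmp8; treat the exact signature as an assumption here):

	// For x == 42, the instrumentation call leads with the constant:
	//     libfuzzerTraceConstCmp8(42, uint64(x))
	// The l, r swap above is what enforces that argument order.
	func orderOperands(l, r uint64, rIsConst bool) (uint64, uint64) {
		if rIsConst {
			return r, l // constant operand first
		}
		return l, r
	}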
@@ -3403,13 +3403,13 @@ func walkcompare(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
inline = t.NumComponents(types.IgnoreBlankFields) <= 4
}
- cmpl := n.Left()
+ cmpl := n.X
for cmpl != nil && cmpl.Op() == ir.OCONVNOP {
- cmpl = cmpl.(*ir.ConvExpr).Left()
+ cmpl = cmpl.(*ir.ConvExpr).X
}
- cmpr := n.Right()
+ cmpr := n.Y
for cmpr != nil && cmpr.Op() == ir.OCONVNOP {
- cmpr = cmpr.(*ir.ConvExpr).Left()
+ cmpr = cmpr.(*ir.ConvExpr).X
}
// Chose not to inline. Call equality function directly.
@@ -3421,10 +3421,10 @@ func walkcompare(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
fn, needsize := eqfor(t)
call := ir.NewCallExpr(base.Pos, ir.OCALL, fn, nil)
- call.PtrList().Append(nodAddr(cmpl))
- call.PtrList().Append(nodAddr(cmpr))
+ call.Args.Append(nodAddr(cmpl))
+ call.Args.Append(nodAddr(cmpr))
if needsize {
- call.PtrList().Append(nodintconst(t.Width))
+ call.Args.Append(nodintconst(t.Width))
}
res := ir.Node(call)
if n.Op() != ir.OEQ {
@@ -3538,9 +3538,9 @@ func tracecmpArg(n ir.Node, t *types.Type, init *ir.Nodes) ir.Node {
}
func walkcompareInterface(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
- n.SetRight(cheapexpr(n.Right(), init))
- n.SetLeft(cheapexpr(n.Left(), init))
- eqtab, eqdata := eqinterface(n.Left(), n.Right())
+ n.Y = cheapexpr(n.Y, init)
+ n.X = cheapexpr(n.X, init)
+ eqtab, eqdata := eqinterface(n.X, n.Y)
var cmp ir.Node
if n.Op() == ir.OEQ {
cmp = ir.NewLogicalExpr(base.Pos, ir.OANDAND, eqtab, eqdata)
@@ -3555,21 +3555,21 @@ func walkcompareString(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
// Rewrite comparisons to short constant strings as length+byte-wise comparisons.
var cs, ncs ir.Node // const string, non-const string
switch {
- case ir.IsConst(n.Left(), constant.String) && ir.IsConst(n.Right(), constant.String):
+ case ir.IsConst(n.X, constant.String) && ir.IsConst(n.Y, constant.String):
// ignore; will be constant evaluated
- case ir.IsConst(n.Left(), constant.String):
- cs = n.Left()
- ncs = n.Right()
- case ir.IsConst(n.Right(), constant.String):
- cs = n.Right()
- ncs = n.Left()
+ case ir.IsConst(n.X, constant.String):
+ cs = n.X
+ ncs = n.Y
+ case ir.IsConst(n.Y, constant.String):
+ cs = n.Y
+ ncs = n.X
}
if cs != nil {
cmp := n.Op()
// Our comparison below assumes that the non-constant string
// is on the left hand side, so rewrite "" cmp x to x cmp "".
// See issue 24817.
- if ir.IsConst(n.Left(), constant.String) {
+ if ir.IsConst(n.X, constant.String) {
cmp = brrev(cmp)
}
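The rewrite being set up here replaces a runtime call with a length test plus byte-wise tests (sketch):

	// x == "ab" becomes, in effect:
	func eqAB(x string) bool {
		return len(x) == 2 && x[0] == 'a' && x[1] == 'b'
	}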
@@ -3652,9 +3652,9 @@ func walkcompareString(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
var r ir.Node
if n.Op() == ir.OEQ || n.Op() == ir.ONE {
// prepare for rewrite below
- n.SetLeft(cheapexpr(n.Left(), init))
- n.SetRight(cheapexpr(n.Right(), init))
- eqlen, eqmem := eqstring(n.Left(), n.Right())
+ n.X = cheapexpr(n.X, init)
+ n.Y = cheapexpr(n.Y, init)
+ eqlen, eqmem := eqstring(n.X, n.Y)
// quick check of len before full compare for == or !=.
// memequal then tests equality up to length len.
if n.Op() == ir.OEQ {
@@ -3667,7 +3667,7 @@ func walkcompareString(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
}
} else {
// sys_cmpstring(s1, s2) :: 0
- r = mkcall("cmpstring", types.Types[types.TINT], init, conv(n.Left(), types.Types[types.TSTRING]), conv(n.Right(), types.Types[types.TSTRING]))
+ r = mkcall("cmpstring", types.Types[types.TINT], init, conv(n.X, types.Types[types.TSTRING]), conv(n.Y, types.Types[types.TSTRING]))
r = ir.NewBinaryExpr(base.Pos, n.Op(), r, nodintconst(0))
}
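Source-level shape of both paths (a sketch; the byte loop stands in for the memequal reached through eqstring):

	// s == t: compare lengths first (eqlen), then the bytes (eqmem).
	func strEq(s, t string) bool {
		if len(s) != len(t) {
			return false
		}
		for i := 0; i < len(s); i++ { // memequal(s.ptr, t.ptr, len)
			if s[i] != t[i] {
				return false
			}
		}
		return true
	}
	// Ordered comparisons take the other branch: cmpstring(s, t) <op> 0.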
@@ -3702,10 +3702,10 @@ func bounded(n ir.Node, max int64) bool {
n := n.(*ir.BinaryExpr)
v := int64(-1)
switch {
- case smallintconst(n.Left()):
- v = ir.Int64Val(n.Left())
- case smallintconst(n.Right()):
- v = ir.Int64Val(n.Right())
+ case smallintconst(n.X):
+ v = ir.Int64Val(n.X)
+ case smallintconst(n.Y):
+ v = ir.Int64Val(n.Y)
if n.Op() == ir.OANDNOT {
v = ^v
if !sign {
@@ -3719,8 +3719,8 @@ func bounded(n ir.Node, max int64) bool {
case ir.OMOD:
n := n.(*ir.BinaryExpr)
- if !sign && smallintconst(n.Right()) {
- v := ir.Int64Val(n.Right())
+ if !sign && smallintconst(n.Y) {
+ v := ir.Int64Val(n.Y)
if 0 <= v && v <= max {
return true
}
@@ -3728,8 +3728,8 @@ func bounded(n ir.Node, max int64) bool {
case ir.ODIV:
n := n.(*ir.BinaryExpr)
- if !sign && smallintconst(n.Right()) {
- v := ir.Int64Val(n.Right())
+ if !sign && smallintconst(n.Y) {
+ v := ir.Int64Val(n.Y)
for bits > 0 && v >= 2 {
bits--
v >>= 1
@@ -3738,8 +3738,8 @@ func bounded(n ir.Node, max int64) bool {
case ir.ORSH:
n := n.(*ir.BinaryExpr)
- if !sign && smallintconst(n.Right()) {
- v := ir.Int64Val(n.Right())
+ if !sign && smallintconst(n.Y) {
+ v := ir.Int64Val(n.Y)
if v > int64(bits) {
return true
}
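bounded is used to elide bounds and shift checks; for instance (sketch), a mask proves the index in range:

	// i&7 is provably in [0, 8), so this index needs no bounds check.
	// Likewise x%8 for unsigned x, or x>>60 for 64-bit x (result < 16).
	func pick(table *[8]byte, i uint) byte {
		return table[i&7]
	}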
@@ -3756,7 +3756,7 @@ func bounded(n ir.Node, max int64) bool {
// usemethod checks interface method calls for uses of reflect.Type.Method.
func usemethod(n *ir.CallExpr) {
- t := n.Left().Type()
+ t := n.X.Type()
// Looking for either of:
// Method(int) reflect.Method
@@ -3812,28 +3812,28 @@ func usefield(n *ir.SelectorExpr) {
case ir.ODOT, ir.ODOTPTR:
break
}
- if n.Sym() == nil {
+ if n.Sel == nil {
// No field name. This DOTPTR was built by the compiler for access
// to runtime data structures. Ignore.
return
}
- t := n.Left().Type()
+ t := n.X.Type()
if t.IsPtr() {
t = t.Elem()
}
field := n.Selection
if field == nil {
- base.Fatalf("usefield %v %v without paramfld", n.Left().Type(), n.Sym())
+ base.Fatalf("usefield %v %v without paramfld", n.X.Type(), n.Sel)
}
- if field.Sym != n.Sym() || field.Offset != n.Offset() {
- base.Fatalf("field inconsistency: %v,%v != %v,%v", field.Sym, field.Offset, n.Sym(), n.Offset())
+ if field.Sym != n.Sel || field.Offset != n.Offset {
+ base.Fatalf("field inconsistency: %v,%v != %v,%v", field.Sym, field.Offset, n.Sel, n.Offset)
}
if !strings.Contains(field.Note, "go:\"track\"") {
return
}
- outer := n.Left().Type()
+ outer := n.X.Type()
if outer.IsPtr() {
outer = outer.Elem()
}
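usefield implements field tracking: a field opts in with a go:"track" note in its tag, and the compiler then records each use (sketch; requires the linker's field-tracking support):

	type counters struct {
		Hits int `go:"track"` // ODOT/ODOTPTR accesses to this field are recorded
	}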
@@ -3918,7 +3918,7 @@ func anySideEffects(n ir.Node) bool {
// Only possible side effect is division by zero.
case ir.ODIV, ir.OMOD:
n := n.(*ir.BinaryExpr)
- if n.Right().Op() != ir.OLITERAL || constant.Sign(n.Right().Val()) == 0 {
+ if n.Y.Op() != ir.OLITERAL || constant.Sign(n.Y.Val()) == 0 {
return true
}
@@ -3926,7 +3926,7 @@ func anySideEffects(n ir.Node) bool {
// but many makechan and makemap use size zero, which is definitely OK.
case ir.OMAKECHAN, ir.OMAKEMAP:
n := n.(*ir.MakeExpr)
- if !ir.IsConst(n.Left(), constant.Int) || constant.Sign(n.Left().Val()) != 0 {
+ if !ir.IsConst(n.Len, constant.Int) || constant.Sign(n.Len.Val()) != 0 {
return true
}
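Examples for the two cases (sketch):

	var _ = 10 / 2 // constant non-zero divisor: no side effect
	func div(x, y int) int { return x / y } // y unknown: may panic, so it counts
	var _ = make(chan int) // constant zero size: cannot panic
	func mk(n int) chan int { return make(chan int, n) } // may panic if n < 0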
@@ -3968,24 +3968,24 @@ func wrapCall(n *ir.CallExpr, init *ir.Nodes) ir.Node {
isBuiltinCall := n.Op() != ir.OCALLFUNC && n.Op() != ir.OCALLMETH && n.Op() != ir.OCALLINTER
// Turn f(a, b, []T{c, d, e}...) back into f(a, b, c, d, e).
- if !isBuiltinCall && n.IsDDD() {
- last := n.List().Len() - 1
- if va := n.List().Index(last); va.Op() == ir.OSLICELIT {
+ if !isBuiltinCall && n.IsDDD {
+ last := n.Args.Len() - 1
+ if va := n.Args.Index(last); va.Op() == ir.OSLICELIT {
va := va.(*ir.CompLitExpr)
- n.PtrList().Set(append(n.List().Slice()[:last], va.List().Slice()...))
- n.SetIsDDD(false)
+ n.Args.Set(append(n.Args.Slice()[:last], va.List.Slice()...))
+ n.IsDDD = false
}
}
// origArgs records which arguments are unsafe.Pointer-to-uintptr conversions, so the conversions can be rebuilt when building the wrapper's call.
- origArgs := make([]ir.Node, n.List().Len())
+ origArgs := make([]ir.Node, n.Args.Len())
var funcArgs []*ir.Field
- for i, arg := range n.List().Slice() {
+ for i, arg := range n.Args.Slice() {
s := lookupN("a", i)
- if !isBuiltinCall && arg.Op() == ir.OCONVNOP && arg.Type().IsUintptr() && arg.(*ir.ConvExpr).Left().Type().IsUnsafePtr() {
+ if !isBuiltinCall && arg.Op() == ir.OCONVNOP && arg.Type().IsUintptr() && arg.(*ir.ConvExpr).X.Type().IsUnsafePtr() {
origArgs[i] = arg
- arg = arg.(*ir.ConvExpr).Left()
- n.List().SetIndex(i, arg)
+ arg = arg.(*ir.ConvExpr).X
+ n.Args.SetIndex(i, arg)
}
funcArgs = append(funcArgs, symfield(s, arg.Type()))
}
@@ -4002,20 +4002,20 @@ func wrapCall(n *ir.CallExpr, init *ir.Nodes) ir.Node {
}
args[i] = ir.NewConvExpr(base.Pos, origArg.Op(), origArg.Type(), args[i])
}
- call := ir.NewCallExpr(base.Pos, n.Op(), n.Left(), args)
+ call := ir.NewCallExpr(base.Pos, n.Op(), n.X, args)
if !isBuiltinCall {
call.SetOp(ir.OCALL)
- call.SetIsDDD(n.IsDDD())
+ call.IsDDD = n.IsDDD
}
- fn.PtrBody().Set1(call)
+ fn.Body.Set1(call)
funcbody()
typecheckFunc(fn)
- typecheckslice(fn.Body().Slice(), ctxStmt)
+ typecheckslice(fn.Body.Slice(), ctxStmt)
Target.Decls = append(Target.Decls, fn)
- call = ir.NewCallExpr(base.Pos, ir.OCALL, fn.Nname, n.List().Slice())
+ call = ir.NewCallExpr(base.Pos, ir.OCALL, fn.Nname, n.Args.Slice())
return walkexpr(typecheck(call, ctxStmt), init)
}
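The wrapper exists so that an unsafe.Pointer argument stays a pointer (and thus stays live) at the go/defer site, with the uintptr conversion rebuilt inside the body (sketch; f is a placeholder, and "unsafe" is assumed imported):

	// go f(uintptr(unsafe.Pointer(p))) is wrapped roughly as:
	func f(uintptr) {}
	func wrapper(a0 unsafe.Pointer) { // parameter demoted from uintptr
		f(uintptr(a0)) // conversion reinstated from origArgs
	}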
@@ -4055,7 +4055,7 @@ func canMergeLoads() bool {
// isRuneCount reports whether n is of the form len([]rune(string)).
// These are optimized into a call to runtime.countrunes.
func isRuneCount(n ir.Node) bool {
- return base.Flag.N == 0 && !instrumenting && n.Op() == ir.OLEN && n.(*ir.UnaryExpr).Left().Op() == ir.OSTR2RUNES
+ return base.Flag.N == 0 && !instrumenting && n.Op() == ir.OLEN && n.(*ir.UnaryExpr).X.Op() == ir.OSTR2RUNES
}
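The recognized pattern (sketch):

	// With optimization on and no instrumentation, this compiles to a
	// runtime call that counts runes in place, never building the slice.
	func runeLen(s string) int {
		return len([]rune(s))
	}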
func walkCheckPtrAlignment(n *ir.ConvExpr, init *ir.Nodes, count ir.Node) ir.Node {
@@ -4079,8 +4079,8 @@ func walkCheckPtrAlignment(n *ir.ConvExpr, init *ir.Nodes, count ir.Node) ir.Node {
count = nodintconst(1)
}
- n.SetLeft(cheapexpr(n.Left(), init))
- init.Append(mkcall("checkptrAlignment", nil, init, convnop(n.Left(), types.Types[types.TUNSAFEPTR]), typename(elem), conv(count, types.Types[types.TUINTPTR])))
+ n.X = cheapexpr(n.X, init)
+ init.Append(mkcall("checkptrAlignment", nil, init, convnop(n.X, types.Types[types.TUNSAFEPTR]), typename(elem), conv(count, types.Types[types.TUINTPTR])))
return n
}
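What the inserted check catches (sketch): a conversion whose result is misaligned for the target type, or that would extend past the original allocation, fails at run time under -d=checkptr:

	func toUint64(buf *[16]byte) *uint64 {
		p := unsafe.Pointer(&buf[1]) // odd offset: not 8-byte aligned
		return (*uint64)(p)          // checkptrAlignment(p, elemtype, 1) runs first
	}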
@@ -4102,12 +4102,12 @@ func walkCheckPtrArithmetic(n *ir.ConvExpr, init *ir.Nodes) ir.Node {
// TODO(mdempsky): Make stricter. We only need to exempt
// reflect.Value.Pointer and reflect.Value.UnsafeAddr.
- switch n.Left().Op() {
+ switch n.X.Op() {
case ir.OCALLFUNC, ir.OCALLMETH, ir.OCALLINTER:
return n
}
- if n.Left().Op() == ir.ODOTPTR && isReflectHeaderDataField(n.Left()) {
+ if n.X.Op() == ir.ODOTPTR && isReflectHeaderDataField(n.X) {
return n
}
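And the construct that is instrumented (sketch): the result, together with the original unsafe.Pointer operands collected by the walk closure below, is handed to a runtime check (checkptrArithmetic, an assumption as to the exact name) that verifies the result still points into the same allocation:

	func offset(p unsafe.Pointer, off uintptr) unsafe.Pointer {
		return unsafe.Pointer(uintptr(p) + off) // wrapped by the checkptr call
	}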
@@ -4123,20 +4123,20 @@ func walkCheckPtrArithmetic(n *ir.ConvExpr, init *ir.Nodes) ir.Node {
switch n.Op() {
case ir.OADD:
n := n.(*ir.BinaryExpr)
- walk(n.Left())
- walk(n.Right())
+ walk(n.X)
+ walk(n.Y)
case ir.OSUB, ir.OANDNOT:
n := n.(*ir.BinaryExpr)
- walk(n.Left())
+ walk(n.X)
case ir.OCONVNOP:
n := n.(*ir.ConvExpr)
- if n.Left().Type().IsUnsafePtr() {
- n.SetLeft(cheapexpr(n.Left(), init))
- originals = append(originals, convnop(n.Left(), types.Types[types.TUNSAFEPTR]))
+ if n.X.Type().IsUnsafePtr() {
+ n.X = cheapexpr(n.X, init)
+ originals = append(originals, convnop(n.X, types.Types[types.TUNSAFEPTR]))
}
}
}
- walk(n.Left())
+ walk(n.X)
cheap := cheapexpr(n, init)