Diffstat (limited to 'src/cmd/compile/internal/gc/sinit.go')
-rw-r--r--   src/cmd/compile/internal/gc/sinit.go   738
1 file changed, 371 insertions(+), 367 deletions(-)
diff --git a/src/cmd/compile/internal/gc/sinit.go b/src/cmd/compile/internal/gc/sinit.go
index 212fcc022d..e30663cfbb 100644
--- a/src/cmd/compile/internal/gc/sinit.go
+++ b/src/cmd/compile/internal/gc/sinit.go
@@ -5,14 +5,17 @@
package gc
import (
+ "cmd/compile/internal/base"
+ "cmd/compile/internal/ir"
"cmd/compile/internal/types"
"cmd/internal/obj"
"fmt"
+ "go/constant"
)
type InitEntry struct {
- Xoffset int64 // struct, array only
- Expr *Node // bytes of run-time computed expressions
+ Xoffset int64 // struct, array only
+ Expr ir.Node // bytes of run-time computed expressions
}
type InitPlan struct {
@@ -26,21 +29,21 @@ type InitPlan struct {
type InitSchedule struct {
// out is the ordered list of dynamic initialization
// statements.
- out []*Node
+ out []ir.Node
- initplans map[*Node]*InitPlan
- inittemps map[*Node]*Node
+ initplans map[ir.Node]*InitPlan
+ inittemps map[ir.Node]ir.Node
}
-func (s *InitSchedule) append(n *Node) {
+func (s *InitSchedule) append(n ir.Node) {
s.out = append(s.out, n)
}
// staticInit adds an initialization statement n to the schedule.
-func (s *InitSchedule) staticInit(n *Node) {
+func (s *InitSchedule) staticInit(n ir.Node) {
if !s.tryStaticInit(n) {
- if Debug.P != 0 {
- Dump("nonstatic", n)
+ if base.Flag.Percent != 0 {
+ ir.Dump("nonstatic", n)
}
s.append(n)
}
@@ -48,112 +51,115 @@ func (s *InitSchedule) staticInit(n *Node) {
// tryStaticInit attempts to statically execute an initialization
// statement and reports whether it succeeded.
-func (s *InitSchedule) tryStaticInit(n *Node) bool {
+func (s *InitSchedule) tryStaticInit(n ir.Node) bool {
// Only worry about simple "l = r" assignments. Multiple
// variable/expression OAS2 assignments have already been
// replaced by multiple simple OAS assignments, and the other
// OAS2* assignments mostly necessitate dynamic execution
// anyway.
- if n.Op != OAS {
+ if n.Op() != ir.OAS {
return false
}
- if n.Left.isBlank() && candiscard(n.Right) {
+ if ir.IsBlank(n.Left()) && candiscard(n.Right()) {
return true
}
lno := setlineno(n)
- defer func() { lineno = lno }()
- return s.staticassign(n.Left, n.Right)
+ defer func() { base.Pos = lno }()
+ return s.staticassign(n.Left(), n.Right())
}
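
To make the static/dynamic split concrete, here is a minimal sketch (example mine, not part of this change) of the kind of package-level assignments tryStaticInit can and cannot execute at compile time:

package p

// Assumed illustration: the first two initializers can be written
// straight into the data section; the third needs run-time work and
// would be appended to the schedule's out list instead.
var a = 42              // OLITERAL: static
var b = [3]int{1, 2, 3} // composite literal of constants: static
var c = f()             // call: stays a dynamic init statement

func f() int { return a + 1 }
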
// like staticassign but we are copying an already
// initialized value r.
-func (s *InitSchedule) staticcopy(l *Node, r *Node) bool {
- if r.Op != ONAME {
+func (s *InitSchedule) staticcopy(l ir.Node, r ir.Node) bool {
+ if r.Op() != ir.ONAME && r.Op() != ir.OMETHEXPR {
return false
}
- if r.Class() == PFUNC {
+ if r.Class() == ir.PFUNC {
pfuncsym(l, r)
return true
}
- if r.Class() != PEXTERN || r.Sym.Pkg != localpkg {
+ if r.Class() != ir.PEXTERN || r.Sym().Pkg != ir.LocalPkg {
return false
}
- if r.Name.Defn == nil { // probably zeroed but perhaps supplied externally and of unknown value
+ if r.Name().Defn == nil { // probably zeroed but perhaps supplied externally and of unknown value
return false
}
- if r.Name.Defn.Op != OAS {
+ if r.Name().Defn.Op() != ir.OAS {
return false
}
- if r.Type.IsString() { // perhaps overwritten by cmd/link -X (#34675)
+ if r.Type().IsString() { // perhaps overwritten by cmd/link -X (#34675)
return false
}
orig := r
- r = r.Name.Defn.Right
+ r = r.Name().Defn.Right()
- for r.Op == OCONVNOP && !types.Identical(r.Type, l.Type) {
- r = r.Left
+ for r.Op() == ir.OCONVNOP && !types.Identical(r.Type(), l.Type()) {
+ r = r.Left()
}
- switch r.Op {
- case ONAME:
+ switch r.Op() {
+ case ir.ONAME, ir.OMETHEXPR:
if s.staticcopy(l, r) {
return true
}
// We may have skipped past one or more OCONVNOPs, so
// use conv to ensure r is assignable to l (#13263).
- s.append(nod(OAS, l, conv(r, l.Type)))
+ s.append(ir.Nod(ir.OAS, l, conv(r, l.Type())))
return true
- case OLITERAL:
+ case ir.ONIL:
+ return true
+
+ case ir.OLITERAL:
if isZero(r) {
return true
}
- litsym(l, r, int(l.Type.Width))
+ litsym(l, r, int(l.Type().Width))
return true
- case OADDR:
- if a := r.Left; a.Op == ONAME {
+ case ir.OADDR:
+ if a := r.Left(); a.Op() == ir.ONAME {
addrsym(l, a)
return true
}
- case OPTRLIT:
- switch r.Left.Op {
- case OARRAYLIT, OSLICELIT, OSTRUCTLIT, OMAPLIT:
+ case ir.OPTRLIT:
+ switch r.Left().Op() {
+ case ir.OARRAYLIT, ir.OSLICELIT, ir.OSTRUCTLIT, ir.OMAPLIT:
// copy pointer
addrsym(l, s.inittemps[r])
return true
}
- case OSLICELIT:
+ case ir.OSLICELIT:
// copy slice
a := s.inittemps[r]
- slicesym(l, a, r.Right.Int64Val())
+ slicesym(l, a, r.Right().Int64Val())
return true
- case OARRAYLIT, OSTRUCTLIT:
+ case ir.OARRAYLIT, ir.OSTRUCTLIT:
p := s.initplans[r]
- n := l.copy()
+ n := ir.Copy(l)
for i := range p.E {
e := &p.E[i]
- n.Xoffset = l.Xoffset + e.Xoffset
- n.Type = e.Expr.Type
- if e.Expr.Op == OLITERAL {
- litsym(n, e.Expr, int(n.Type.Width))
+ n.SetOffset(l.Offset() + e.Xoffset)
+ n.SetType(e.Expr.Type())
+ if e.Expr.Op() == ir.OLITERAL || e.Expr.Op() == ir.ONIL {
+ litsym(n, e.Expr, int(n.Type().Width))
continue
}
- ll := n.sepcopy()
+ ll := ir.SepCopy(n)
if s.staticcopy(ll, e.Expr) {
continue
}
// Requires computation, but we're
// copying someone else's computation.
- rr := orig.sepcopy()
- rr.Type = ll.Type
- rr.Xoffset += e.Xoffset
+ rr := ir.SepCopy(orig)
+ rr.SetType(ll.Type())
+ rr.SetOffset(rr.Offset() + e.Xoffset)
setlineno(rr)
- s.append(nod(OAS, ll, rr))
+ s.append(ir.Nod(ir.OAS, ll, rr))
}
return true
@@ -162,59 +168,61 @@ func (s *InitSchedule) staticcopy(l *Node, r *Node) bool {
return false
}
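
staticcopy handles the narrower case where the right-hand side is itself a package-level variable whose initializer is already statically known; a hedged sketch:

package p

// Assumed example: r's value is known at compile time, so l can be
// filled in by copying r's static data rather than emitting init code.
var r = [2]int64{7, 9}
var l = r
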
-func (s *InitSchedule) staticassign(l *Node, r *Node) bool {
- for r.Op == OCONVNOP {
- r = r.Left
+func (s *InitSchedule) staticassign(l ir.Node, r ir.Node) bool {
+ for r.Op() == ir.OCONVNOP {
+ r = r.Left()
}
- switch r.Op {
- case ONAME:
+ switch r.Op() {
+ case ir.ONAME, ir.OMETHEXPR:
return s.staticcopy(l, r)
- case OLITERAL:
+ case ir.ONIL:
+ return true
+
+ case ir.OLITERAL:
if isZero(r) {
return true
}
- litsym(l, r, int(l.Type.Width))
+ litsym(l, r, int(l.Type().Width))
return true
- case OADDR:
- var nam Node
- if stataddr(&nam, r.Left) {
- addrsym(l, &nam)
+ case ir.OADDR:
+ if nam := stataddr(r.Left()); nam != nil {
+ addrsym(l, nam)
return true
}
fallthrough
- case OPTRLIT:
- switch r.Left.Op {
- case OARRAYLIT, OSLICELIT, OMAPLIT, OSTRUCTLIT:
+ case ir.OPTRLIT:
+ switch r.Left().Op() {
+ case ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT, ir.OSTRUCTLIT:
// Init pointer.
- a := staticname(r.Left.Type)
+ a := staticname(r.Left().Type())
s.inittemps[r] = a
addrsym(l, a)
// Init underlying literal.
- if !s.staticassign(a, r.Left) {
- s.append(nod(OAS, a, r.Left))
+ if !s.staticassign(a, r.Left()) {
+ s.append(ir.Nod(ir.OAS, a, r.Left()))
}
return true
}
//dump("not static ptrlit", r);
- case OSTR2BYTES:
- if l.Class() == PEXTERN && r.Left.Op == OLITERAL {
- sval := r.Left.StringVal()
+ case ir.OSTR2BYTES:
+ if l.Class() == ir.PEXTERN && r.Left().Op() == ir.OLITERAL {
+ sval := r.Left().StringVal()
slicebytes(l, sval)
return true
}
- case OSLICELIT:
+ case ir.OSLICELIT:
s.initplan(r)
// Init slice.
- bound := r.Right.Int64Val()
- ta := types.NewArray(r.Type.Elem(), bound)
+ bound := r.Right().Int64Val()
+ ta := types.NewArray(r.Type().Elem(), bound)
ta.SetNoalg(true)
a := staticname(ta)
s.inittemps[r] = a
@@ -223,96 +231,97 @@ func (s *InitSchedule) staticassign(l *Node, r *Node) bool {
l = a
fallthrough
- case OARRAYLIT, OSTRUCTLIT:
+ case ir.OARRAYLIT, ir.OSTRUCTLIT:
s.initplan(r)
p := s.initplans[r]
- n := l.copy()
+ n := ir.Copy(l)
for i := range p.E {
e := &p.E[i]
- n.Xoffset = l.Xoffset + e.Xoffset
- n.Type = e.Expr.Type
- if e.Expr.Op == OLITERAL {
- litsym(n, e.Expr, int(n.Type.Width))
+ n.SetOffset(l.Offset() + e.Xoffset)
+ n.SetType(e.Expr.Type())
+ if e.Expr.Op() == ir.OLITERAL || e.Expr.Op() == ir.ONIL {
+ litsym(n, e.Expr, int(n.Type().Width))
continue
}
setlineno(e.Expr)
- a := n.sepcopy()
+ a := ir.SepCopy(n)
if !s.staticassign(a, e.Expr) {
- s.append(nod(OAS, a, e.Expr))
+ s.append(ir.Nod(ir.OAS, a, e.Expr))
}
}
return true
- case OMAPLIT:
+ case ir.OMAPLIT:
break
- case OCLOSURE:
+ case ir.OCLOSURE:
if hasemptycvars(r) {
- if Debug_closure > 0 {
- Warnl(r.Pos, "closure converted to global")
+ if base.Debug.Closure > 0 {
+ base.WarnfAt(r.Pos(), "closure converted to global")
}
// Closures with no captured variables are globals,
// so the assignment can be done at link time.
- pfuncsym(l, r.Func.Closure.Func.Nname)
+ pfuncsym(l, r.Func().Nname)
return true
}
closuredebugruntimecheck(r)
- case OCONVIFACE:
+ case ir.OCONVIFACE:
// This logic is mirrored in isStaticCompositeLiteral.
// If you change something here, change it there, and vice versa.
// Determine the underlying concrete type and value we are converting from.
val := r
- for val.Op == OCONVIFACE {
- val = val.Left
+ for val.Op() == ir.OCONVIFACE {
+ val = val.Left()
}
- if val.Type.IsInterface() {
+
+ if val.Type().IsInterface() {
// val is an interface type.
// If val is nil, we can statically initialize l;
// both words are zero and so there no work to do, so report success.
// If val is non-nil, we have no concrete type to record,
// and we won't be able to statically initialize its value, so report failure.
- return Isconst(val, CTNIL)
+ return val.Op() == ir.ONIL
}
- markTypeUsedInInterface(val.Type, l.Sym.Linksym())
+ markTypeUsedInInterface(val.Type(), l.Sym().Linksym())
- var itab *Node
- if l.Type.IsEmptyInterface() {
- itab = typename(val.Type)
+ var itab ir.Node
+ if l.Type().IsEmptyInterface() {
+ itab = typename(val.Type())
} else {
- itab = itabname(val.Type, l.Type)
+ itab = itabname(val.Type(), l.Type())
}
// Create a copy of l to modify while we emit data.
- n := l.copy()
+ n := ir.Copy(l)
// Emit itab, advance offset.
- addrsym(n, itab.Left) // itab is an OADDR node
- n.Xoffset += int64(Widthptr)
+ addrsym(n, itab.Left()) // itab is an OADDR node
+ n.SetOffset(n.Offset() + int64(Widthptr))
// Emit data.
- if isdirectiface(val.Type) {
- if Isconst(val, CTNIL) {
+ if isdirectiface(val.Type()) {
+ if val.Op() == ir.ONIL {
// Nil is zero, nothing to do.
return true
}
// Copy val directly into n.
- n.Type = val.Type
+ n.SetType(val.Type())
setlineno(val)
- a := n.sepcopy()
+ a := ir.SepCopy(n)
if !s.staticassign(a, val) {
- s.append(nod(OAS, a, val))
+ s.append(ir.Nod(ir.OAS, a, val))
}
} else {
// Construct temp to hold val, write pointer to temp into n.
- a := staticname(val.Type)
+ a := staticname(val.Type())
s.inittemps[val] = a
if !s.staticassign(a, val) {
- s.append(nod(OAS, a, val))
+ s.append(ir.Nod(ir.OAS, a, val))
}
addrsym(n, a)
}
@@ -358,29 +367,29 @@ var statuniqgen int // name generator for static temps
// staticname returns a name backed by a (writable) static data symbol.
// Use readonlystaticname for read-only node.
-func staticname(t *types.Type) *Node {
+func staticname(t *types.Type) ir.Node {
// Don't use lookupN; it interns the resulting string, but these are all unique.
- n := newname(lookup(fmt.Sprintf("%s%d", obj.StaticNamePref, statuniqgen)))
+ n := NewName(lookup(fmt.Sprintf("%s%d", obj.StaticNamePref, statuniqgen)))
statuniqgen++
- addvar(n, t, PEXTERN)
- n.Sym.Linksym().Set(obj.AttrLocal, true)
+ addvar(n, t, ir.PEXTERN)
+ n.Sym().Linksym().Set(obj.AttrLocal, true)
return n
}
// readonlystaticname returns a name backed by a (writable) static data symbol.
-func readonlystaticname(t *types.Type) *Node {
+func readonlystaticname(t *types.Type) ir.Node {
n := staticname(t)
n.MarkReadonly()
- n.Sym.Linksym().Set(obj.AttrContentAddressable, true)
+ n.Sym().Linksym().Set(obj.AttrContentAddressable, true)
return n
}
-func (n *Node) isSimpleName() bool {
- return n.Op == ONAME && n.Class() != PAUTOHEAP && n.Class() != PEXTERN
+func isSimpleName(n ir.Node) bool {
+ return (n.Op() == ir.ONAME || n.Op() == ir.OMETHEXPR) && n.Class() != ir.PAUTOHEAP && n.Class() != ir.PEXTERN
}
-func litas(l *Node, r *Node, init *Nodes) {
- a := nod(OAS, l, r)
+func litas(l ir.Node, r ir.Node, init *ir.Nodes) {
+ a := ir.Nod(ir.OAS, l, r)
a = typecheck(a, ctxStmt)
a = walkexpr(a, init)
init.Append(a)
@@ -396,19 +405,19 @@ const (
// getdyn calculates the initGenType for n.
// If top is false, getdyn is recursing.
-func getdyn(n *Node, top bool) initGenType {
- switch n.Op {
+func getdyn(n ir.Node, top bool) initGenType {
+ switch n.Op() {
default:
- if n.isGoConst() {
+ if isGoConst(n) {
return initConst
}
return initDynamic
- case OSLICELIT:
+ case ir.OSLICELIT:
if !top {
return initDynamic
}
- if n.Right.Int64Val()/4 > int64(n.List.Len()) {
+ if n.Right().Int64Val()/4 > int64(n.List().Len()) {
// <25% of entries have explicit values.
// Very rough estimation, it takes 4 bytes of instructions
// to initialize 1 byte of result. So don't use a static
@@ -418,16 +427,16 @@ func getdyn(n *Node, top bool) initGenType {
return initDynamic
}
- case OARRAYLIT, OSTRUCTLIT:
+ case ir.OARRAYLIT, ir.OSTRUCTLIT:
}
var mode initGenType
- for _, n1 := range n.List.Slice() {
- switch n1.Op {
- case OKEY:
- n1 = n1.Right
- case OSTRUCTKEY:
- n1 = n1.Left
+ for _, n1 := range n.List().Slice() {
+ switch n1.Op() {
+ case ir.OKEY:
+ n1 = n1.Right()
+ case ir.OSTRUCTKEY:
+ n1 = n1.Left()
}
mode |= getdyn(n1, false)
if mode == initDynamic|initConst {
@@ -438,42 +447,42 @@ func getdyn(n *Node, top bool) initGenType {
}
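
The <25% heuristic can be illustrated with a sparse slice literal (example assumed, not from the CL): only one of 101 entries is explicit, so a static backing array would cost far more data than a handful of stores, and getdyn reports initDynamic.

package p

// Assumed illustration of the getdyn density check.
var sparse = []byte{100: 1}    // bound 101, one explicit entry: 101/4 > 1, so dynamic
var dense = []byte{1, 2, 3, 4} // every entry explicit: a static array is worthwhile
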
// isStaticCompositeLiteral reports whether n is a compile-time constant.
-func isStaticCompositeLiteral(n *Node) bool {
- switch n.Op {
- case OSLICELIT:
+func isStaticCompositeLiteral(n ir.Node) bool {
+ switch n.Op() {
+ case ir.OSLICELIT:
return false
- case OARRAYLIT:
- for _, r := range n.List.Slice() {
- if r.Op == OKEY {
- r = r.Right
+ case ir.OARRAYLIT:
+ for _, r := range n.List().Slice() {
+ if r.Op() == ir.OKEY {
+ r = r.Right()
}
if !isStaticCompositeLiteral(r) {
return false
}
}
return true
- case OSTRUCTLIT:
- for _, r := range n.List.Slice() {
- if r.Op != OSTRUCTKEY {
- Fatalf("isStaticCompositeLiteral: rhs not OSTRUCTKEY: %v", r)
+ case ir.OSTRUCTLIT:
+ for _, r := range n.List().Slice() {
+ if r.Op() != ir.OSTRUCTKEY {
+ base.Fatalf("isStaticCompositeLiteral: rhs not OSTRUCTKEY: %v", r)
}
- if !isStaticCompositeLiteral(r.Left) {
+ if !isStaticCompositeLiteral(r.Left()) {
return false
}
}
return true
- case OLITERAL:
+ case ir.OLITERAL, ir.ONIL:
return true
- case OCONVIFACE:
+ case ir.OCONVIFACE:
// See staticassign's OCONVIFACE case for comments.
val := n
- for val.Op == OCONVIFACE {
- val = val.Left
+ for val.Op() == ir.OCONVIFACE {
+ val = val.Left()
}
- if val.Type.IsInterface() {
- return Isconst(val, CTNIL)
+ if val.Type().IsInterface() {
+ return val.Op() == ir.ONIL
}
- if isdirectiface(val.Type) && Isconst(val, CTNIL) {
+ if isdirectiface(val.Type()) && val.Op() == ir.ONIL {
return true
}
return isStaticCompositeLiteral(val)
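
As a hedged sketch of what isStaticCompositeLiteral accepts: arrays and structs whose leaves are all constants qualify, while any embedded slice literal (OSLICELIT) disqualifies the whole literal.

package p

type point struct{ x, y int }

// Assumed examples. The array of constant structs is a compile-time
// constant; the struct containing a slice literal is not, because the
// OSLICELIT case above always reports false.
var static = [2]point{{1, 2}, {3, 4}}
var notStatic = struct{ s []int }{s: []int{1}}
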
@@ -500,96 +509,96 @@ const (
// fixedlit handles struct, array, and slice literals.
// TODO: expand documentation.
-func fixedlit(ctxt initContext, kind initKind, n *Node, var_ *Node, init *Nodes) {
- isBlank := var_ == nblank
- var splitnode func(*Node) (a *Node, value *Node)
- switch n.Op {
- case OARRAYLIT, OSLICELIT:
+func fixedlit(ctxt initContext, kind initKind, n ir.Node, var_ ir.Node, init *ir.Nodes) {
+ isBlank := var_ == ir.BlankNode
+ var splitnode func(ir.Node) (a ir.Node, value ir.Node)
+ switch n.Op() {
+ case ir.OARRAYLIT, ir.OSLICELIT:
var k int64
- splitnode = func(r *Node) (*Node, *Node) {
- if r.Op == OKEY {
- k = indexconst(r.Left)
+ splitnode = func(r ir.Node) (ir.Node, ir.Node) {
+ if r.Op() == ir.OKEY {
+ k = indexconst(r.Left())
if k < 0 {
- Fatalf("fixedlit: invalid index %v", r.Left)
+ base.Fatalf("fixedlit: invalid index %v", r.Left())
}
- r = r.Right
+ r = r.Right()
}
- a := nod(OINDEX, var_, nodintconst(k))
+ a := ir.Nod(ir.OINDEX, var_, nodintconst(k))
k++
if isBlank {
- a = nblank
+ a = ir.BlankNode
}
return a, r
}
- case OSTRUCTLIT:
- splitnode = func(r *Node) (*Node, *Node) {
- if r.Op != OSTRUCTKEY {
- Fatalf("fixedlit: rhs not OSTRUCTKEY: %v", r)
+ case ir.OSTRUCTLIT:
+ splitnode = func(r ir.Node) (ir.Node, ir.Node) {
+ if r.Op() != ir.OSTRUCTKEY {
+ base.Fatalf("fixedlit: rhs not OSTRUCTKEY: %v", r)
}
- if r.Sym.IsBlank() || isBlank {
- return nblank, r.Left
+ if r.Sym().IsBlank() || isBlank {
+ return ir.BlankNode, r.Left()
}
setlineno(r)
- return nodSym(ODOT, var_, r.Sym), r.Left
+ return nodSym(ir.ODOT, var_, r.Sym()), r.Left()
}
default:
- Fatalf("fixedlit bad op: %v", n.Op)
+ base.Fatalf("fixedlit bad op: %v", n.Op())
}
- for _, r := range n.List.Slice() {
+ for _, r := range n.List().Slice() {
a, value := splitnode(r)
- if a == nblank && candiscard(value) {
+ if a == ir.BlankNode && candiscard(value) {
continue
}
- switch value.Op {
- case OSLICELIT:
+ switch value.Op() {
+ case ir.OSLICELIT:
if (kind == initKindStatic && ctxt == inNonInitFunction) || (kind == initKindDynamic && ctxt == inInitFunction) {
slicelit(ctxt, value, a, init)
continue
}
- case OARRAYLIT, OSTRUCTLIT:
+ case ir.OARRAYLIT, ir.OSTRUCTLIT:
fixedlit(ctxt, kind, value, a, init)
continue
}
- islit := value.isGoConst()
+ islit := isGoConst(value)
if (kind == initKindStatic && !islit) || (kind == initKindDynamic && islit) {
continue
}
// build list of assignments: var[index] = expr
setlineno(a)
- a = nod(OAS, a, value)
+ a = ir.Nod(ir.OAS, a, value)
a = typecheck(a, ctxStmt)
switch kind {
case initKindStatic:
genAsStatic(a)
case initKindDynamic, initKindLocalCode:
- a = orderStmtInPlace(a, map[string][]*Node{})
+ a = orderStmtInPlace(a, map[string][]ir.Node{})
a = walkstmt(a)
init.Append(a)
default:
- Fatalf("fixedlit: bad kind %d", kind)
+ base.Fatalf("fixedlit: bad kind %d", kind)
}
}
}
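
A rough source-level sketch of the splitting fixedlit performs (names and lowering are illustrative, not the compiler's actual output): each element or field becomes its own assignment, which is then routed to static data or to generated code depending on kind.

package p

type t struct{ a, b int }

// Assumed illustration. A literal such as
//
//	var v = t{a: 1, b: g()}
//
// is conceptually split into per-field assignments:
//
//	v.a = 1   // constant part, handled by initKindStatic
//	v.b = g() // dynamic part, emitted as init code (initKindDynamic)
var v = t{a: 1, b: g()}

func g() int { return 2 }
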
-func isSmallSliceLit(n *Node) bool {
- if n.Op != OSLICELIT {
+func isSmallSliceLit(n ir.Node) bool {
+ if n.Op() != ir.OSLICELIT {
return false
}
- r := n.Right
+ r := n.Right()
- return smallintconst(r) && (n.Type.Elem().Width == 0 || r.Int64Val() <= smallArrayBytes/n.Type.Elem().Width)
+ return smallintconst(r) && (n.Type().Elem().Width == 0 || r.Int64Val() <= smallArrayBytes/n.Type().Elem().Width)
}
-func slicelit(ctxt initContext, n *Node, var_ *Node, init *Nodes) {
+func slicelit(ctxt initContext, n ir.Node, var_ ir.Node, init *ir.Nodes) {
// make an array type corresponding the number of elements we have
- t := types.NewArray(n.Type.Elem(), n.Right.Int64Val())
+ t := types.NewArray(n.Type().Elem(), n.Right().Int64Val())
dowidth(t)
if ctxt == inNonInitFunction {
@@ -601,11 +610,11 @@ func slicelit(ctxt initContext, n *Node, var_ *Node, init *Nodes) {
// copy static to slice
var_ = typecheck(var_, ctxExpr|ctxAssign)
- var nam Node
- if !stataddr(&nam, var_) || nam.Class() != PEXTERN {
- Fatalf("slicelit: %v", var_)
+ nam := stataddr(var_)
+ if nam == nil || nam.Class() != ir.PEXTERN {
+ base.Fatalf("slicelit: %v", var_)
}
- slicesym(&nam, vstat, t.NumElem())
+ slicesym(nam, vstat, t.NumElem())
return
}
@@ -630,7 +639,7 @@ func slicelit(ctxt initContext, n *Node, var_ *Node, init *Nodes) {
// if the literal contains constants,
// make static initialized array (1),(2)
- var vstat *Node
+ var vstat ir.Node
mode := getdyn(n, true)
if mode&initConst != 0 && !isSmallSliceLit(n) {
@@ -646,51 +655,51 @@ func slicelit(ctxt initContext, n *Node, var_ *Node, init *Nodes) {
vauto := temp(types.NewPtr(t))
// set auto to point at new temp or heap (3 assign)
- var a *Node
+ var a ir.Node
if x := prealloc[n]; x != nil {
// temp allocated during order.go for dddarg
- if !types.Identical(t, x.Type) {
+ if !types.Identical(t, x.Type()) {
panic("dotdotdot base type does not match order's assigned type")
}
if vstat == nil {
- a = nod(OAS, x, nil)
+ a = ir.Nod(ir.OAS, x, nil)
a = typecheck(a, ctxStmt)
init.Append(a) // zero new temp
} else {
// Declare that we're about to initialize all of x.
// (Which happens at the *vauto = vstat below.)
- init.Append(nod(OVARDEF, x, nil))
+ init.Append(ir.Nod(ir.OVARDEF, x, nil))
}
- a = nod(OADDR, x, nil)
- } else if n.Esc == EscNone {
+ a = ir.Nod(ir.OADDR, x, nil)
+ } else if n.Esc() == EscNone {
a = temp(t)
if vstat == nil {
- a = nod(OAS, temp(t), nil)
+ a = ir.Nod(ir.OAS, temp(t), nil)
a = typecheck(a, ctxStmt)
init.Append(a) // zero new temp
- a = a.Left
+ a = a.Left()
} else {
- init.Append(nod(OVARDEF, a, nil))
+ init.Append(ir.Nod(ir.OVARDEF, a, nil))
}
- a = nod(OADDR, a, nil)
+ a = ir.Nod(ir.OADDR, a, nil)
} else {
- a = nod(ONEW, nil, nil)
- a.List.Set1(typenod(t))
+ a = ir.Nod(ir.ONEW, nil, nil)
+ a.PtrList().Set1(typenod(t))
}
- a = nod(OAS, vauto, a)
+ a = ir.Nod(ir.OAS, vauto, a)
a = typecheck(a, ctxStmt)
a = walkexpr(a, init)
init.Append(a)
if vstat != nil {
// copy static to heap (4)
- a = nod(ODEREF, vauto, nil)
+ a = ir.Nod(ir.ODEREF, vauto, nil)
- a = nod(OAS, a, vstat)
+ a = ir.Nod(ir.OAS, a, vstat)
a = typecheck(a, ctxStmt)
a = walkexpr(a, init)
init.Append(a)
@@ -698,25 +707,25 @@ func slicelit(ctxt initContext, n *Node, var_ *Node, init *Nodes) {
// put dynamics into array (5)
var index int64
- for _, value := range n.List.Slice() {
- if value.Op == OKEY {
- index = indexconst(value.Left)
+ for _, value := range n.List().Slice() {
+ if value.Op() == ir.OKEY {
+ index = indexconst(value.Left())
if index < 0 {
- Fatalf("slicelit: invalid index %v", value.Left)
+ base.Fatalf("slicelit: invalid index %v", value.Left())
}
- value = value.Right
+ value = value.Right()
}
- a := nod(OINDEX, vauto, nodintconst(index))
+ a := ir.Nod(ir.OINDEX, vauto, nodintconst(index))
a.SetBounded(true)
index++
// TODO need to check bounds?
- switch value.Op {
- case OSLICELIT:
+ switch value.Op() {
+ case ir.OSLICELIT:
break
- case OARRAYLIT, OSTRUCTLIT:
+ case ir.OARRAYLIT, ir.OSTRUCTLIT:
k := initKindDynamic
if vstat == nil {
// Generate both static and dynamic initializations.
@@ -727,43 +736,43 @@ func slicelit(ctxt initContext, n *Node, var_ *Node, init *Nodes) {
continue
}
- if vstat != nil && value.isGoConst() { // already set by copy from static value
+ if vstat != nil && isGoConst(value) { // already set by copy from static value
continue
}
// build list of vauto[c] = expr
setlineno(value)
- a = nod(OAS, a, value)
+ a = ir.Nod(ir.OAS, a, value)
a = typecheck(a, ctxStmt)
- a = orderStmtInPlace(a, map[string][]*Node{})
+ a = orderStmtInPlace(a, map[string][]ir.Node{})
a = walkstmt(a)
init.Append(a)
}
// make slice out of heap (6)
- a = nod(OAS, var_, nod(OSLICE, vauto, nil))
+ a = ir.Nod(ir.OAS, var_, ir.Nod(ir.OSLICE, vauto, nil))
a = typecheck(a, ctxStmt)
- a = orderStmtInPlace(a, map[string][]*Node{})
+ a = orderStmtInPlace(a, map[string][]ir.Node{})
a = walkstmt(a)
init.Append(a)
}
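
For orientation, a hedged source-level sketch of steps (1)-(6) that slicelit walks through for an in-function slice literal; the variable names below are illustrative only.

package p

// Assumed illustration of how s := []int{1, 2, i} is lowered:
//
//	var statArr = [3]int{1, 2, 0} // (1),(2) static array holding the constant part
//
//	var autoArr [3]int // new temp (or heap object)
//	auto := &autoArr   // (3) auto points at it
//	*auto = statArr    // (4) copy static data
//	(*auto)[2] = i     // (5) store the dynamic entries
//	s := (*auto)[:]    // (6) make the slice out of it
func makeSlice(i int) []int {
	return []int{1, 2, i}
}
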
-func maplit(n *Node, m *Node, init *Nodes) {
+func maplit(n ir.Node, m ir.Node, init *ir.Nodes) {
// make the map var
- a := nod(OMAKE, nil, nil)
- a.Esc = n.Esc
- a.List.Set2(typenod(n.Type), nodintconst(int64(n.List.Len())))
+ a := ir.Nod(ir.OMAKE, nil, nil)
+ a.SetEsc(n.Esc())
+ a.PtrList().Set2(typenod(n.Type()), nodintconst(int64(n.List().Len())))
litas(m, a, init)
- entries := n.List.Slice()
+ entries := n.List().Slice()
// The order pass already removed any dynamic (runtime-computed) entries.
// All remaining entries are static. Double-check that.
for _, r := range entries {
- if !isStaticCompositeLiteral(r.Left) || !isStaticCompositeLiteral(r.Right) {
- Fatalf("maplit: entry is not a literal: %v", r)
+ if !isStaticCompositeLiteral(r.Left()) || !isStaticCompositeLiteral(r.Right()) {
+ base.Fatalf("maplit: entry is not a literal: %v", r)
}
}
@@ -771,8 +780,8 @@ func maplit(n *Node, m *Node, init *Nodes) {
// For a large number of entries, put them in an array and loop.
// build types [count]Tindex and [count]Tvalue
- tk := types.NewArray(n.Type.Key(), int64(len(entries)))
- te := types.NewArray(n.Type.Elem(), int64(len(entries)))
+ tk := types.NewArray(n.Type().Key(), int64(len(entries)))
+ te := types.NewArray(n.Type().Elem(), int64(len(entries)))
tk.SetNoalg(true)
te.SetNoalg(true)
@@ -784,11 +793,11 @@ func maplit(n *Node, m *Node, init *Nodes) {
vstatk := readonlystaticname(tk)
vstate := readonlystaticname(te)
- datak := nod(OARRAYLIT, nil, nil)
- datae := nod(OARRAYLIT, nil, nil)
+ datak := ir.Nod(ir.OARRAYLIT, nil, nil)
+ datae := ir.Nod(ir.OARRAYLIT, nil, nil)
for _, r := range entries {
- datak.List.Append(r.Left)
- datae.List.Append(r.Right)
+ datak.PtrList().Append(r.Left())
+ datae.PtrList().Append(r.Right())
}
fixedlit(inInitFunction, initKindStatic, datak, vstatk, init)
fixedlit(inInitFunction, initKindStatic, datae, vstate, init)
@@ -797,22 +806,22 @@ func maplit(n *Node, m *Node, init *Nodes) {
// for i = 0; i < len(vstatk); i++ {
// map[vstatk[i]] = vstate[i]
// }
- i := temp(types.Types[TINT])
- rhs := nod(OINDEX, vstate, i)
+ i := temp(types.Types[types.TINT])
+ rhs := ir.Nod(ir.OINDEX, vstate, i)
rhs.SetBounded(true)
- kidx := nod(OINDEX, vstatk, i)
+ kidx := ir.Nod(ir.OINDEX, vstatk, i)
kidx.SetBounded(true)
- lhs := nod(OINDEX, m, kidx)
+ lhs := ir.Nod(ir.OINDEX, m, kidx)
- zero := nod(OAS, i, nodintconst(0))
- cond := nod(OLT, i, nodintconst(tk.NumElem()))
- incr := nod(OAS, i, nod(OADD, i, nodintconst(1)))
- body := nod(OAS, lhs, rhs)
+ zero := ir.Nod(ir.OAS, i, nodintconst(0))
+ cond := ir.Nod(ir.OLT, i, nodintconst(tk.NumElem()))
+ incr := ir.Nod(ir.OAS, i, ir.Nod(ir.OADD, i, nodintconst(1)))
+ body := ir.Nod(ir.OAS, lhs, rhs)
- loop := nod(OFOR, cond, incr)
- loop.Nbody.Set1(body)
- loop.Ninit.Set1(zero)
+ loop := ir.Nod(ir.OFOR, cond, incr)
+ loop.PtrBody().Set1(body)
+ loop.PtrInit().Set1(zero)
loop = typecheck(loop, ctxStmt)
loop = walkstmt(loop)
@@ -824,95 +833,95 @@ func maplit(n *Node, m *Node, init *Nodes) {
// Build list of var[c] = expr.
// Use temporaries so that mapassign1 can have addressable key, elem.
// TODO(josharian): avoid map key temporaries for mapfast_* assignments with literal keys.
- tmpkey := temp(m.Type.Key())
- tmpelem := temp(m.Type.Elem())
+ tmpkey := temp(m.Type().Key())
+ tmpelem := temp(m.Type().Elem())
for _, r := range entries {
- index, elem := r.Left, r.Right
+ index, elem := r.Left(), r.Right()
setlineno(index)
- a := nod(OAS, tmpkey, index)
+ a := ir.Nod(ir.OAS, tmpkey, index)
a = typecheck(a, ctxStmt)
a = walkstmt(a)
init.Append(a)
setlineno(elem)
- a = nod(OAS, tmpelem, elem)
+ a = ir.Nod(ir.OAS, tmpelem, elem)
a = typecheck(a, ctxStmt)
a = walkstmt(a)
init.Append(a)
setlineno(tmpelem)
- a = nod(OAS, nod(OINDEX, m, tmpkey), tmpelem)
+ a = ir.Nod(ir.OAS, ir.Nod(ir.OINDEX, m, tmpkey), tmpelem)
a = typecheck(a, ctxStmt)
a = walkstmt(a)
init.Append(a)
}
- a = nod(OVARKILL, tmpkey, nil)
+ a = ir.Nod(ir.OVARKILL, tmpkey, nil)
a = typecheck(a, ctxStmt)
init.Append(a)
- a = nod(OVARKILL, tmpelem, nil)
+ a = ir.Nod(ir.OVARKILL, tmpelem, nil)
a = typecheck(a, ctxStmt)
init.Append(a)
}
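
A sketch of the large-map path described in the loop comment above (array names are illustrative): the constant keys and elements are laid out in two read-only arrays and inserted with a loop, rather than one assignment per entry.

package p

// Assumed illustration of maplit's loop strategy for a literal with many
// constant entries; conceptually the compiler emits something like:
//
//	var vstatk = [...]string{"a", "b" /* ... */}
//	var vstate = [...]int{1, 2 /* ... */}
//	m := make(map[string]int, len(vstatk))
//	for i := 0; i < len(vstatk); i++ {
//		m[vstatk[i]] = vstate[i]
//	}
var m = map[string]int{
	"a": 1, "b": 2, "c": 3, "d": 4, // ... enough entries to take the loop path
}
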
-func anylit(n *Node, var_ *Node, init *Nodes) {
- t := n.Type
- switch n.Op {
+func anylit(n ir.Node, var_ ir.Node, init *ir.Nodes) {
+ t := n.Type()
+ switch n.Op() {
default:
- Fatalf("anylit: not lit, op=%v node=%v", n.Op, n)
+ base.Fatalf("anylit: not lit, op=%v node=%v", n.Op(), n)
- case ONAME:
- a := nod(OAS, var_, n)
+ case ir.ONAME, ir.OMETHEXPR:
+ a := ir.Nod(ir.OAS, var_, n)
a = typecheck(a, ctxStmt)
init.Append(a)
- case OPTRLIT:
+ case ir.OPTRLIT:
if !t.IsPtr() {
- Fatalf("anylit: not ptr")
+ base.Fatalf("anylit: not ptr")
}
- var r *Node
- if n.Right != nil {
+ var r ir.Node
+ if n.Right() != nil {
// n.Right is stack temporary used as backing store.
- init.Append(nod(OAS, n.Right, nil)) // zero backing store, just in case (#18410)
- r = nod(OADDR, n.Right, nil)
+ init.Append(ir.Nod(ir.OAS, n.Right(), nil)) // zero backing store, just in case (#18410)
+ r = ir.Nod(ir.OADDR, n.Right(), nil)
r = typecheck(r, ctxExpr)
} else {
- r = nod(ONEW, nil, nil)
+ r = ir.Nod(ir.ONEW, nil, nil)
r.SetTypecheck(1)
- r.Type = t
- r.Esc = n.Esc
+ r.SetType(t)
+ r.SetEsc(n.Esc())
}
r = walkexpr(r, init)
- a := nod(OAS, var_, r)
+ a := ir.Nod(ir.OAS, var_, r)
a = typecheck(a, ctxStmt)
init.Append(a)
- var_ = nod(ODEREF, var_, nil)
+ var_ = ir.Nod(ir.ODEREF, var_, nil)
var_ = typecheck(var_, ctxExpr|ctxAssign)
- anylit(n.Left, var_, init)
+ anylit(n.Left(), var_, init)
- case OSTRUCTLIT, OARRAYLIT:
+ case ir.OSTRUCTLIT, ir.OARRAYLIT:
if !t.IsStruct() && !t.IsArray() {
- Fatalf("anylit: not struct/array")
+ base.Fatalf("anylit: not struct/array")
}
- if var_.isSimpleName() && n.List.Len() > 4 {
+ if isSimpleName(var_) && n.List().Len() > 4 {
// lay out static data
vstat := readonlystaticname(t)
ctxt := inInitFunction
- if n.Op == OARRAYLIT {
+ if n.Op() == ir.OARRAYLIT {
ctxt = inNonInitFunction
}
fixedlit(ctxt, initKindStatic, n, vstat, init)
// copy static to var
- a := nod(OAS, var_, vstat)
+ a := ir.Nod(ir.OAS, var_, vstat)
a = typecheck(a, ctxStmt)
a = walkexpr(a, init)
@@ -924,14 +933,14 @@ func anylit(n *Node, var_ *Node, init *Nodes) {
}
var components int64
- if n.Op == OARRAYLIT {
+ if n.Op() == ir.OARRAYLIT {
components = t.NumElem()
} else {
components = int64(t.NumFields())
}
// initialization of an array or struct with unspecified components (missing fields or arrays)
- if var_.isSimpleName() || int64(n.List.Len()) < components {
- a := nod(OAS, var_, nil)
+ if isSimpleName(var_) || int64(n.List().Len()) < components {
+ a := ir.Nod(ir.OAS, var_, nil)
a = typecheck(a, ctxStmt)
a = walkexpr(a, init)
init.Append(a)
@@ -939,149 +948,150 @@ func anylit(n *Node, var_ *Node, init *Nodes) {
fixedlit(inInitFunction, initKindLocalCode, n, var_, init)
- case OSLICELIT:
+ case ir.OSLICELIT:
slicelit(inInitFunction, n, var_, init)
- case OMAPLIT:
+ case ir.OMAPLIT:
if !t.IsMap() {
- Fatalf("anylit: not map")
+ base.Fatalf("anylit: not map")
}
maplit(n, var_, init)
}
}
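
One concrete case the OSTRUCTLIT/OARRAYLIT branch above optimizes (example assumed): a literal with more than four listed elements assigned to a simple local gets a read-only static copy plus a single assignment, rather than element-by-element stores.

package p

type big struct{ a, b, c, d, e int }

// Assumed illustration: with more than 4 elements and a simple
// destination, anylit lays the literal out as read-only static data and
// emits one copy, roughly:
//
//	var statBig = big{1, 2, 3, 4, 5} // read-only static data
//	v = statBig                      // single copy instead of five stores
func useBig() big {
	v := big{1, 2, 3, 4, 5}
	return v
}
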
-func oaslit(n *Node, init *Nodes) bool {
- if n.Left == nil || n.Right == nil {
+func oaslit(n ir.Node, init *ir.Nodes) bool {
+ if n.Left() == nil || n.Right() == nil {
// not a special composite literal assignment
return false
}
- if n.Left.Type == nil || n.Right.Type == nil {
+ if n.Left().Type() == nil || n.Right().Type() == nil {
// not a special composite literal assignment
return false
}
- if !n.Left.isSimpleName() {
+ if !isSimpleName(n.Left()) {
// not a special composite literal assignment
return false
}
- if !types.Identical(n.Left.Type, n.Right.Type) {
+ if !types.Identical(n.Left().Type(), n.Right().Type()) {
// not a special composite literal assignment
return false
}
- switch n.Right.Op {
+ switch n.Right().Op() {
default:
// not a special composite literal assignment
return false
- case OSTRUCTLIT, OARRAYLIT, OSLICELIT, OMAPLIT:
- if vmatch1(n.Left, n.Right) {
+ case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT:
+ if vmatch1(n.Left(), n.Right()) {
// not a special composite literal assignment
return false
}
- anylit(n.Right, n.Left, init)
+ anylit(n.Right(), n.Left(), init)
}
- n.Op = OEMPTY
- n.Right = nil
+ n.SetOp(ir.OEMPTY)
+ n.SetRight(nil)
return true
}
-func getlit(lit *Node) int {
+func getlit(lit ir.Node) int {
if smallintconst(lit) {
return int(lit.Int64Val())
}
return -1
}
-// stataddr sets nam to the static address of n and reports whether it succeeded.
-func stataddr(nam *Node, n *Node) bool {
+// stataddr returns the static address of n, if n has one, or else nil.
+func stataddr(n ir.Node) ir.Node {
if n == nil {
- return false
+ return nil
}
- switch n.Op {
- case ONAME:
- *nam = *n
- return true
+ switch n.Op() {
+ case ir.ONAME, ir.OMETHEXPR:
+ return ir.SepCopy(n)
- case ODOT:
- if !stataddr(nam, n.Left) {
+ case ir.ODOT:
+ nam := stataddr(n.Left())
+ if nam == nil {
break
}
- nam.Xoffset += n.Xoffset
- nam.Type = n.Type
- return true
+ nam.SetOffset(nam.Offset() + n.Offset())
+ nam.SetType(n.Type())
+ return nam
- case OINDEX:
- if n.Left.Type.IsSlice() {
+ case ir.OINDEX:
+ if n.Left().Type().IsSlice() {
break
}
- if !stataddr(nam, n.Left) {
+ nam := stataddr(n.Left())
+ if nam == nil {
break
}
- l := getlit(n.Right)
+ l := getlit(n.Right())
if l < 0 {
break
}
// Check for overflow.
- if n.Type.Width != 0 && thearch.MAXWIDTH/n.Type.Width <= int64(l) {
+ if n.Type().Width != 0 && thearch.MAXWIDTH/n.Type().Width <= int64(l) {
break
}
- nam.Xoffset += int64(l) * n.Type.Width
- nam.Type = n.Type
- return true
+ nam.SetOffset(nam.Offset() + int64(l)*n.Type().Width)
+ nam.SetType(n.Type())
+ return nam
}
- return false
+ return nil
}
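
The offset arithmetic in stataddr can be illustrated with a short example (field layout assumed for a 64-bit target): selecting a field adds its offset, and indexing by a constant adds index times element width, so the whole address stays symbol plus constant.

package p

type pair struct {
	a int64 // offset 0
	b int64 // offset 8 (assumed 64-bit layout)
}

var arr [8]pair

// Assumed illustration: both addresses resolve to the symbol arr plus a
// constant offset, which is exactly what stataddr computes.
var pb = &arr[3].b // offset 3*16 + 8 = 56 bytes into arr
var p0 = &arr[0].a // offset 0
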
-func (s *InitSchedule) initplan(n *Node) {
+func (s *InitSchedule) initplan(n ir.Node) {
if s.initplans[n] != nil {
return
}
p := new(InitPlan)
s.initplans[n] = p
- switch n.Op {
+ switch n.Op() {
default:
- Fatalf("initplan")
+ base.Fatalf("initplan")
- case OARRAYLIT, OSLICELIT:
+ case ir.OARRAYLIT, ir.OSLICELIT:
var k int64
- for _, a := range n.List.Slice() {
- if a.Op == OKEY {
- k = indexconst(a.Left)
+ for _, a := range n.List().Slice() {
+ if a.Op() == ir.OKEY {
+ k = indexconst(a.Left())
if k < 0 {
- Fatalf("initplan arraylit: invalid index %v", a.Left)
+ base.Fatalf("initplan arraylit: invalid index %v", a.Left())
}
- a = a.Right
+ a = a.Right()
}
- s.addvalue(p, k*n.Type.Elem().Width, a)
+ s.addvalue(p, k*n.Type().Elem().Width, a)
k++
}
- case OSTRUCTLIT:
- for _, a := range n.List.Slice() {
- if a.Op != OSTRUCTKEY {
- Fatalf("initplan structlit")
+ case ir.OSTRUCTLIT:
+ for _, a := range n.List().Slice() {
+ if a.Op() != ir.OSTRUCTKEY {
+ base.Fatalf("initplan structlit")
}
- if a.Sym.IsBlank() {
+ if a.Sym().IsBlank() {
continue
}
- s.addvalue(p, a.Xoffset, a.Left)
+ s.addvalue(p, a.Offset(), a.Left())
}
- case OMAPLIT:
- for _, a := range n.List.Slice() {
- if a.Op != OKEY {
- Fatalf("initplan maplit")
+ case ir.OMAPLIT:
+ for _, a := range n.List().Slice() {
+ if a.Op() != ir.OKEY {
+ base.Fatalf("initplan maplit")
}
- s.addvalue(p, -1, a.Right)
+ s.addvalue(p, -1, a.Right())
}
}
}
-func (s *InitSchedule) addvalue(p *InitPlan, xoffset int64, n *Node) {
+func (s *InitSchedule) addvalue(p *InitPlan, xoffset int64, n ir.Node) {
// special case: zero can be dropped entirely
if isZero(n) {
return
@@ -1103,31 +1113,25 @@ func (s *InitSchedule) addvalue(p *InitPlan, xoffset int64, n *Node) {
p.E = append(p.E, InitEntry{Xoffset: xoffset, Expr: n})
}
-func isZero(n *Node) bool {
- switch n.Op {
- case OLITERAL:
- switch u := n.Val().U.(type) {
+func isZero(n ir.Node) bool {
+ switch n.Op() {
+ case ir.ONIL:
+ return true
+
+ case ir.OLITERAL:
+ switch u := n.Val(); u.Kind() {
+ case constant.String:
+ return constant.StringVal(u) == ""
+ case constant.Bool:
+ return !constant.BoolVal(u)
default:
- Dump("unexpected literal", n)
- Fatalf("isZero")
- case *NilVal:
- return true
- case string:
- return u == ""
- case bool:
- return !u
- case *Mpint:
- return u.CmpInt64(0) == 0
- case *Mpflt:
- return u.CmpFloat64(0) == 0
- case *Mpcplx:
- return u.Real.CmpFloat64(0) == 0 && u.Imag.CmpFloat64(0) == 0
+ return constant.Sign(u) == 0
}
- case OARRAYLIT:
- for _, n1 := range n.List.Slice() {
- if n1.Op == OKEY {
- n1 = n1.Right
+ case ir.OARRAYLIT:
+ for _, n1 := range n.List().Slice() {
+ if n1.Op() == ir.OKEY {
+ n1 = n1.Right()
}
if !isZero(n1) {
return false
@@ -1135,9 +1139,9 @@ func isZero(n *Node) bool {
}
return true
- case OSTRUCTLIT:
- for _, n1 := range n.List.Slice() {
- if !isZero(n1.Left) {
+ case ir.OSTRUCTLIT:
+ for _, n1 := range n.List().Slice() {
+ if !isZero(n1.Left()) {
return false
}
}
@@ -1147,26 +1151,26 @@ func isZero(n *Node) bool {
return false
}
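
The rewritten OLITERAL case relies on go/constant instead of the old Mpint/Mpflt wrappers. A small stand-alone sketch of the same checks against a bare constant.Value (assumed, not compiler code):

package main

import (
	"fmt"
	"go/constant"
)

// isZeroConst mirrors the OLITERAL logic above: empty strings, false
// booleans, and numeric values with sign 0 count as zero.
func isZeroConst(v constant.Value) bool {
	switch v.Kind() {
	case constant.String:
		return constant.StringVal(v) == ""
	case constant.Bool:
		return !constant.BoolVal(v)
	default:
		return constant.Sign(v) == 0
	}
}

func main() {
	fmt.Println(isZeroConst(constant.MakeString("")))  // true
	fmt.Println(isZeroConst(constant.MakeInt64(0)))    // true
	fmt.Println(isZeroConst(constant.MakeFloat64(.5))) // false
}
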
-func isvaluelit(n *Node) bool {
- return n.Op == OARRAYLIT || n.Op == OSTRUCTLIT
+func isvaluelit(n ir.Node) bool {
+ return n.Op() == ir.OARRAYLIT || n.Op() == ir.OSTRUCTLIT
}
-func genAsStatic(as *Node) {
- if as.Left.Type == nil {
- Fatalf("genAsStatic as.Left not typechecked")
+func genAsStatic(as ir.Node) {
+ if as.Left().Type() == nil {
+ base.Fatalf("genAsStatic as.Left not typechecked")
}
- var nam Node
- if !stataddr(&nam, as.Left) || (nam.Class() != PEXTERN && as.Left != nblank) {
- Fatalf("genAsStatic: lhs %v", as.Left)
+ nam := stataddr(as.Left())
+ if nam == nil || (nam.Class() != ir.PEXTERN && as.Left() != ir.BlankNode) {
+ base.Fatalf("genAsStatic: lhs %v", as.Left())
}
switch {
- case as.Right.Op == OLITERAL:
- litsym(&nam, as.Right, int(as.Right.Type.Width))
- case as.Right.Op == ONAME && as.Right.Class() == PFUNC:
- pfuncsym(&nam, as.Right)
+ case as.Right().Op() == ir.OLITERAL:
+ litsym(nam, as.Right(), int(as.Right().Type().Width))
+ case (as.Right().Op() == ir.ONAME || as.Right().Op() == ir.OMETHEXPR) && as.Right().Class() == ir.PFUNC:
+ pfuncsym(nam, as.Right())
default:
- Fatalf("genAsStatic: rhs %v", as.Right)
+ base.Fatalf("genAsStatic: rhs %v", as.Right())
}
}