aboutsummaryrefslogtreecommitdiff
path: root/src/cmd/compile/internal/walk/complit.go
diff options
context:
space:
mode:
authorRuss Cox <rsc@golang.org>2020-12-23 01:07:07 -0500
committerRuss Cox <rsc@golang.org>2020-12-23 06:39:50 +0000
commit3f04d964ab05c31a41efa1590a8303376901ab60 (patch)
treefb7f69f4dea17cf19d310cf0c9d3960be1421817 /src/cmd/compile/internal/walk/complit.go
parente4895ab4c0eb44de6ddc5dc8d860a827b20d2781 (diff)
downloadgo-3f04d964ab05c31a41efa1590a8303376901ab60.tar.gz
go-3f04d964ab05c31a41efa1590a8303376901ab60.zip
[dev.regabi] cmd/compile: split up walkexpr1, walkstmt [generated]
walkexpr1 is the second largest non-machine-generated function in the compiler. weighing in at 1,164 lines. Since we are destroying the git blame history anyway, now is a good time to split each different case into its own function, making future work on this function more manageable. Do the same to walkstmt too for consistency, even though it is a paltry 259 lines. [git-generate] cd src/cmd/compile/internal/walk rf ' mv addstr walkAddString mv walkCall walkCall1 mv walkpartialcall walkCallPart mv walkclosure walkClosure mv walkrange walkRange mv walkselect walkSelect mv walkselectcases walkSelectCases mv walkswitch walkSwitch mv walkExprSwitch walkSwitchExpr mv walkTypeSwitch walkSwitchType mv walkstmt walkStmt mv walkstmtlist walkStmtList mv walkexprlist walkExprList mv walkexprlistsafe walkExprListSafe mv walkexprlistcheap walkExprListCheap mv walkexpr walkExpr mv walkexpr1 walkExpr1 mv walkprint walkPrint mv walkappend walkAppend mv walkcompare walkCompare mv walkcompareInterface walkCompareInterface mv walkcompareString walkCompareString mv appendslice appendSlice mv cheapexpr cheapExpr mv copyany walkCopy mv copyexpr copyExpr mv eqfor eqFor mv extendslice extendSlice mv finishcompare finishCompare mv safeexpr safeExpr mv walkStmt:/^\tcase ir.ORECV:/+2,/^\tcase /-2 walkRecv add walk.go:/^func walkRecv/-0 \ // walkRecv walks an ORECV node. mv walkStmt:/^\tcase ir.ODCL:/+2,/^\tcase /-2 walkDecl add walk.go:/^func walkDecl/-0 \ // walkDecl walks an ODCL node. mv walkStmt:/^\tcase ir.OGO:/+2,/^\tcase /-2 walkGoDefer add walk.go:/^func walkGoDefer/-0 \ // walkGoDefer walks an OGO or ODEFER node. mv walkStmt:/^\tcase ir.OFOR,/+2,/^\tcase /-2 walkFor add walk.go:/^func walkFor/-0 \ // walkFor walks an OFOR or OFORUNTIL node. mv walkStmt:/^\tcase ir.OIF:/+2,/^\tcase /-2 walkIf add walk.go:/^func walkIf/-0 \ // walkIf walks an OIF node. mv walkStmt:/^\tcase ir.ORETURN:/+2,/^\tcase /-2 walkReturn add walk.go:/^func walkReturn/-0 \ // walkReturn walks an ORETURN node. 
mv walkExpr1:/^\tcase ir.ODOT,/+2,/^\tcase /-2 walkDot add walk.go:/^func walkDot/-0 \ // walkDot walks an ODOT or ODOTPTR node. mv walkExpr1:/^\tcase ir.ODOTTYPE,/+2,/^\tcase /-2 walkDotType add walk.go:/^func walkDotType/-0 \ // walkDotType walks an ODOTTYPE or ODOTTYPE2 node. mv walkExpr1:/^\tcase ir.OLEN,/+2,/^\tcase /-2 walkLenCap add walk.go:/^func walkLenCap/-0 \ // walkLenCap walks an OLEN or OCAP node. mv walkExpr1:/^\tcase ir.OANDAND,/+2,/^\tcase /-2 walkLogical add walk.go:/^func walkLogical/-0 \ // walkLogical walks an OANDAND or OOROR node. mv walkExpr1:/^\tcase ir.OCALLINTER,/+2,/^\tcase /-2 walkCall add walk.go:/^func walkCall/-0 \ // walkCall walks an OCALLFUNC, OCALLINTER, or OCALLMETH node. mv walkExpr1:/^\tcase ir.OAS,/+1,/^\tcase /-2 walkAssign add walk.go:/^func walkAssign/-0 \ // walkAssign walks an OAS (AssignExpr) or OASOP (AssignOpExpr) node. mv walkExpr1:/^\tcase ir.OAS2:/+2,/^\tcase /-3 walkAssignList add walk.go:/^func walkAssignList/-0 \ // walkAssignList walks an OAS2 node. mv walkExpr1:/^\tcase ir.OAS2FUNC:/+2,/^\tcase /-4 walkAssignFunc add walk.go:/^func walkAssignFunc/-0 \ // walkAssignFunc walks an OAS2FUNC node. mv walkExpr1:/^\tcase ir.OAS2RECV:/+2,/^\tcase /-3 walkAssignRecv add walk.go:/^func walkAssignRecv/-0 \ // walkAssignRecv walks an OAS2RECV node. mv walkExpr1:/^\tcase ir.OAS2MAPR:/+2,/^\tcase /-2 walkAssignMapRead add walk.go:/^func walkAssignMapRead/-0 \ // walkAssignMapRead walks an OAS2MAPR node. mv walkExpr1:/^\tcase ir.ODELETE:/+2,/^\tcase /-2 walkDelete add walk.go:/^func walkDelete/-0 \ // walkDelete walks an ODELETE node. mv walkExpr1:/^\tcase ir.OAS2DOTTYPE:/+2,/^\tcase /-2 walkAssignDotType add walk.go:/^func walkAssignDotType/-0 \ // walkAssignDotType walks an OAS2DOTTYPE node. mv walkExpr1:/^\tcase ir.OCONVIFACE:/+2,/^\tcase /-2 walkConvInterface add walk.go:/^func walkConvInterface/-0 \ // walkConvInterface walks an OCONVIFACE node. 
mv walkExpr1:/^\tcase ir.OCONV,/+2,/^\tcase /-2 walkConv add walk.go:/^func walkConv/-0 \ // walkConv walks an OCONV or OCONVNOP (but not OCONVIFACE) node. mv walkExpr1:/^\tcase ir.ODIV,/+2,/^\tcase /-2 walkDivMod add walk.go:/^func walkDivMod/-0 \ // walkDivMod walks an ODIV or OMOD node. mv walkExpr1:/^\tcase ir.OINDEX:/+2,/^\tcase /-2 walkIndex add walk.go:/^func walkIndex/-0 \ // walkIndex walks an OINDEX node. # move type assertion above comment mv walkExpr1:/^\tcase ir.OINDEXMAP:/+/n := n/-+ walkExpr1:/^\tcase ir.OINDEXMAP:/+0 mv walkExpr1:/^\tcase ir.OINDEXMAP:/+2,/^\tcase /-2 walkIndexMap add walk.go:/^func walkIndexMap/-0 \ // walkIndexMap walks an OINDEXMAP node. mv walkExpr1:/^\tcase ir.OSLICEHEADER:/+2,/^\tcase /-2 walkSliceHeader add walk.go:/^func walkSliceHeader/-0 \ // walkSliceHeader walks an OSLICEHEADER node. mv walkExpr1:/^\tcase ir.OSLICE,/+2,/^\tcase /-2 walkSlice add walk.go:/^func walkSlice/-0 \ // walkSlice walks an OSLICE, OSLICEARR, OSLICESTR, OSLICE3, or OSLICE3ARR node. mv walkExpr1:/^\tcase ir.ONEW:/+2,/^\tcase /-2 walkNew add walk.go:/^func walkNew/-0 \ // walkNew walks an ONEW node. # move type assertion above comment mv walkExpr1:/^\tcase ir.OCLOSE:/+/n := n/-+ walkExpr1:/^\tcase ir.OCLOSE:/+0 mv walkExpr1:/^\tcase ir.OCLOSE:/+2,/^\tcase /-2 walkClose add walk.go:/^func walkClose/-0 \ // walkClose walks an OCLOSE node. # move type assertion above comment mv walkExpr1:/^\tcase ir.OMAKECHAN:/+/n := n/-+ walkExpr1:/^\tcase ir.OMAKECHAN:/+0 mv walkExpr1:/^\tcase ir.OMAKECHAN:/+2,/^\tcase /-2 walkMakeChan add walk.go:/^func walkMakeChan/-0 \ // walkMakeChan walks an OMAKECHAN node. mv walkExpr1:/^\tcase ir.OMAKEMAP:/+2,/^\tcase /-2 walkMakeMap add walk.go:/^func walkMakeMap/-0 \ // walkMakeMap walks an OMAKEMAP node. mv walkExpr1:/^\tcase ir.OMAKESLICE:/+2,/^\tcase /-2 walkMakeSlice add walk.go:/^func walkMakeSlice/-0 \ // walkMakeSlice walks an OMAKESLICE node. 
mv walkExpr1:/^\tcase ir.OMAKESLICECOPY:/+2,/^\tcase /-2 walkMakeSliceCopy add walk.go:/^func walkMakeSliceCopy/-0 \ // walkMakeSliceCopy walks an OMAKESLICECOPY node. mv walkExpr1:/^\tcase ir.ORUNESTR:/+2,/^\tcase /-2 walkRuneToString add walk.go:/^func walkRuneToString/-0 \ // walkRuneToString walks an ORUNESTR node. mv walkExpr1:/^\tcase ir.OBYTES2STR,/+2,/^\tcase /-2 walkBytesRunesToString add walk.go:/^func walkBytesRunesToString/-0 \ // walkBytesRunesToString walks an OBYTES2STR or ORUNES2STR node. mv walkExpr1:/^\tcase ir.OBYTES2STRTMP:/+2,/^\tcase /-2 walkBytesToStringTemp add walk.go:/^func walkBytesToStringTemp/-0 \ // walkBytesToStringTemp walks an OBYTES2STRTMP node. mv walkExpr1:/^\tcase ir.OSTR2BYTES:/+2,/^\tcase /-2 walkStringToBytes add walk.go:/^func walkStringToBytes/-0 \ // walkStringToBytes walks an OSTR2BYTES node. # move type assertion above comment mv walkExpr1:/^\tcase ir.OSTR2BYTESTMP:/+/n := n/-+ walkExpr1:/^\tcase ir.OSTR2BYTESTMP:/+0 mv walkExpr1:/^\tcase ir.OSTR2BYTESTMP:/+2,/^\tcase /-2 walkStringToBytesTemp add walk.go:/^func walkStringToBytesTemp/-0 \ // walkStringToBytesTemp walks an OSTR2BYTESTMP node. mv walkExpr1:/^\tcase ir.OSTR2RUNES:/+2,/^\tcase /-2 walkStringToRunes add walk.go:/^func walkStringToRunes/-0 \ // walkStringToRunes walks an OSTR2RUNES node. mv walkExpr1:/^\tcase ir.OARRAYLIT,/+1,/^\tcase /-2 walkCompLit add walk.go:/^func walkCompLit/-0 \ // walkCompLit walks a composite literal node: \ // OARRAYLIT, OSLICELIT, OMAPLIT, OSTRUCTLIT (all CompLitExpr), or OPTRLIT (AddrExpr). mv walkExpr1:/^\tcase ir.OSEND:/+2,/^\tcase /-2 walkSend add walk.go:/^func walkSend/-0 \ // walkSend walks an OSEND node. 
mv walkStmt walkStmtList \ walkDecl \ walkFor \ walkGoDefer \ walkIf \ wrapCall \ stmt.go mv walkExpr walkExpr1 walkExprList walkExprListCheap walkExprListSafe \ cheapExpr safeExpr copyExpr \ walkAddString \ walkCall \ walkCall1 \ walkDivMod \ walkDot \ walkDotType \ walkIndex \ walkIndexMap \ walkLogical \ walkSend \ walkSlice \ walkSliceHeader \ reduceSlice \ bounded \ usemethod \ usefield \ expr.go mv \ walkAssign \ walkAssignDotType \ walkAssignFunc \ walkAssignList \ walkAssignMapRead \ walkAssignRecv \ walkReturn \ fncall \ ascompatee \ ascompatee1 \ ascompatet \ reorder3 \ reorder3save \ aliased \ anyAddrTaken \ refersToName \ refersToCommonName \ appendSlice \ isAppendOfMake \ extendSlice \ assign.go mv \ walkCompare \ walkCompareInterface \ walkCompareString \ finishCompare \ eqFor \ brcom \ brrev \ tracecmpArg \ canMergeLoads \ compare.go mv \ walkConv \ walkConvInterface \ walkBytesRunesToString \ walkBytesToStringTemp \ walkRuneToString \ walkStringToBytes \ walkStringToBytesTemp \ walkStringToRunes \ convFuncName \ rtconvfn \ byteindex \ walkCheckPtrAlignment \ walkCheckPtrArithmetic \ convert.go mv \ walkAppend \ walkClose \ walkCopy \ walkDelete \ walkLenCap \ walkMakeChan \ walkMakeMap \ walkMakeSlice \ walkMakeSliceCopy \ walkNew \ walkPrint \ badtype \ callnew \ writebarrierfn \ isRuneCount \ builtin.go mv \ walkCompLit \ sinit.go \ complit.go mv subr.go walk.go ' Change-Id: Ie0cf3ba4adf363c120c134d57cb7ef37934eaab9 Reviewed-on: https://go-review.googlesource.com/c/go/+/279430 Trust: Russ Cox <rsc@golang.org> Run-TryBot: Russ Cox <rsc@golang.org> Reviewed-by: Matthew Dempsky <mdempsky@google.com>
Diffstat (limited to 'src/cmd/compile/internal/walk/complit.go')
-rw-r--r--src/cmd/compile/internal/walk/complit.go682
1 files changed, 682 insertions, 0 deletions
diff --git a/src/cmd/compile/internal/walk/complit.go b/src/cmd/compile/internal/walk/complit.go
new file mode 100644
index 0000000000..6fbbee9284
--- /dev/null
+++ b/src/cmd/compile/internal/walk/complit.go
@@ -0,0 +1,682 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package walk
+
+import (
+ "cmd/compile/internal/base"
+ "cmd/compile/internal/ir"
+ "cmd/compile/internal/ssagen"
+ "cmd/compile/internal/staticdata"
+ "cmd/compile/internal/staticinit"
+ "cmd/compile/internal/typecheck"
+ "cmd/compile/internal/types"
+ "cmd/internal/obj"
+)
+
+// walkCompLit walks a composite literal node:
+// OARRAYLIT, OSLICELIT, OMAPLIT, OSTRUCTLIT (all CompLitExpr), or OPTRLIT (AddrExpr).
+func walkCompLit(n ir.Node, init *ir.Nodes) ir.Node {
+ if isStaticCompositeLiteral(n) && !ssagen.TypeOK(n.Type()) {
+ n := n.(*ir.CompLitExpr) // not OPTRLIT
+ // n can be directly represented in the read-only data section.
+ // Make direct reference to the static data. See issue 12841.
+ vstat := readonlystaticname(n.Type())
+ fixedlit(inInitFunction, initKindStatic, n, vstat, init)
+ return typecheck.Expr(vstat)
+ }
+ var_ := typecheck.Temp(n.Type())
+ anylit(n, var_, init)
+ return var_
+}
+
// initContext is the context in which static data is populated.
// It is either in an init function or in any other function.
// Static data populated in an init function will be written either
// zero times (as a readonly, static data symbol) or
// one time (during init function execution).
// Either way, there is no opportunity for races or further modification,
// so the data can be written to a (possibly readonly) data symbol.
// Static data populated in any other function needs to be local to
// that function to allow multiple instances of that function
// to execute concurrently without clobbering each others' data.
type initContext uint8

const (
	inInitFunction    initContext = iota // literal appears in an init function or global initializer
	inNonInitFunction                    // literal appears in an ordinary function body
)
+
+func (c initContext) String() string {
+ if c == inInitFunction {
+ return "inInitFunction"
+ }
+ return "inNonInitFunction"
+}
+
+// readonlystaticname returns a name backed by a (writable) static data symbol.
+func readonlystaticname(t *types.Type) *ir.Name {
+ n := staticinit.StaticName(t)
+ n.MarkReadonly()
+ n.Sym().Linksym().Set(obj.AttrContentAddressable, true)
+ return n
+}
+
+func isSimpleName(nn ir.Node) bool {
+ if nn.Op() != ir.ONAME {
+ return false
+ }
+ n := nn.(*ir.Name)
+ return n.Class_ != ir.PAUTOHEAP && n.Class_ != ir.PEXTERN
+}
+
+func litas(l ir.Node, r ir.Node, init *ir.Nodes) {
+ appendWalkStmt(init, ir.NewAssignStmt(base.Pos, l, r))
+}
+
// initGenType is a bitmap indicating the types of generation that will occur for a static value.
// The two bits may be combined: a literal containing both constant and
// dynamic parts is classified as initDynamic|initConst.
type initGenType uint8

const (
	initDynamic initGenType = 1 << iota // contains some dynamic values, for which init code will be generated
	initConst                           // contains some constant values, which may be written into data symbols
)
+
// getdyn calculates the initGenType for n.
// If top is false, getdyn is recursing.
func getdyn(n ir.Node, top bool) initGenType {
	switch n.Op() {
	default:
		// Leaf expression: a constant can live in static data;
		// anything else needs runtime initialization code.
		if ir.IsConstNode(n) {
			return initConst
		}
		return initDynamic

	case ir.OSLICELIT:
		n := n.(*ir.CompLitExpr)
		if !top {
			// A nested slice literal is always classified as dynamic.
			return initDynamic
		}
		if n.Len/4 > int64(len(n.List)) {
			// <25% of entries have explicit values.
			// Very rough estimation, it takes 4 bytes of instructions
			// to initialize 1 byte of result. So don't use a static
			// initializer if the dynamic initialization code would be
			// smaller than the static value.
			// See issue 23780.
			return initDynamic
		}

	case ir.OARRAYLIT, ir.OSTRUCTLIT:
		// Fall through to scan the elements below.
	}
	lit := n.(*ir.CompLitExpr)

	// Union the classification of every element. Stop early once both
	// bits are set: additional elements cannot change the result.
	var mode initGenType
	for _, n1 := range lit.List {
		switch n1.Op() {
		case ir.OKEY:
			n1 = n1.(*ir.KeyExpr).Value
		case ir.OSTRUCTKEY:
			n1 = n1.(*ir.StructKeyExpr).Value
		}
		mode |= getdyn(n1, false)
		if mode == initDynamic|initConst {
			break
		}
	}
	return mode
}
+
// isStaticCompositeLiteral reports whether n is a compile-time constant.
// A composite literal qualifies only if every component does; slice
// literals are never considered static here.
func isStaticCompositeLiteral(n ir.Node) bool {
	switch n.Op() {
	case ir.OSLICELIT:
		return false
	case ir.OARRAYLIT:
		n := n.(*ir.CompLitExpr)
		for _, r := range n.List {
			// Keyed entries wrap the value; look through the key.
			if r.Op() == ir.OKEY {
				r = r.(*ir.KeyExpr).Value
			}
			if !isStaticCompositeLiteral(r) {
				return false
			}
		}
		return true
	case ir.OSTRUCTLIT:
		n := n.(*ir.CompLitExpr)
		for _, r := range n.List {
			r := r.(*ir.StructKeyExpr)
			if !isStaticCompositeLiteral(r.Value) {
				return false
			}
		}
		return true
	case ir.OLITERAL, ir.ONIL:
		return true
	case ir.OCONVIFACE:
		// See staticassign's OCONVIFACE case for comments.
		n := n.(*ir.ConvExpr)
		val := ir.Node(n)
		// Strip any chain of interface conversions to reach the
		// underlying value being converted.
		for val.Op() == ir.OCONVIFACE {
			val = val.(*ir.ConvExpr).X
		}
		if val.Type().IsInterface() {
			// Interface-to-interface conversion is static only for nil.
			return val.Op() == ir.ONIL
		}
		if types.IsDirectIface(val.Type()) && val.Op() == ir.ONIL {
			return true
		}
		return isStaticCompositeLiteral(val)
	}
	return false
}
+
// initKind is a kind of static initialization: static, dynamic, or local.
// Static initialization represents literals and
// literal components of composite literals.
// Dynamic initialization represents non-literals and
// non-literal components of composite literals.
// LocalCode initialization represents initialization
// that occurs purely in generated code local to the function of use.
// Initialization code is sometimes generated in passes,
// first static then dynamic.
type initKind uint8

const (
	initKindStatic    initKind = iota + 1 // emit only constant entries, directly into static data
	initKindDynamic                       // emit only non-constant entries, as runtime assignments
	initKindLocalCode                     // emit all entries as ordinary code local to the function
)
+
// fixedlit handles struct, array, and slice literals.
// It emits initialization of the entries of composite literal n into var_,
// appending generated statements to init. kind selects which entries are
// emitted: initKindStatic emits only constant entries (written directly to
// static data via genAsStatic), initKindDynamic emits only non-constant
// entries as walked assignments, and initKindLocalCode emits every entry
// as ordinary local code. ctxt says whether the literal appears in an init
// function, which decides in which pass a nested slice literal is handled.
func fixedlit(ctxt initContext, kind initKind, n *ir.CompLitExpr, var_ ir.Node, init *ir.Nodes) {
	isBlank := var_ == ir.BlankNode
	// splitnode maps one entry of n.List to the destination lvalue (a)
	// and the value to assign to it.
	var splitnode func(ir.Node) (a ir.Node, value ir.Node)
	switch n.Op() {
	case ir.OARRAYLIT, ir.OSLICELIT:
		var k int64 // running implicit index; reset by explicit OKEY entries
		splitnode = func(r ir.Node) (ir.Node, ir.Node) {
			if r.Op() == ir.OKEY {
				kv := r.(*ir.KeyExpr)
				k = typecheck.IndexConst(kv.Key)
				if k < 0 {
					base.Fatalf("fixedlit: invalid index %v", kv.Key)
				}
				r = kv.Value
			}
			a := ir.NewIndexExpr(base.Pos, var_, ir.NewInt(k))
			k++
			if isBlank {
				return ir.BlankNode, r
			}
			return a, r
		}
	case ir.OSTRUCTLIT:
		splitnode = func(rn ir.Node) (ir.Node, ir.Node) {
			r := rn.(*ir.StructKeyExpr)
			if r.Field.IsBlank() || isBlank {
				return ir.BlankNode, r.Value
			}
			ir.SetPos(r)
			return ir.NewSelectorExpr(base.Pos, ir.ODOT, var_, r.Field), r.Value
		}
	default:
		base.Fatalf("fixedlit bad op: %v", n.Op())
	}

	for _, r := range n.List {
		a, value := splitnode(r)
		if a == ir.BlankNode && !staticinit.AnySideEffects(value) {
			// Discard.
			continue
		}

		switch value.Op() {
		case ir.OSLICELIT:
			value := value.(*ir.CompLitExpr)
			// A nested slice literal is handled by slicelit in exactly one
			// pass: the static pass outside init functions, or the dynamic
			// pass inside them.
			if (kind == initKindStatic && ctxt == inNonInitFunction) || (kind == initKindDynamic && ctxt == inInitFunction) {
				slicelit(ctxt, value, a, init)
				continue
			}

		case ir.OARRAYLIT, ir.OSTRUCTLIT:
			// Nested fixed-shape literals recurse with the same kind.
			value := value.(*ir.CompLitExpr)
			fixedlit(ctxt, kind, value, a, init)
			continue
		}

		// Skip entries that belong to the other pass: constants are emitted
		// by the static pass, non-constants by the dynamic pass.
		islit := ir.IsConstNode(value)
		if (kind == initKindStatic && !islit) || (kind == initKindDynamic && islit) {
			continue
		}

		// build list of assignments: var[index] = expr
		ir.SetPos(a)
		as := ir.NewAssignStmt(base.Pos, a, value)
		as = typecheck.Stmt(as).(*ir.AssignStmt)
		switch kind {
		case initKindStatic:
			genAsStatic(as)
		case initKindDynamic, initKindLocalCode:
			a = orderStmtInPlace(as, map[string][]*ir.Name{})
			a = walkStmt(a)
			init.Append(a)
		default:
			base.Fatalf("fixedlit: bad kind %d", kind)
		}

	}
}
+
+func isSmallSliceLit(n *ir.CompLitExpr) bool {
+ if n.Op() != ir.OSLICELIT {
+ return false
+ }
+
+ return n.Type().Elem().Width == 0 || n.Len <= ir.MaxSmallArraySize/n.Type().Elem().Width
+}
+
// slicelit initializes the slice literal n into var_, appending generated
// statements to init. Outside an init function the whole value is built in
// a function-local static array and copied into var_. Inside an init
// function a backing array is obtained (from n.Prealloc, a stack temporary
// when the literal does not escape, or new(...)), its constant part is
// copied from static data, its dynamic entries are assigned, and the array
// is finally sliced into var_.
func slicelit(ctxt initContext, n *ir.CompLitExpr, var_ ir.Node, init *ir.Nodes) {
	// make an array type corresponding the number of elements we have
	t := types.NewArray(n.Type().Elem(), n.Len)
	types.CalcSize(t)

	if ctxt == inNonInitFunction {
		// put everything into static array
		vstat := staticinit.StaticName(t)

		fixedlit(ctxt, initKindStatic, n, vstat, init)
		fixedlit(ctxt, initKindDynamic, n, vstat, init)

		// copy static to slice
		var_ = typecheck.AssignExpr(var_)
		name, offset, ok := staticinit.StaticLoc(var_)
		if !ok || name.Class_ != ir.PEXTERN {
			base.Fatalf("slicelit: %v", var_)
		}
		staticdata.InitSlice(name, offset, vstat, t.NumElem())
		return
	}

	// recipe for var = []t{...}
	// 1. make a static array
	//	var vstat [...]t
	// 2. assign (data statements) the constant part
	//	vstat = constpart{}
	// 3. make an auto pointer to array and allocate heap to it
	//	var vauto *[...]t = new([...]t)
	// 4. copy the static array to the auto array
	//	*vauto = vstat
	// 5. for each dynamic part assign to the array
	//	vauto[i] = dynamic part
	// 6. assign slice of allocated heap to var
	//	var = vauto[:]
	//
	// an optimization is done if there is no constant part
	//	3. var vauto *[...]t = new([...]t)
	//	5. vauto[i] = dynamic part
	//	6. var = vauto[:]

	// if the literal contains constants,
	// make static initialized array (1),(2)
	var vstat ir.Node

	mode := getdyn(n, true)
	if mode&initConst != 0 && !isSmallSliceLit(n) {
		// Only use a readonly symbol when an init function guarantees
		// the data cannot be modified before the copy.
		if ctxt == inInitFunction {
			vstat = readonlystaticname(t)
		} else {
			vstat = staticinit.StaticName(t)
		}
		fixedlit(ctxt, initKindStatic, n, vstat, init)
	}

	// make new auto *array (3 declare)
	vauto := typecheck.Temp(types.NewPtr(t))

	// set auto to point at new temp or heap (3 assign)
	var a ir.Node
	if x := n.Prealloc; x != nil {
		// temp allocated during order.go for dddarg
		if !types.Identical(t, x.Type()) {
			panic("dotdotdot base type does not match order's assigned type")
		}

		if vstat == nil {
			a = ir.NewAssignStmt(base.Pos, x, nil)
			a = typecheck.Stmt(a)
			init.Append(a) // zero new temp
		} else {
			// Declare that we're about to initialize all of x.
			// (Which happens at the *vauto = vstat below.)
			init.Append(ir.NewUnaryExpr(base.Pos, ir.OVARDEF, x))
		}

		a = typecheck.NodAddr(x)
	} else if n.Esc() == ir.EscNone {
		a = typecheck.Temp(t)
		if vstat == nil {
			// NOTE(review): this creates a second temp for the zeroing
			// assignment, discarding the one assigned to a just above —
			// looks redundant, but kept as-is to preserve behavior.
			a = ir.NewAssignStmt(base.Pos, typecheck.Temp(t), nil)
			a = typecheck.Stmt(a)
			init.Append(a) // zero new temp
			a = a.(*ir.AssignStmt).X
		} else {
			init.Append(ir.NewUnaryExpr(base.Pos, ir.OVARDEF, a))
		}

		a = typecheck.NodAddr(a)
	} else {
		a = ir.NewUnaryExpr(base.Pos, ir.ONEW, ir.TypeNode(t))
	}
	appendWalkStmt(init, ir.NewAssignStmt(base.Pos, vauto, a))

	if vstat != nil {
		// copy static to heap (4)
		a = ir.NewStarExpr(base.Pos, vauto)
		appendWalkStmt(init, ir.NewAssignStmt(base.Pos, a, vstat))
	}

	// put dynamics into array (5)
	var index int64
	for _, value := range n.List {
		if value.Op() == ir.OKEY {
			kv := value.(*ir.KeyExpr)
			index = typecheck.IndexConst(kv.Key)
			if index < 0 {
				base.Fatalf("slicelit: invalid index %v", kv.Key)
			}
			value = kv.Value
		}
		a := ir.NewIndexExpr(base.Pos, vauto, ir.NewInt(index))
		a.SetBounded(true)
		index++

		// TODO need to check bounds?

		switch value.Op() {
		case ir.OSLICELIT:
			break

		case ir.OARRAYLIT, ir.OSTRUCTLIT:
			value := value.(*ir.CompLitExpr)
			k := initKindDynamic
			if vstat == nil {
				// Generate both static and dynamic initializations.
				// See issue #31987.
				k = initKindLocalCode
			}
			fixedlit(ctxt, k, value, a, init)
			continue
		}

		if vstat != nil && ir.IsConstNode(value) { // already set by copy from static value
			continue
		}

		// build list of vauto[c] = expr
		ir.SetPos(value)
		as := typecheck.Stmt(ir.NewAssignStmt(base.Pos, a, value))
		as = orderStmtInPlace(as, map[string][]*ir.Name{})
		as = walkStmt(as)
		init.Append(as)
	}

	// make slice out of heap (6)
	a = ir.NewAssignStmt(base.Pos, var_, ir.NewSliceExpr(base.Pos, ir.OSLICE, vauto))

	a = typecheck.Stmt(a)
	a = orderStmtInPlace(a, map[string][]*ir.Name{})
	a = walkStmt(a)
	init.Append(a)
}
+
// maplit initializes the map literal n into the map variable m, appending
// generated statements to init. All remaining entries are static (the
// order pass removed dynamic ones). Large literals (>25 entries) are
// filled by looping over parallel read-only key/element arrays; small
// ones are inserted directly through a pair of addressable temporaries.
func maplit(n *ir.CompLitExpr, m ir.Node, init *ir.Nodes) {
	// make the map var
	a := ir.NewCallExpr(base.Pos, ir.OMAKE, nil, nil)
	a.SetEsc(n.Esc())
	// Size hint: the number of entries in the literal.
	a.Args = []ir.Node{ir.TypeNode(n.Type()), ir.NewInt(int64(len(n.List)))}
	litas(m, a, init)

	entries := n.List

	// The order pass already removed any dynamic (runtime-computed) entries.
	// All remaining entries are static. Double-check that.
	for _, r := range entries {
		r := r.(*ir.KeyExpr)
		if !isStaticCompositeLiteral(r.Key) || !isStaticCompositeLiteral(r.Value) {
			base.Fatalf("maplit: entry is not a literal: %v", r)
		}
	}

	if len(entries) > 25 {
		// For a large number of entries, put them in an array and loop.

		// build types [count]Tindex and [count]Tvalue
		tk := types.NewArray(n.Type().Key(), int64(len(entries)))
		te := types.NewArray(n.Type().Elem(), int64(len(entries)))

		// Static arrays are data only; they never need algs (hash/eq).
		tk.SetNoalg(true)
		te.SetNoalg(true)

		types.CalcSize(tk)
		types.CalcSize(te)

		// make and initialize static arrays
		vstatk := readonlystaticname(tk)
		vstate := readonlystaticname(te)

		datak := ir.NewCompLitExpr(base.Pos, ir.OARRAYLIT, nil, nil)
		datae := ir.NewCompLitExpr(base.Pos, ir.OARRAYLIT, nil, nil)
		for _, r := range entries {
			r := r.(*ir.KeyExpr)
			datak.List.Append(r.Key)
			datae.List.Append(r.Value)
		}
		fixedlit(inInitFunction, initKindStatic, datak, vstatk, init)
		fixedlit(inInitFunction, initKindStatic, datae, vstate, init)

		// loop adding structure elements to map
		// for i = 0; i < len(vstatk); i++ {
		//	map[vstatk[i]] = vstate[i]
		// }
		i := typecheck.Temp(types.Types[types.TINT])
		rhs := ir.NewIndexExpr(base.Pos, vstate, i)
		rhs.SetBounded(true)

		kidx := ir.NewIndexExpr(base.Pos, vstatk, i)
		kidx.SetBounded(true)
		lhs := ir.NewIndexExpr(base.Pos, m, kidx)

		zero := ir.NewAssignStmt(base.Pos, i, ir.NewInt(0))
		cond := ir.NewBinaryExpr(base.Pos, ir.OLT, i, ir.NewInt(tk.NumElem()))
		incr := ir.NewAssignStmt(base.Pos, i, ir.NewBinaryExpr(base.Pos, ir.OADD, i, ir.NewInt(1)))
		body := ir.NewAssignStmt(base.Pos, lhs, rhs)

		loop := ir.NewForStmt(base.Pos, nil, cond, incr, nil)
		loop.Body = []ir.Node{body}
		*loop.PtrInit() = []ir.Node{zero}

		appendWalkStmt(init, loop)
		return
	}
	// For a small number of entries, just add them directly.

	// Build list of var[c] = expr.
	// Use temporaries so that mapassign1 can have addressable key, elem.
	// TODO(josharian): avoid map key temporaries for mapfast_* assignments with literal keys.
	tmpkey := typecheck.Temp(m.Type().Key())
	tmpelem := typecheck.Temp(m.Type().Elem())

	for _, r := range entries {
		r := r.(*ir.KeyExpr)
		index, elem := r.Key, r.Value

		ir.SetPos(index)
		appendWalkStmt(init, ir.NewAssignStmt(base.Pos, tmpkey, index))

		ir.SetPos(elem)
		appendWalkStmt(init, ir.NewAssignStmt(base.Pos, tmpelem, elem))

		ir.SetPos(tmpelem)
		appendWalkStmt(init, ir.NewAssignStmt(base.Pos, ir.NewIndexExpr(base.Pos, m, tmpkey), tmpelem))
	}

	// The temporaries are dead once all entries are inserted.
	appendWalkStmt(init, ir.NewUnaryExpr(base.Pos, ir.OVARKILL, tmpkey))
	appendWalkStmt(init, ir.NewUnaryExpr(base.Pos, ir.OVARKILL, tmpelem))
}
+
// anylit generates code to initialize var_ with the value of expression n,
// appending generated statements to init. It dispatches on n's op: names
// are assigned directly, OPTRLIT obtains backing storage and recurses on
// the pointed-to literal, struct/array literals go through fixedlit,
// slice literals through slicelit, and map literals through maplit.
func anylit(n ir.Node, var_ ir.Node, init *ir.Nodes) {
	t := n.Type()
	switch n.Op() {
	default:
		base.Fatalf("anylit: not lit, op=%v node=%v", n.Op(), n)

	case ir.ONAME:
		n := n.(*ir.Name)
		appendWalkStmt(init, ir.NewAssignStmt(base.Pos, var_, n))

	case ir.OMETHEXPR:
		// A method expression reduces to its underlying function name.
		n := n.(*ir.MethodExpr)
		anylit(n.FuncName(), var_, init)

	case ir.OPTRLIT:
		n := n.(*ir.AddrExpr)
		if !t.IsPtr() {
			base.Fatalf("anylit: not ptr")
		}

		var r ir.Node
		if n.Alloc != nil {
			// n.Right is stack temporary used as backing store.
			appendWalkStmt(init, ir.NewAssignStmt(base.Pos, n.Alloc, nil)) // zero backing store, just in case (#18410)
			r = typecheck.NodAddr(n.Alloc)
		} else {
			// No preallocated storage: heap-allocate via new(T).
			r = ir.NewUnaryExpr(base.Pos, ir.ONEW, ir.TypeNode(n.X.Type()))
			r.SetEsc(n.Esc())
		}
		appendWalkStmt(init, ir.NewAssignStmt(base.Pos, var_, r))

		// Initialize the pointed-to storage with the inner literal.
		var_ = ir.NewStarExpr(base.Pos, var_)
		var_ = typecheck.AssignExpr(var_)
		anylit(n.X, var_, init)

	case ir.OSTRUCTLIT, ir.OARRAYLIT:
		n := n.(*ir.CompLitExpr)
		if !t.IsStruct() && !t.IsArray() {
			base.Fatalf("anylit: not struct/array")
		}

		if isSimpleName(var_) && len(n.List) > 4 {
			// lay out static data
			vstat := readonlystaticname(t)

			ctxt := inInitFunction
			if n.Op() == ir.OARRAYLIT {
				ctxt = inNonInitFunction
			}
			fixedlit(ctxt, initKindStatic, n, vstat, init)

			// copy static to var
			appendWalkStmt(init, ir.NewAssignStmt(base.Pos, var_, vstat))

			// add expressions to automatic
			fixedlit(inInitFunction, initKindDynamic, n, var_, init)
			break
		}

		var components int64
		if n.Op() == ir.OARRAYLIT {
			components = t.NumElem()
		} else {
			components = int64(t.NumFields())
		}
		// initialization of an array or struct with unspecified components (missing fields or arrays)
		if isSimpleName(var_) || int64(len(n.List)) < components {
			appendWalkStmt(init, ir.NewAssignStmt(base.Pos, var_, nil))
		}

		fixedlit(inInitFunction, initKindLocalCode, n, var_, init)

	case ir.OSLICELIT:
		n := n.(*ir.CompLitExpr)
		slicelit(inInitFunction, n, var_, init)

	case ir.OMAPLIT:
		n := n.(*ir.CompLitExpr)
		if !t.IsMap() {
			base.Fatalf("anylit: not map")
		}
		maplit(n, var_, init)
	}
}
+
+// oaslit handles special composite literal assignments.
+// It returns true if n's effects have been added to init,
+// in which case n should be dropped from the program by the caller.
+func oaslit(n *ir.AssignStmt, init *ir.Nodes) bool {
+ if n.X == nil || n.Y == nil {
+ // not a special composite literal assignment
+ return false
+ }
+ if n.X.Type() == nil || n.Y.Type() == nil {
+ // not a special composite literal assignment
+ return false
+ }
+ if !isSimpleName(n.X) {
+ // not a special composite literal assignment
+ return false
+ }
+ if !types.Identical(n.X.Type(), n.Y.Type()) {
+ // not a special composite literal assignment
+ return false
+ }
+
+ switch n.Y.Op() {
+ default:
+ // not a special composite literal assignment
+ return false
+
+ case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT:
+ if refersToCommonName(n.X, n.Y) {
+ // not a special composite literal assignment
+ return false
+ }
+ anylit(n.Y, n.X, init)
+ }
+
+ return true
+}
+
// genAsStatic writes the assignment as directly into static data.
// The left-hand side must resolve to a static (PEXTERN) location or be
// the blank node; the right-hand side must be an OLITERAL, an OMETHEXPR,
// or an ONAME referring to a function at offset 0. Anything else is fatal.
func genAsStatic(as *ir.AssignStmt) {
	if as.X.Type() == nil {
		base.Fatalf("genAsStatic as.Left not typechecked")
	}

	name, offset, ok := staticinit.StaticLoc(as.X)
	if !ok || (name.Class_ != ir.PEXTERN && as.X != ir.BlankNode) {
		base.Fatalf("genAsStatic: lhs %v", as.X)
	}

	switch r := as.Y; r.Op() {
	case ir.OLITERAL:
		staticdata.InitConst(name, offset, r, int(r.Type().Width))
		return
	case ir.OMETHEXPR:
		r := r.(*ir.MethodExpr)
		staticdata.InitFunc(name, offset, r.FuncName())
		return
	case ir.ONAME:
		r := r.(*ir.Name)
		if r.Offset_ != 0 {
			// A name at a nonzero offset cannot be emitted here.
			base.Fatalf("genAsStatic %+v", as)
		}
		if r.Class_ == ir.PFUNC {
			staticdata.InitFunc(name, offset, r)
			return
		}
	}
	base.Fatalf("genAsStatic: rhs %v", as.Y)
}